Merge pull request #482 from meilisearch/review-settings-endpoint

Review settings endpoint
Clément Renault 2020-02-28 11:39:38 +01:00 committed by GitHub
commit 06ace88901
19 changed files with 337 additions and 267 deletions

View File

@ -1059,12 +1059,12 @@ mod tests {
let data = r#"
{
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)"
],
"searchableAttributes": ["name", "release_date"],

View File

@ -5,6 +5,10 @@ use std::iter::IntoIterator;
use serde::{Deserialize, Deserializer, Serialize};
use once_cell::sync::Lazy;
use self::RankingRule::*;
pub const DEFAULT_RANKING_RULES: [RankingRule; 6] = [Typo, Words, Proximity, Attribute, WordsPosition, Exactness];
static RANKING_RULE_REGEX: Lazy<regex::Regex> = Lazy::new(|| {
let regex = regex::Regex::new(r"(asc|dsc)\(([a-zA-Z0-9-_]*)\)").unwrap();
regex
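
For reference, the new RANKING_RULE_REGEX only matches the custom sort rules; the built-in rules fall through to the named variants handled by FromStr below. A minimal standalone sketch of what it captures (same pattern and crates as in the diff, the assertions are illustrative):

    use once_cell::sync::Lazy;
    use regex::Regex;

    static RANKING_RULE_REGEX: Lazy<Regex> =
        Lazy::new(|| Regex::new(r"(asc|dsc)\(([a-zA-Z0-9-_]*)\)").unwrap());

    fn main() {
        // "dsc(release_date)" captures the order and the field name...
        let caps = RANKING_RULE_REGEX.captures("dsc(release_date)").unwrap();
        assert_eq!(caps.get(1).map(|m| m.as_str()), Some("dsc"));
        assert_eq!(caps.get(2).map(|m| m.as_str()), Some("release_date"));
        // ...while a plain rule like "typo" does not match at all.
        assert!(RANKING_RULE_REGEX.captures("typo").is_none());
    }
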
@ -16,7 +20,7 @@ pub struct Settings {
#[serde(default, deserialize_with = "deserialize_some")]
pub ranking_rules: Option<Option<Vec<String>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub ranking_distinct: Option<Option<String>>,
pub distinct_attribute: Option<Option<String>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub searchable_attributes: Option<Option<Vec<String>>>,
#[serde(default, deserialize_with = "deserialize_some")]
@ -26,7 +30,7 @@ pub struct Settings {
#[serde(default, deserialize_with = "deserialize_some")]
pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub index_new_fields: Option<Option<bool>>,
pub accept_new_fields: Option<Option<bool>>,
}
// Any value that is present is considered Some value, including null.
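That comment introduces the deserialize_some helper used by the #[serde(default, deserialize_with = "deserialize_some")] attributes above. Its body sits outside this diff's hunks, but the usual pattern looks like this, so a missing field stays None while an explicit null becomes Some(None):

    use serde::{Deserialize, Deserializer};

    // Any value that is present is considered Some value, including null.
    fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
    where
        T: Deserialize<'de>,
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).map(Some)
    }
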
@ -49,13 +53,13 @@ impl Settings {
Ok(SettingsUpdate {
ranking_rules,
ranking_distinct: settings.ranking_distinct.into(),
distinct_attribute: settings.distinct_attribute.into(),
identifier: UpdateState::Nothing,
searchable_attributes: settings.searchable_attributes.into(),
displayed_attributes: settings.displayed_attributes.into(),
stop_words: settings.stop_words.into(),
synonyms: settings.synonyms.into(),
index_new_fields: settings.index_new_fields.into(),
accept_new_fields: settings.accept_new_fields.into(),
})
}
}
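
The .into() calls above turn each doubly-optional field into an UpdateState. The conversion itself is not shown in this diff; a plausible sketch, given the three variants used throughout the settings update code:

    impl<T> From<Option<Option<T>>> for UpdateState<T> {
        fn from(opt: Option<Option<T>>) -> UpdateState<T> {
            match opt {
                Some(Some(value)) => UpdateState::Update(value), // field sent with a value
                Some(None) => UpdateState::Clear,                // field sent as null
                None => UpdateState::Nothing,                    // field absent from the payload
            }
        }
    }
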
@ -98,17 +102,17 @@ pub enum RankingRule {
Dsc(String),
}
impl ToString for RankingRule {
fn to_string(&self) -> String {
impl std::fmt::Display for RankingRule {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
RankingRule::Typo => "_typo".to_string(),
RankingRule::Words => "_words".to_string(),
RankingRule::Proximity => "_proximity".to_string(),
RankingRule::Attribute => "_attribute".to_string(),
RankingRule::WordsPosition => "_words_position".to_string(),
RankingRule::Exactness => "_exactness".to_string(),
RankingRule::Asc(field) => format!("asc({})", field),
RankingRule::Dsc(field) => format!("dsc({})", field),
RankingRule::Typo => f.write_str("typo"),
RankingRule::Words => f.write_str("words"),
RankingRule::Proximity => f.write_str("proximity"),
RankingRule::Attribute => f.write_str("attribute"),
RankingRule::WordsPosition => f.write_str("wordsPosition"),
RankingRule::Exactness => f.write_str("exactness"),
RankingRule::Asc(field) => write!(f, "asc({})", field),
RankingRule::Dsc(field) => write!(f, "dsc({})", field),
}
}
}
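
Switching from a manual ToString impl to Display keeps .to_string() available through the standard blanket impl, and the new names round-trip with the updated FromStr below. A small usage sketch, assuming the crate's RankingRule is in scope:

    use std::str::FromStr;

    let rule = RankingRule::from_str("dsc(release_date)").unwrap();
    assert_eq!(rule.to_string(), "dsc(release_date)");
    assert_eq!(RankingRule::WordsPosition.to_string(), "wordsPosition");
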
@ -118,12 +122,12 @@ impl FromStr for RankingRule {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let rule = match s {
"_typo" => RankingRule::Typo,
"_words" => RankingRule::Words,
"_proximity" => RankingRule::Proximity,
"_attribute" => RankingRule::Attribute,
"_words_position" => RankingRule::WordsPosition,
"_exactness" => RankingRule::Exactness,
"typo" => RankingRule::Typo,
"words" => RankingRule::Words,
"proximity" => RankingRule::Proximity,
"attribute" => RankingRule::Attribute,
"wordsPosition" => RankingRule::WordsPosition,
"exactness" => RankingRule::Exactness,
_ => {
let captures = RANKING_RULE_REGEX.captures(s).ok_or(RankingRuleConversionError)?;
match (captures.get(1).map(|m| m.as_str()), captures.get(2)) {
@ -155,26 +159,26 @@ impl RankingRule {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SettingsUpdate {
pub ranking_rules: UpdateState<Vec<RankingRule>>,
pub ranking_distinct: UpdateState<String>,
pub distinct_attribute: UpdateState<String>,
pub identifier: UpdateState<String>,
pub searchable_attributes: UpdateState<Vec<String>>,
pub displayed_attributes: UpdateState<HashSet<String>>,
pub stop_words: UpdateState<BTreeSet<String>>,
pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
pub index_new_fields: UpdateState<bool>,
pub accept_new_fields: UpdateState<bool>,
}
impl Default for SettingsUpdate {
fn default() -> Self {
Self {
ranking_rules: UpdateState::Nothing,
ranking_distinct: UpdateState::Nothing,
distinct_attribute: UpdateState::Nothing,
identifier: UpdateState::Nothing,
searchable_attributes: UpdateState::Nothing,
displayed_attributes: UpdateState::Nothing,
stop_words: UpdateState::Nothing,
synonyms: UpdateState::Nothing,
index_new_fields: UpdateState::Nothing,
accept_new_fields: UpdateState::Nothing,
}
}
}
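
Because every field now defaults to UpdateState::Nothing, a route that only touches one setting can rely on struct-update syntax, as the handlers later in this PR do:

    let settings_update = SettingsUpdate {
        distinct_attribute: UpdateState::Clear,
        ..SettingsUpdate::default()
    };
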

View File

@ -12,7 +12,7 @@ use crate::settings::RankingRule;
const CREATED_AT_KEY: &str = "created-at";
const RANKING_RULES_KEY: &str = "ranking-rules";
const RANKING_DISTINCT_KEY: &str = "ranking-distinct";
const DISTINCT_ATTRIBUTE_KEY: &str = "distinct-attribute";
const STOP_WORDS_KEY: &str = "stop-words";
const SYNONYMS_KEY: &str = "synonyms";
const CUSTOMS_KEY: &str = "customs";
@ -200,19 +200,19 @@ impl Main {
self.main.delete::<_, Str>(writer, RANKING_RULES_KEY)
}
pub fn ranking_distinct(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> {
if let Some(value) = self.main.get::<_, Str, Str>(reader, RANKING_DISTINCT_KEY)? {
pub fn distinct_attribute(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> {
if let Some(value) = self.main.get::<_, Str, Str>(reader, DISTINCT_ATTRIBUTE_KEY)? {
return Ok(Some(value.to_owned()))
}
return Ok(None)
}
pub fn put_ranking_distinct(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> ZResult<()> {
self.main.put::<_, Str, Str>(writer, RANKING_DISTINCT_KEY, value)
pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> ZResult<()> {
self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value)
}
pub fn delete_ranking_distinct(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
self.main.delete::<_, Str>(writer, RANKING_DISTINCT_KEY)
pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY)
}
pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> ZResult<()> {

View File

@ -58,22 +58,22 @@ pub fn apply_settings_update(
UpdateState::Nothing => (),
}
match settings.ranking_distinct {
match settings.distinct_attribute {
UpdateState::Update(v) => {
index.main.put_ranking_distinct(writer, &v)?;
index.main.put_distinct_attribute(writer, &v)?;
},
UpdateState::Clear => {
index.main.delete_ranking_distinct(writer)?;
index.main.delete_distinct_attribute(writer)?;
},
UpdateState::Nothing => (),
}
match settings.index_new_fields {
match settings.accept_new_fields {
UpdateState::Update(v) => {
schema.set_index_new_fields(v);
schema.set_accept_new_fields(v);
},
UpdateState::Clear => {
schema.set_index_new_fields(true);
schema.set_accept_new_fields(true);
},
UpdateState::Nothing => (),
}
@ -84,8 +84,7 @@ pub fn apply_settings_update(
must_reindex = true;
},
UpdateState::Clear => {
let clear: Vec<&str> = Vec::new();
schema.update_indexed(clear)?;
schema.set_all_fields_as_indexed();
must_reindex = true;
},
UpdateState::Nothing => (),
@ -93,8 +92,7 @@ pub fn apply_settings_update(
match settings.displayed_attributes.clone() {
UpdateState::Update(v) => schema.update_displayed(v)?,
UpdateState::Clear => {
let clear: Vec<&str> = Vec::new();
schema.update_displayed(clear)?;
schema.set_all_fields_as_displayed();
},
UpdateState::Nothing => (),
}

View File

@ -6,8 +6,8 @@ use chrono::{DateTime, Utc};
use heed::types::{SerdeBincode, Str};
use log::error;
use meilisearch_core::{Database, Error as MError, MResult, MainT, UpdateT};
use sysinfo::Pid;
use sha2::Digest;
use sysinfo::Pid;
use crate::option::Opt;
use crate::routes::index::index_update_callback;
@ -117,9 +117,7 @@ impl DataInner {
// convert attributes to their names
let frequency: HashMap<_, _> = fields_frequency
.into_iter()
.filter_map(|(a, c)| {
schema.name(a).map(|name| (name.to_string(), c))
})
.filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c)))
.collect();
index

View File

@ -6,7 +6,7 @@ use tide::Request;
pub enum ACL {
Admin,
Private,
Public
Public,
}
pub trait RequestExt {
@ -23,31 +23,33 @@ impl RequestExt for Request<Data> {
match acl {
ACL::Admin => {
if user_api_key == self.state().api_keys.master.as_deref() {
return Ok(())
return Ok(());
}
},
}
ACL::Private => {
if user_api_key == self.state().api_keys.master.as_deref() {
return Ok(())
return Ok(());
}
if user_api_key == self.state().api_keys.private.as_deref() {
return Ok(())
return Ok(());
}
},
}
ACL::Public => {
if user_api_key == self.state().api_keys.master.as_deref() {
return Ok(())
return Ok(());
}
if user_api_key == self.state().api_keys.private.as_deref() {
return Ok(())
return Ok(());
}
if user_api_key == self.state().api_keys.public.as_deref() {
return Ok(())
return Ok(());
}
}
}
Err(ResponseError::InvalidToken(user_api_key.unwrap_or("Need a token").to_owned()))
Err(ResponseError::InvalidToken(
user_api_key.unwrap_or("Need a token").to_owned(),
))
}
fn url_param(&self, name: &str) -> SResult<String> {

View File

@ -18,19 +18,21 @@ mod analytics;
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
pub fn main() -> Result<(), MainError> {
let opt = Opt::from_args();
match opt.env.as_ref() {
"production" => {
if opt.master_key.is_none() {
return Err("In production mode, the environment variable MEILI_MASTER_KEY is mandatory".into());
return Err(
"In production mode, the environment variable MEILI_MASTER_KEY is mandatory"
.into(),
);
}
env_logger::init();
},
}
"development" => {
env_logger::from_env(env_logger::Env::default().default_filter_or("info")).init();
},
}
_ => unreachable!(),
}
@ -58,7 +60,6 @@ pub fn main() -> Result<(), MainError> {
Ok(())
}
pub fn print_launch_resume(opt: &Opt, data: &Data) {
let ascii_name = r#"
888b d888 d8b 888 d8b .d8888b. 888
@ -77,8 +78,14 @@ pub fn print_launch_resume(opt: &Opt, data: &Data) {
info!("Start server on: {:?}", opt.http_addr);
info!("Environment: {:?}", opt.env);
info!("Commit SHA: {:?}", env!("VERGEN_SHA").to_string());
info!("Build date: {:?}", env!("VERGEN_BUILD_TIMESTAMP").to_string());
info!("Package version: {:?}", env!("CARGO_PKG_VERSION").to_string());
info!(
"Build date: {:?}",
env!("VERGEN_BUILD_TIMESTAMP").to_string()
);
info!(
"Package version: {:?}",
env!("CARGO_PKG_VERSION").to_string()
);
if let Some(master_key) = &data.api_keys.master {
info!("Master Key: {:?}", master_key);

View File

@ -145,7 +145,7 @@ async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) ->
None => return Err(ResponseError::bad_request("Could not infer a schema")),
},
};
let settings_update = SettingsUpdate{
let settings_update = SettingsUpdate {
identifier: UpdateState::Update(id),
..SettingsUpdate::default()
};

View File

@ -42,7 +42,7 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
let identifier = match index.main.schema(&reader) {
Ok(Some(schema)) => Some(schema.identifier().to_owned()),
_ => None
_ => None,
};
let index_response = IndexResponse {
@ -89,7 +89,7 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
let identifier = match index.main.schema(&reader) {
Ok(Some(schema)) => Some(schema.identifier().to_owned()),
_ => None
_ => None,
};
let response_body = IndexResponse {
@ -97,7 +97,7 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
uid,
created_at,
updated_at,
identifier
identifier,
};
Ok(tide::Response::new(200).body_json(&response_body)?)
@ -220,9 +220,13 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
if let Some(identifier) = body.identifier {
if let Ok(Some(_)) = index.main.schema(&writer) {
return Err(ResponseError::bad_request("The index identifier cannot be updated"));
return Err(ResponseError::bad_request(
"The index identifier cannot be updated",
));
}
index.main.put_schema(&mut writer, &Schema::with_identifier(&identifier))?;
index
.main
.put_schema(&mut writer, &Schema::with_identifier(&identifier))?;
}
index.main.put_updated_at(&mut writer)?;
@ -235,7 +239,7 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
let identifier = match index.main.schema(&reader) {
Ok(Some(schema)) => Some(schema.identifier().to_owned()),
_ => None
_ => None,
};
let response_body = UpdateIndexResponse {
@ -243,7 +247,7 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
uid: index_uid,
created_at,
updated_at,
identifier
identifier,
};
Ok(tide::Response::new(200).body_json(&response_body)?)

View File

@ -1,18 +1,17 @@
use tide::{Request, Response};
use serde_json::json;
use crate::error::SResult;
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::Data;
use serde_json::json;
use tide::{Request, Response};
pub async fn list(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let keys = &ctx.state().api_keys;
Ok(tide::Response::new(200)
.body_json(&json!({
"private": keys.private,
"public": keys.public,
}))?)
Ok(tide::Response::new(200).body_json(&json!({
"private": keys.private,
"public": keys.public,
}))?)
}

View File

@ -23,19 +23,15 @@ async fn into_response<T: IntoResponse, U: IntoResponse>(
}
pub fn load_routes(app: &mut tide::Server<Data>) {
app.at("/").get(|_| {
async move {
tide::Response::new(200)
.body_string(include_str!("../../public/interface.html").to_string())
.set_mime(mime::TEXT_HTML_UTF_8)
}
app.at("/").get(|_| async {
tide::Response::new(200)
.body_string(include_str!("../../public/interface.html").to_string())
.set_mime(mime::TEXT_HTML_UTF_8)
});
app.at("/bulma.min.css").get(|_| {
async {
tide::Response::new(200)
.body_string(include_str!("../../public/bulma.min.css").to_string())
.set_mime(mime::TEXT_CSS_UTF_8)
}
app.at("/bulma.min.css").get(|_| async {
tide::Response::new(200)
.body_string(include_str!("../../public/bulma.min.css").to_string())
.set_mime(mime::TEXT_CSS_UTF_8)
});
app.at("/indexes")
@ -82,7 +78,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
.post(|ctx| into_response(setting::update_rules(ctx)))
.delete(|ctx| into_response(setting::delete_rules(ctx)));
app.at("/indexes/:index/settings/ranking-distinct")
app.at("/indexes/:index/settings/distinct-attribute")
.get(|ctx| into_response(setting::get_distinct(ctx)))
.post(|ctx| into_response(setting::update_distinct(ctx)))
.delete(|ctx| into_response(setting::delete_distinct(ctx)));
@ -101,8 +97,8 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
.delete(|ctx| into_response(setting::delete_displayed(ctx)));
app.at("/indexes/:index/settings/index-new-field")
.get(|ctx| into_response(setting::get_index_new_fields(ctx)))
.post(|ctx| into_response(setting::update_index_new_fields(ctx)));
.get(|ctx| into_response(setting::get_accept_new_fields(ctx)))
.post(|ctx| into_response(setting::update_accept_new_fields(ctx)));
app.at("/indexes/:index/settings/synonyms")
.get(|ctx| into_response(synonym::get(ctx)))
@ -117,8 +113,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
app.at("/indexes/:index/stats")
.get(|ctx| into_response(stats::index_stats(ctx)));
app.at("/keys/")
.get(|ctx| into_response(key::list(ctx)));
app.at("/keys/").get(|ctx| into_response(key::list(ctx)));
app.at("/health")
.get(|ctx| into_response(health::get_health(ctx)))

View File

@ -7,10 +7,10 @@ use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize};
use tide::{Request, Response};
use crate::helpers::tide::ACL::*;
use crate::error::{ResponseError, SResult};
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::Data;
#[derive(Deserialize)]

View File

@ -1,4 +1,4 @@
use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState};
use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
use serde::Deserialize;
use std::collections::{BTreeMap, BTreeSet, HashSet};
use tide::{Request, Response};
@ -46,16 +46,21 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
None
};
let ranking_rules = match index.main.ranking_rules(&reader)? {
Some(rules) => Some(rules.iter().map(|r| r.to_string()).collect()),
None => None,
};
let ranking_distinct = index.main.ranking_distinct(&reader)?;
let ranking_rules = index
.main
.ranking_rules(&reader)?
.unwrap_or(DEFAULT_RANKING_RULES.to_vec())
.into_iter()
.map(|r| r.to_string())
.collect();
let distinct_attribute = index.main.distinct_attribute(&reader)?;
let schema = index.main.schema(&reader)?;
let searchable_attributes = schema.clone().map(|s| {
let attrs = s.indexed_name()
let attrs = s
.indexed_name()
.iter()
.map(|s| (*s).to_string())
.collect::<Vec<String>>();
@ -67,7 +72,8 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
});
let displayed_attributes = schema.clone().map(|s| {
let attrs = s.displayed_name()
let attrs = s
.displayed_name()
.iter()
.map(|s| (*s).to_string())
.collect::<HashSet<String>>();
@ -77,16 +83,16 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
Some(attrs)
}
});
let index_new_fields = schema.map(|s| s.index_new_fields());
let accept_new_fields = schema.map(|s| s.accept_new_fields());
let settings = Settings {
ranking_rules: Some(ranking_rules),
ranking_distinct: Some(ranking_distinct),
ranking_rules: Some(Some(ranking_rules)),
distinct_attribute: Some(distinct_attribute),
searchable_attributes,
displayed_attributes,
stop_words: Some(stop_words),
synonyms: Some(synonyms),
index_new_fields: Some(index_new_fields),
accept_new_fields: Some(accept_new_fields),
};
Ok(tide::Response::new(200).body_json(&settings).unwrap())
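
With unset ranking rules now falling back to DEFAULT_RANKING_RULES (and the field wrapped in the double Option that Settings expects), GET /indexes/:index/settings reports the six built-in rules instead of null for a fresh index. The updated settings tests later in this diff expect exactly:

    // shape expected for an index that has no custom ranking rules
    let expected_ranking_rules = serde_json::json!([
        "typo", "words", "proximity", "attribute", "wordsPosition", "exactness"
    ]);
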
@ -96,13 +102,13 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct UpdateSettings {
pub ranking_rules: Option<Vec<String>>,
pub ranking_distinct: Option<String>,
pub distinct_attribute: Option<String>,
pub identifier: Option<String>,
pub searchable_attributes: Option<Vec<String>>,
pub displayed_attributes: Option<HashSet<String>>,
pub stop_words: Option<BTreeSet<String>>,
pub synonyms: Option<BTreeMap<String, Vec<String>>>,
pub index_new_fields: Option<bool>,
pub accept_new_fields: Option<bool>,
}
pub async fn update_all(mut ctx: Request<Data>) -> SResult<Response> {
@ -114,12 +120,12 @@ pub async fn update_all(mut ctx: Request<Data>) -> SResult<Response> {
let settings = Settings {
ranking_rules: Some(settings_update.ranking_rules),
ranking_distinct: Some(settings_update.ranking_distinct),
distinct_attribute: Some(settings_update.distinct_attribute),
searchable_attributes: Some(settings_update.searchable_attributes),
displayed_attributes: Some(settings_update.displayed_attributes),
stop_words: Some(settings_update.stop_words),
synonyms: Some(settings_update.synonyms),
index_new_fields: Some(settings_update.index_new_fields),
accept_new_fields: Some(settings_update.accept_new_fields),
};
let mut writer = db.update_write_txn()?;
@ -138,13 +144,13 @@ pub async fn delete_all(ctx: Request<Data>) -> SResult<Response> {
let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear,
ranking_distinct: UpdateState::Clear,
distinct_attribute: UpdateState::Clear,
identifier: UpdateState::Clear,
searchable_attributes: UpdateState::Clear,
displayed_attributes: UpdateState::Clear,
stop_words: UpdateState::Clear,
synonyms: UpdateState::Clear,
index_new_fields: UpdateState::Clear,
accept_new_fields: UpdateState::Clear,
};
let update_id = index.settings_update(&mut writer, settings)?;
@ -161,10 +167,13 @@ pub async fn get_rules(ctx: Request<Data>) -> SResult<Response> {
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let ranking_rules: Option<Vec<String>> = match index.main.ranking_rules(&reader)? {
Some(rules) => Some(rules.iter().map(|r| r.to_string()).collect()),
None => None,
};
let ranking_rules = index
.main
.ranking_rules(&reader)?
.unwrap_or(DEFAULT_RANKING_RULES.to_vec())
.into_iter()
.map(|r| r.to_string())
.collect::<Vec<String>>();
Ok(tide::Response::new(200).body_json(&ranking_rules).unwrap())
}
@ -214,22 +223,22 @@ pub async fn get_distinct(ctx: Request<Data>) -> SResult<Response> {
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let ranking_distinct = index.main.ranking_distinct(&reader)?;
let distinct_attribute = index.main.distinct_attribute(&reader)?;
Ok(tide::Response::new(200)
.body_json(&ranking_distinct)
.body_json(&distinct_attribute)
.unwrap())
}
pub async fn update_distinct(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let ranking_distinct: Option<String> =
let distinct_attribute: Option<String> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
ranking_distinct: Some(ranking_distinct),
distinct_attribute: Some(distinct_attribute),
..Settings::default()
};
@ -248,7 +257,7 @@ pub async fn delete_distinct(ctx: Request<Data>) -> SResult<Response> {
let mut writer = db.update_write_txn()?;
let settings = SettingsUpdate {
ranking_distinct: UpdateState::Clear,
distinct_attribute: UpdateState::Clear,
..SettingsUpdate::default()
};
@ -385,7 +394,7 @@ pub async fn delete_displayed(ctx: Request<Data>) -> SResult<Response> {
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn get_index_new_fields(ctx: Request<Data>) -> SResult<Response> {
pub async fn get_accept_new_fields(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
@ -393,22 +402,22 @@ pub async fn get_index_new_fields(ctx: Request<Data>) -> SResult<Response> {
let schema = index.main.schema(&reader)?;
let index_new_fields = schema.map(|s| s.index_new_fields());
let accept_new_fields = schema.map(|s| s.accept_new_fields());
Ok(tide::Response::new(200)
.body_json(&index_new_fields)
.body_json(&accept_new_fields)
.unwrap())
}
pub async fn update_index_new_fields(mut ctx: Request<Data>) -> SResult<Response> {
pub async fn update_accept_new_fields(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let index_new_fields: Option<bool> =
let accept_new_fields: Option<bool> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
index_new_fields: Some(index_new_fields),
accept_new_fields: Some(accept_new_fields),
..Settings::default()
};

View File

@ -57,16 +57,16 @@ pub fn enrich_server_with_movies_settings(
) -> Result<(), Box<dyn Error>> {
let json = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)",
],
"rankingDistinct": null,
"distinctAttribute": null,
"searchableAttributes": [
"title",
"tagline",
@ -92,7 +92,7 @@ pub fn enrich_server_with_movies_settings(
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
let body = json.to_string().into_bytes();
@ -179,7 +179,7 @@ pub fn wait_update_id(server: &mut TestBackend<Service<Data>>, update_id: u64) {
let response: Value = serde_json::from_slice(&buf).unwrap();
if response["status"] == "processed" {
return
return;
}
block_on(sleep(Duration::from_secs(1)));
}

View File

@ -626,16 +626,16 @@ fn search_with_settings_basic() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"title",
@ -662,7 +662,7 @@ fn search_with_settings_basic() {
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);
@ -732,16 +732,16 @@ fn search_with_settings_stop_words() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"title",
@ -768,7 +768,7 @@ fn search_with_settings_stop_words() {
],
"stopWords": ["the"],
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);
@ -839,16 +839,16 @@ fn search_with_settings_synonyms() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"title",
@ -880,7 +880,7 @@ fn search_with_settings_synonyms() {
"Iron Man"
]
},
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);
@ -951,16 +951,16 @@ fn search_with_settings_ranking_rules() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"asc(vote_average)",
"_exactness",
"exactness",
"dsc(popularity)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"title",
@ -987,7 +987,7 @@ fn search_with_settings_ranking_rules() {
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);
@ -1058,16 +1058,16 @@ fn search_with_settings_searchable_attributes() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"tagline",
@ -1093,7 +1093,7 @@ fn search_with_settings_searchable_attributes() {
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);
@ -1164,16 +1164,16 @@ fn search_with_settings_displayed_attributes() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"title",
@ -1194,7 +1194,7 @@ fn search_with_settings_displayed_attributes() {
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);
@ -1235,16 +1235,16 @@ fn search_with_settings_searchable_attributes_2() {
let config = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"dsc(popularity)",
"_exactness",
"exactness",
"dsc(vote_average)"
],
"rankingDistinct": null,
"distinctAttribute": null,
"identifier": "id",
"searchableAttributes": [
"tagline",
@ -1265,7 +1265,7 @@ fn search_with_settings_searchable_attributes_2() {
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
"acceptNewFields": false,
});
common::update_config(&mut server, config);

View File

@ -41,16 +41,16 @@ fn write_all_and_delete() {
let json = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
"dsc(rank)",
],
"rankingDistinct": "movie_id",
"distinctAttribute": "movie_id",
"searchableAttributes": [
"id",
"movie_id",
@ -76,7 +76,7 @@ fn write_all_and_delete() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"indexNewFields": false,
"acceptNewFields": false,
});
let body = json.to_string().into_bytes();
@ -126,13 +126,36 @@ fn write_all_and_delete() {
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let json = json!({
"rankingRules": null,
"rankingDistinct": null,
"searchableAttributes": null,
"displayedAttributes": null,
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness"
],
"distinctAttribute": null,
"searchableAttributes": [
"id",
"release_date",
"poster",
"description",
"title",
"movie_id",
"rank"
],
"displayedAttributes": [
"movie_id",
"description",
"poster",
"id",
"release_date",
"rank",
"title"
],
"stopWords": null,
"synonyms": null,
"indexNewFields": true,
"acceptNewFields": true,
});
assert_json_eq!(json, res_value, ordered: false);
@ -169,16 +192,16 @@ fn write_all_and_update() {
let json = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
"dsc(rank)",
],
"rankingDistinct": "movie_id",
"distinctAttribute": "movie_id",
"searchableAttributes": [
"uid",
"movie_id",
@ -204,7 +227,7 @@ fn write_all_and_update() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"indexNewFields": false,
"acceptNewFields": false,
});
let body = json.to_string().into_bytes();
@ -235,12 +258,12 @@ fn write_all_and_update() {
let json_update = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
],
"searchableAttributes": [
@ -261,7 +284,7 @@ fn write_all_and_update() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
},
"indexNewFields": false,
"acceptNewFields": false,
});
let body_update = json_update.to_string().into_bytes();
@ -288,15 +311,15 @@ fn write_all_and_update() {
let res_expected = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
],
"rankingDistinct": null,
"distinctAttribute": null,
"searchableAttributes": [
"title",
"description",
@ -314,7 +337,7 @@ fn write_all_and_update() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
},
"indexNewFields": false
"acceptNewFields": false
});
assert_json_eq!(res_expected, res_value, ordered: false);

View File

@ -39,12 +39,12 @@ fn write_all_and_delete() {
// 2 - Send the settings
let json = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
"dsc(rank)",
]);
@ -95,7 +95,14 @@ fn write_all_and_delete() {
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let json = json!(null);
let json = json!([
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness"
]);
assert_json_eq!(json, res_value, ordered: false);
}
@ -130,12 +137,12 @@ fn write_all_and_update() {
// 2 - Send the settings
let json = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
"dsc(rank)",
]);
@ -167,12 +174,12 @@ fn write_all_and_update() {
// 4 - Update all settings
let json_update = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
]);
@ -199,12 +206,12 @@ fn write_all_and_update() {
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let res_expected = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exactness",
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"dsc(release_date)",
]);

View File

@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::collections::hash_map::Iter;
use serde::{Deserialize, Serialize};
@ -45,6 +46,10 @@ impl FieldsMap {
pub fn name<I: Into<FieldId>>(&self, id: I) -> Option<&str> {
self.id_map.get(&id.into()).map(|s| s.as_str())
}
pub fn iter(&self) -> Iter<'_, String, FieldId> {
self.name_map.iter()
}
}
#[cfg(test)]

View File

@ -13,7 +13,7 @@ pub struct Schema {
indexed: Vec<FieldId>,
indexed_map: HashMap<FieldId, IndexedPos>,
index_new_fields: bool,
accept_new_fields: bool,
}
impl Schema {
@ -28,7 +28,7 @@ impl Schema {
displayed: HashSet::new(),
indexed: Vec::new(),
indexed_map: HashMap::new(),
index_new_fields: true,
accept_new_fields: true,
}
}
@ -68,7 +68,7 @@ impl Schema {
Ok(id)
}
None => {
if self.index_new_fields {
if self.accept_new_fields {
self.set_indexed(name)?;
self.set_displayed(name)
} else {
@ -190,11 +190,30 @@ impl Schema {
Ok(())
}
pub fn index_new_fields(&self) -> bool {
self.index_new_fields
pub fn set_all_fields_as_indexed(&mut self) {
self.indexed.clear();
self.indexed_map.clear();
for (_name, id) in self.fields_map.iter() {
let pos = self.indexed.len() as u16;
self.indexed.push(*id);
self.indexed_map.insert(*id, pos.into());
}
}
pub fn set_index_new_fields(&mut self, value: bool) {
self.index_new_fields = value;
pub fn set_all_fields_as_displayed(&mut self) {
self.displayed.clear();
for (_name, id) in self.fields_map.iter() {
self.displayed.insert(*id);
}
}
pub fn accept_new_fields(&self) -> bool {
self.accept_new_fields
}
pub fn set_accept_new_fields(&mut self, value: bool) {
self.accept_new_fields = value;
}
}
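
Taken together, the schema now separates "accept new fields" from "index/display everything": clearing searchableAttributes or displayedAttributes re-registers every known field instead of emptying the lists. A short sketch of the renamed and new helpers (the identifier is illustrative):

    let mut schema = Schema::with_identifier("id");
    schema.set_accept_new_fields(false);   // stop auto-adding unknown fields
    schema.set_all_fields_as_indexed();    // every known field becomes searchable again
    schema.set_all_fields_as_displayed();  // ...and displayed
    assert!(!schema.accept_new_fields());
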