diff --git a/datasets/movies/settings.json b/datasets/movies/settings.json
index 66eeedba6..0edefd7d0 100644
--- a/datasets/movies/settings.json
+++ b/datasets/movies/settings.json
@@ -1,7 +1,7 @@
 {
   "identifier": "id",
-  "searchable_attributes": ["title", "overview"],
-  "displayed_attributes": [
+  "searchableAttributes": ["title", "overview"],
+  "displayedAttributes": [
     "id",
     "title",
     "overview",
diff --git a/meilisearch-core/src/bucket_sort.rs b/meilisearch-core/src/bucket_sort.rs
index 8b2c7ebe5..4c34f1abd 100644
--- a/meilisearch-core/src/bucket_sort.rs
+++ b/meilisearch-core/src/bucket_sort.rs
@@ -172,7 +172,6 @@ where
     debug!("bucket sort took {:.02?}", before_bucket_sort.elapsed());
 
-
     Ok(documents)
 }
diff --git a/meilisearch-core/src/error.rs b/meilisearch-core/src/error.rs
index c65f198e7..862c517a0 100644
--- a/meilisearch-core/src/error.rs
+++ b/meilisearch-core/src/error.rs
@@ -87,12 +87,12 @@ impl fmt::Display for Error {
         match self {
             Io(e) => write!(f, "{}", e),
             IndexAlreadyExists => write!(f, "index already exists"),
-            MissingIdentifier => write!(f, "schema cannot be build without identifier"),
+            MissingIdentifier => write!(f, "schema cannot be built without identifier"),
             SchemaMissing => write!(f, "this index does not have a schema"),
             WordIndexMissing => write!(f, "this index does not have a word index"),
             MissingDocumentId => write!(f, "document id is missing"),
-            MaxFieldsLimitExceeded => write!(f, "maximum field in a document is exceeded"),
-            Schema(e) => write!(f, "schemas error; {}", e),
+            MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
+            Schema(e) => write!(f, "schema error; {}", e),
             Zlmdb(e) => write!(f, "heed error; {}", e),
             Fst(e) => write!(f, "fst error; {}", e),
             SerdeJson(e) => write!(f, "serde json error; {}", e),
diff --git a/meilisearch-core/src/query_builder.rs b/meilisearch-core/src/query_builder.rs
index f3a3f462b..5bde93665 100644
--- a/meilisearch-core/src/query_builder.rs
+++ b/meilisearch-core/src/query_builder.rs
@@ -275,7 +275,7 @@ mod tests {
             let mut final_indexes = Vec::new();
             for index in indexes {
                 let name = index.attribute.to_string();
-                schema.get_or_create_empty(&name).unwrap();
+                schema.insert(&name).unwrap();
                 let indexed_pos = schema.set_indexed(&name).unwrap().1;
                 let index = DocIndex {
                     attribute: indexed_pos.0,
diff --git a/meilisearch-core/src/serde/serializer.rs b/meilisearch-core/src/serde/serializer.rs
index 2b38e0aeb..20e26886d 100644
--- a/meilisearch-core/src/serde/serializer.rs
+++ b/meilisearch-core/src/serde/serializer.rs
@@ -235,7 +235,7 @@ impl<'a, 'b> ser::SerializeMap for MapSerializer<'a, 'b> {
         let key = key.serialize(ConvertToString)?;
         serialize_value(
             self.txn,
-            key,
+            key.as_str(),
             self.schema,
             self.document_id,
             self.document_store,
@@ -275,7 +275,7 @@ impl<'a, 'b> ser::SerializeStruct for StructSerializer<'a, 'b> {
     {
         serialize_value(
             self.txn,
-            key.to_string(),
+            key,
             self.schema,
             self.document_id,
             self.document_store,
@@ -293,7 +293,7 @@
 pub fn serialize_value<'a, T: ?Sized>(
     txn: &mut heed::RwTxn,
-    attribute: String,
+    attribute: &str,
     schema: &'a mut Schema,
     document_id: DocumentId,
     document_store: DocumentsFields,
@@ -305,7 +305,7 @@
 where
     T: ser::Serialize,
 {
-    let field_id = schema.get_or_create(&attribute)?;
+    let field_id = schema.insert_and_index(&attribute)?;
     serialize_value_with_id(
         txn,
         field_id,
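The `meilisearch-schema` calls above move from `get_or_create_empty`/`get_or_create` to `insert`/`insert_and_index`. The sketch below is a hypothetical, self-contained stand-in rather than the crate's real `Schema`; it only mirrors the split the diff relies on, registering a field versus registering it and marking it indexed.

```rust
use std::collections::HashMap;

/// Hypothetical, simplified stand-in for `meilisearch_schema::Schema`,
/// only here to illustrate the renamed entry points.
#[derive(Default)]
struct MiniSchema {
    fields: HashMap<String, u16>, // field name -> field id
    indexed: Vec<u16>,            // ids that are indexed for search, in order
}

impl MiniSchema {
    /// `insert` (ex `get_or_create_empty`): register the field name and
    /// hand back its id, without touching the indexed attributes.
    fn insert(&mut self, name: &str) -> u16 {
        let next = self.fields.len() as u16;
        *self.fields.entry(name.to_string()).or_insert(next)
    }

    /// `insert_and_index` (ex `get_or_create`): register the field and
    /// also mark it as indexed if it was not already.
    fn insert_and_index(&mut self, name: &str) -> u16 {
        let id = self.insert(name);
        if !self.indexed.contains(&id) {
            self.indexed.push(id);
        }
        id
    }
}

fn main() {
    let mut schema = MiniSchema::default();
    let title = schema.insert_and_index("title"); // searchable field
    let poster = schema.insert("poster");         // registered only
    assert_eq!(schema.indexed, vec![title]);
    assert!(schema.fields.contains_key("poster"));
    let _ = poster;
}
```

In the same spirit, `serialize_value` now takes the attribute as `&str`, so callers such as the map serializer borrow the converted key with `key.as_str()` instead of handing over an owned `String`.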
diff --git a/meilisearch-core/src/settings.rs b/meilisearch-core/src/settings.rs
index 551c44cf1..e895a5eee 100644
--- a/meilisearch-core/src/settings.rs
+++ b/meilisearch-core/src/settings.rs
@@ -1,5 +1,6 @@
 use std::collections::{BTreeMap, BTreeSet, HashSet};
 use std::str::FromStr;
+use std::iter::IntoIterator;
 
 use serde::{Deserialize, Deserializer, Serialize};
 use once_cell::sync::Lazy;
@@ -17,8 +18,6 @@ pub struct Settings {
     #[serde(default, deserialize_with = "deserialize_some")]
     pub ranking_distinct: Option<Option<String>>,
     #[serde(default, deserialize_with = "deserialize_some")]
-    pub identifier: Option<Option<String>>,
-    #[serde(default, deserialize_with = "deserialize_some")]
     pub searchable_attributes: Option<Option<Vec<String>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
     pub displayed_attributes: Option<Option<Vec<String>>>,
@@ -43,7 +42,7 @@ impl Settings {
         let settings = self.clone();
 
         let ranking_rules = match settings.ranking_rules {
-            Some(Some(rules)) => UpdateState::Update(RankingRule::from_vec(rules.iter().map(|m| m.as_ref()).collect())?),
+            Some(Some(rules)) => UpdateState::Update(RankingRule::from_iter(rules.iter())?),
             Some(None) => UpdateState::Clear,
             None => UpdateState::Nothing,
         };
@@ -51,7 +50,7 @@
         Ok(SettingsUpdate {
             ranking_rules,
             ranking_distinct: settings.ranking_distinct.into(),
-            identifier: settings.identifier.into(),
+            identifier: UpdateState::Nothing,
             searchable_attributes: settings.searchable_attributes.into(),
             displayed_attributes: settings.displayed_attributes.into(),
             stop_words: settings.stop_words.into(),
@@ -139,16 +138,16 @@ impl FromStr for RankingRule {
 }
 
 impl RankingRule {
-    pub fn get_field(&self) -> Option<&str> {
+    pub fn field(&self) -> Option<&str> {
         match self {
             RankingRule::Asc(field) | RankingRule::Dsc(field) => Some(field),
             _ => None,
         }
     }
 
-    pub fn from_vec(rules: Vec<&str>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
-        rules.iter()
-            .map(|s| RankingRule::from_str(s))
+    pub fn from_iter(rules: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
+        rules.into_iter()
+            .map(|s| RankingRule::from_str(s.as_ref()))
             .collect()
     }
 }
diff --git a/meilisearch-core/src/store/main.rs b/meilisearch-core/src/store/main.rs
index 1aa3d5d2a..2e13d52ec 100644
--- a/meilisearch-core/src/store/main.rs
+++ b/meilisearch-core/src/store/main.rs
@@ -192,8 +192,8 @@ impl Main {
         self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY)
     }
 
-    pub fn put_ranking_rules(self, writer: &mut heed::RwTxn, value: Vec<RankingRule>) -> ZResult<()> {
-        self.main.put::<_, Str, SerdeBincode<Vec<RankingRule>>>(writer, RANKING_RULES_KEY, &value)
+    pub fn put_ranking_rules(self, writer: &mut heed::RwTxn, value: &[RankingRule]) -> ZResult<()> {
+        self.main.put::<_, Str, SerdeBincode<Vec<RankingRule>>>(writer, RANKING_RULES_KEY, &value.to_vec())
     }
 
     pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn) -> ZResult<bool> {
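The `Settings` struct keeps each field as `Option<Option<T>>` behind `#[serde(default, deserialize_with = "deserialize_some")]`, so a partial settings payload can tell a missing key apart from an explicit `null`. The helper's body is not part of this hunk; it is conventionally written as in the sketch below, where the `PartialSettings` struct and its JSON keys are illustrative only.

```rust
use serde::{Deserialize, Deserializer};

/// Wraps the deserialized value in `Some`, so an explicit `null` becomes
/// `Some(None)` while a missing key stays `None` via `#[serde(default)]`.
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    T::deserialize(deserializer).map(Some)
}

#[derive(Debug, Deserialize)]
struct PartialSettings {
    // Illustrative stand-in for one field of the real `Settings` struct.
    #[serde(default, deserialize_with = "deserialize_some")]
    ranking_distinct: Option<Option<String>>,
}

fn main() {
    let absent: PartialSettings = serde_json::from_str(r#"{}"#).unwrap();
    let cleared: PartialSettings =
        serde_json::from_str(r#"{ "ranking_distinct": null }"#).unwrap();
    let updated: PartialSettings =
        serde_json::from_str(r#"{ "ranking_distinct": "movie_id" }"#).unwrap();

    assert_eq!(absent.ranking_distinct, None);        // key missing -> leave untouched
    assert_eq!(cleared.ranking_distinct, Some(None)); // key is null -> clear the setting
    assert_eq!(updated.ranking_distinct, Some(Some("movie_id".to_string()))); // -> update
}
```

These three states are what `to_update()` later turns into `UpdateState::Update`, `UpdateState::Clear`, and `UpdateState::Nothing` through the `.into()` calls above.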
@@ -201,11 +201,14 @@
     pub fn ranking_distinct(&self, reader: &heed::RoTxn) -> ZResult<Option<String>> {
-        self.main.get::<_, Str, SerdeBincode<String>>(reader, RANKING_DISTINCT_KEY)
+        if let Some(value) = self.main.get::<_, Str, Str>(reader, RANKING_DISTINCT_KEY)? {
+            return Ok(Some(value.to_owned()))
+        }
+        return Ok(None)
     }
 
-    pub fn put_ranking_distinct(self, writer: &mut heed::RwTxn, value: String) -> ZResult<()> {
-        self.main.put::<_, Str, SerdeBincode<String>>(writer, RANKING_DISTINCT_KEY, &value)
+    pub fn put_ranking_distinct(self, writer: &mut heed::RwTxn, value: &str) -> ZResult<()> {
+        self.main.put::<_, Str, Str>(writer, RANKING_DISTINCT_KEY, value)
     }
 
     pub fn delete_ranking_distinct(self, writer: &mut heed::RwTxn) -> ZResult<bool> {
diff --git a/meilisearch-core/src/update/documents_deletion.rs b/meilisearch-core/src/update/documents_deletion.rs
index 9152c4e5b..314632db3 100644
--- a/meilisearch-core/src/update/documents_deletion.rs
+++ b/meilisearch-core/src/update/documents_deletion.rs
@@ -106,7 +106,7 @@ pub fn apply_documents_deletion(
     let mut words_document_ids = HashMap::new();
     for id in idset {
         // remove all the ranked attributes from the ranked_map
-        for ranked_attr in &ranked_fields {
+        for ranked_attr in ranked_fields {
             ranked_map.remove(id, *ranked_attr);
         }
diff --git a/meilisearch-core/src/update/settings_update.rs b/meilisearch-core/src/update/settings_update.rs
index ed75b0cf1..4482d52eb 100644
--- a/meilisearch-core/src/update/settings_update.rs
+++ b/meilisearch-core/src/update/settings_update.rs
@@ -44,9 +44,9 @@ pub fn apply_settings_update(
     match settings.ranking_rules {
         UpdateState::Update(v) => {
-            let ranked_field: Vec<&str> = v.iter().filter_map(RankingRule::get_field).collect();
+            let ranked_field: Vec<&str> = v.iter().filter_map(RankingRule::field).collect();
             schema.update_ranked(ranked_field)?;
-            index.main.put_ranking_rules(writer, v)?;
+            index.main.put_ranking_rules(writer, &v)?;
             must_reindex = true;
         },
         UpdateState::Clear => {
@@ -60,7 +60,7 @@
     match settings.ranking_distinct {
         UpdateState::Update(v) => {
-            index.main.put_ranking_distinct(writer, v)?;
+            index.main.put_ranking_distinct(writer, &v)?;
         },
         UpdateState::Clear => {
             index.main.delete_ranking_distinct(writer)?;
         },
@@ -99,16 +99,12 @@
         UpdateState::Nothing => (),
     };
 
-    match settings.identifier.clone() {
-        UpdateState::Update(v) => {
-            schema.set_identifier(v.as_ref())?;
-            index.main.put_schema(writer, &schema)?;
-            must_reindex = true;
-        },
-        _ => {
-            index.main.put_schema(writer, &schema)?;
-        },
-    };
+    if let UpdateState::Update(v) = settings.identifier.clone() {
+        schema.set_identifier(v.as_ref())?;
+        must_reindex = true;
+    }
+
+    index.main.put_schema(writer, &schema)?;
 
     match settings.stop_words {
         UpdateState::Update(stop_words) => {
@@ -121,13 +117,13 @@
                 must_reindex = true;
             }
         },
-        _ => (),
+        UpdateState::Nothing => (),
     }
 
     match settings.synonyms {
         UpdateState::Update(synonyms) => apply_synonyms_update(writer, index, synonyms)?,
         UpdateState::Clear => apply_synonyms_update(writer, index, BTreeMap::new())?,
-        _ => (),
+        UpdateState::Nothing => (),
     }
 
     if must_reindex {
diff --git a/meilisearch-http/.DS_Store b/meilisearch-http/.DS_Store
deleted file mode 100644
index 81fb6f831..000000000
Binary files a/meilisearch-http/.DS_Store and /dev/null differ
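`apply_settings_update` matches every setting against `UpdateState`, and `to_update()` builds those values with `.into()` from the `Option<Option<T>>` fields. The conversion itself is outside this diff; the block below is a trimmed, hypothetical version of a type that behaves that way, not the crate's own definition.

```rust
/// Hypothetical, trimmed-down `UpdateState`, only to show what the `.into()`
/// calls in `to_update()` and the matches in `apply_settings_update` rely on.
#[derive(Debug, PartialEq)]
enum UpdateState<T> {
    Update(T), // the setting receives a new value
    Clear,     // the setting is explicitly reset
    Nothing,   // the setting is left untouched
}

impl<T> From<Option<Option<T>>> for UpdateState<T> {
    fn from(opt: Option<Option<T>>) -> UpdateState<T> {
        match opt {
            Some(Some(value)) => UpdateState::Update(value),
            Some(None) => UpdateState::Clear,
            None => UpdateState::Nothing,
        }
    }
}

fn main() {
    // The payload contained `"ranking_distinct": null`.
    let cleared: Option<Option<String>> = Some(None);
    assert_eq!(UpdateState::from(cleared), UpdateState::Clear);

    // The key was not in the payload at all.
    let untouched: Option<Option<String>> = None;
    assert_eq!(UpdateState::<String>::from(untouched), UpdateState::Nothing);
}
```

Swapping the `_ => ()` arms for `UpdateState::Nothing => ()` also makes the stop-words and synonyms matches exhaustive, so a future variant cannot be silently ignored.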
diff --git a/meilisearch-http/src/data.rs b/meilisearch-http/src/data.rs
index 7e5c92348..1d0e14c9a 100644
--- a/meilisearch-http/src/data.rs
+++ b/meilisearch-http/src/data.rs
@@ -92,7 +92,13 @@ impl DataInner {
         // convert attributes to their names
         let frequency: HashMap<_, _> = fields_frequency
             .into_iter()
-            .map(|(a, c)| (schema.name(a).unwrap().to_string(), c))
+            .filter_map(|(a, c)| {
+                if let Some(name) = schema.name(a) {
+                    return Some((name.to_string(), c));
+                } else {
+                    return None;
+                }
+            })
             .collect();
 
         index
diff --git a/meilisearch-http/src/error.rs b/meilisearch-http/src/error.rs
index 0ca377576..a2b539687 100644
--- a/meilisearch-http/src/error.rs
+++ b/meilisearch-http/src/error.rs
@@ -2,10 +2,10 @@ use std::fmt::Display;
 
 use http::status::StatusCode;
 use log::{error, warn};
+use meilisearch_core::{FstError, HeedError};
 use serde::{Deserialize, Serialize};
 use tide::IntoResponse;
 use tide::Response;
-use meilisearch_core::{HeedError, FstError};
 
 use crate::helpers::meilisearch::Error as SearchError;
diff --git a/meilisearch-http/src/helpers/meilisearch.rs b/meilisearch-http/src/helpers/meilisearch.rs
index db1ea3709..c15124d07 100644
--- a/meilisearch-http/src/helpers/meilisearch.rs
+++ b/meilisearch-http/src/helpers/meilisearch.rs
@@ -206,7 +206,8 @@ impl<'a> SearchBuilder<'a> {
         query_builder.with_fetch_timeout(self.timeout);
 
         let start = Instant::now();
-        let docs = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+        let docs =
+            query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
         let time_ms = start.elapsed().as_millis() as usize;
 
         let mut hits = Vec::with_capacity(self.limit);
diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs
index e5a416cd5..608bc8a67 100644
--- a/meilisearch-http/src/routes/mod.rs
+++ b/meilisearch-http/src/routes/mod.rs
@@ -115,7 +115,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         .delete(|ctx| into_response(stop_words::delete(ctx)));
 
     app.at("/indexes/:index/stats")
-        .get(|ctx| into_response(stats::index_stat(ctx)));
+        .get(|ctx| into_response(stats::index_stats(ctx)));
 
     app.at("/keys/")
         .get(|ctx| into_response(key::list(ctx)))
diff --git a/meilisearch-http/src/routes/search.rs b/meilisearch-http/src/routes/search.rs
index ba25b8aba..2ed423334 100644
--- a/meilisearch-http/src/routes/search.rs
+++ b/meilisearch-http/src/routes/search.rs
@@ -64,7 +64,7 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
         let attributes_to_crop = schema
             .displayed_name()
             .iter()
-            .map(|attr| ((*attr).to_string(), crop_length))
+            .map(|attr| (attr.to_string(), crop_length))
             .collect();
         search_builder.attributes_to_crop(attributes_to_crop);
     } else {
@@ -81,7 +81,7 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
             schema
                 .displayed_name()
                 .iter()
-                .map(|s| (*s).to_string())
+                .map(|s| s.to_string())
                 .collect()
         } else {
             attributes_to_highlight
diff --git a/meilisearch-http/src/routes/stats.rs b/meilisearch-http/src/routes/stats.rs
index c03dfefc0..a158d16da 100644
--- a/meilisearch-http/src/routes/stats.rs
+++ b/meilisearch-http/src/routes/stats.rs
@@ -21,7 +21,7 @@ struct IndexStatsResponse {
     fields_frequency: HashMap<String, usize>,
 }
 
-pub async fn index_stat(ctx: Request<Data>) -> SResult<Response> {
+pub async fn index_stats(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Admin)?;
     let index_uid = ctx.url_param("index")?;
     let index = ctx.index()?;
diff --git a/meilisearch-http/tests/.DS_Store b/meilisearch-http/tests/.DS_Store
deleted file mode 100644
index ceb1037ec..000000000
Binary files a/meilisearch-http/tests/.DS_Store and /dev/null differ
diff --git a/meilisearch-http/tests/assets/.DS_Store b/meilisearch-http/tests/assets/.DS_Store
deleted file mode 100644
index bbb23fefa..000000000
Binary files a/meilisearch-http/tests/assets/.DS_Store and /dev/null differ
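The `DataInner` change above swaps an `unwrap()` on `schema.name(a)` for a `filter_map`, dropping frequencies whose field id no longer resolves to a name. Below is a self-contained sketch of the same idea, with plain `HashMap`s standing in for the schema and the fields-frequency store, and the closure compressed to the equivalent `Option::map` form.

```rust
use std::collections::HashMap;

fn main() {
    // Stand-ins: field id -> name (what `schema.name(a)` resolves) and
    // field id -> occurrence count (the `fields_frequency` accumulator).
    let names: HashMap<u16, &str> = [(0, "title"), (1, "overview")].into_iter().collect();
    let fields_frequency: HashMap<u16, usize> = [(0, 12), (1, 7), (9, 3)].into_iter().collect();

    // Same shape as the patched code: unknown ids (9 here) are silently
    // skipped instead of panicking on `unwrap()`.
    let frequency: HashMap<String, usize> = fields_frequency
        .into_iter()
        .filter_map(|(a, c)| names.get(&a).map(|name| (name.to_string(), c)))
        .collect();

    assert_eq!(frequency.len(), 2);
    assert_eq!(frequency["title"], 12);
}
```

The patched closure spells this out with an explicit `if let`/`else`; both forms skip unknown ids rather than panic.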
diff --git a/meilisearch-schema/src/schema.rs b/meilisearch-schema/src/schema.rs
index 0181e2e2a..5a3858f0d 100644
--- a/meilisearch-schema/src/schema.rs
+++ b/meilisearch-schema/src/schema.rs
@@ -1,8 +1,6 @@
-use std::collections::{HashMap, HashSet};
-
-use serde::{Serialize, Deserialize};
-
 use crate::{FieldsMap, FieldId, SResult, Error, IndexedPos};
+use serde::{Serialize, Deserialize};
+use std::collections::{HashMap, HashSet};
 
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct Schema {
@@ -60,11 +58,11 @@ impl Schema {
         self.fields_map.id(name).is_some()
     }
 
-    pub fn get_or_create_empty(&mut self, name: &str) -> SResult<FieldId> {
+    pub fn insert(&mut self, name: &str) -> SResult<FieldId> {
         self.fields_map.insert(name)
     }
 
-    pub fn get_or_create(&mut self, name: &str) -> SResult<FieldId> {
+    pub fn insert_and_index(&mut self, name: &str) -> SResult<FieldId> {
         match self.fields_map.id(name) {
             Some(id) => {
                 Ok(id)
@@ -80,24 +78,24 @@
         }
     }
 
-    pub fn ranked(&self) -> HashSet<FieldId> {
-        self.ranked.clone()
+    pub fn ranked(&self) -> &HashSet<FieldId> {
+        &self.ranked
     }
 
     pub fn ranked_name(&self) -> HashSet<&str> {
         self.ranked.iter().filter_map(|a| self.name(*a)).collect()
     }
 
-    pub fn displayed(&self) -> HashSet<FieldId> {
-        self.displayed.clone()
+    pub fn displayed(&self) -> &HashSet<FieldId> {
+        &self.displayed
     }
 
     pub fn displayed_name(&self) -> HashSet<&str> {
         self.displayed.iter().filter_map(|a| self.name(*a)).collect()
     }
 
-    pub fn indexed(&self) -> Vec<FieldId> {
-        self.indexed.clone()
+    pub fn indexed(&self) -> &Vec<FieldId> {
+        &self.indexed
     }
 
     pub fn indexed_name(&self) -> Vec<&str> {
@@ -168,7 +166,7 @@
     }
 
     pub fn update_ranked<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) -> SResult<()> {
-        self.ranked = HashSet::new();
+        self.ranked.clear();
         for name in data {
             self.set_ranked(name.as_ref())?;
         }
@@ -176,7 +174,7 @@
     }
 
     pub fn update_displayed<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) -> SResult<()> {
-        self.displayed = HashSet::new();
+        self.displayed.clear();
         for name in data {
             self.set_displayed(name.as_ref())?;
         }
@@ -184,8 +182,8 @@
     }
 
    pub fn update_indexed<S: AsRef<str>>(&mut self, data: Vec<S>) -> SResult<()> {
-        self.indexed = Vec::new();
-        self.indexed_map = HashMap::new();
+        self.indexed.clear();
+        self.indexed_map.clear();
         for name in data {
             self.set_indexed(name.as_ref())?;
         }
diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs
index 54202edbc..fac361415 100644
--- a/meilisearch-types/src/lib.rs
+++ b/meilisearch-types/src/lib.rs
@@ -28,7 +28,7 @@ pub struct DocIndex {
     /// The attribute in the document where the word was found
     /// along with the index in it.
-    /// Is an IndexedPos and not FieldId. Must be convert each time.
+    /// This is an IndexedPos and not a FieldId. Must be converted each time.
     pub attribute: u16,
     pub word_index: u16,
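The `meilisearch-schema` hunks above change the `ranked()`, `displayed()` and `indexed()` getters to hand out borrows instead of cloning, and make the `update_*` methods reuse their allocations with `clear()`. The sketch below is a hypothetical stand-in (field names instead of `FieldId`s, to stay self-contained) showing what that looks like from the caller's side.

```rust
use std::collections::HashSet;

/// Hypothetical stand-in for the schema's ranked-attributes set, only to show
/// the switch from returning owned clones to returning borrows.
struct Attrs {
    ranked: HashSet<String>,
}

impl Attrs {
    // Before: `fn ranked(&self) -> HashSet<String> { self.ranked.clone() }`
    // After: hand out a borrow; no allocation or copy per call.
    fn ranked(&self) -> &HashSet<String> {
        &self.ranked
    }

    // The update reuses the existing allocation via `clear()` instead of
    // dropping the old set and building a fresh `HashSet::new()`.
    fn update_ranked<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) {
        self.ranked.clear();
        for name in data {
            self.ranked.insert(name.as_ref().to_string());
        }
    }
}

fn main() {
    let mut attrs = Attrs { ranked: HashSet::new() };
    attrs.update_ranked(["release_date", "popularity"]);

    // Callers that consumed the old clone now borrow, and clone explicitly
    // only when they really need ownership.
    let ranked: &HashSet<String> = attrs.ranked();
    assert_eq!(ranked.len(), 2);
}
```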