diff --git a/Cargo.lock b/Cargo.lock index 9cee246dd..0f0d2a914 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1037,6 +1037,7 @@ dependencies = [ "main_error 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "meilisearch-core 0.8.4", "meilisearch-schema 0.8.4", + "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "pretty-bytes 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1047,7 +1048,7 @@ dependencies = [ "structopt 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "sysinfo 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", - "tide 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tide 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "ureq 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "vergen 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1870,10 +1871,11 @@ dependencies = [ [[package]] name = "tide" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "async-std 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cookie 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)", "http-service 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2563,7 +2565,7 @@ dependencies = [ "checksum termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "96d6098003bde162e4277c70665bd87c326f5a0c3f3fbfb285787fa482d54e6e" "checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" -"checksum tide 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13c99b1991db81e611a2614cd1b07fec89ae33c5f755e1f8eb70826fb5af0eea" +"checksum tide 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e619c99048ae107912703d0efeec4ff4fbff704f064e51d3eee614b28ea7b739" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" "checksum tinytemplate 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4574b75faccaacddb9b284faecdf0b544b80b6b294f3d062d325c5726a209c20" "checksum tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6" diff --git a/datasets/movies/settings.json b/datasets/movies/settings.json index c920e3220..66eeedba6 100644 --- a/datasets/movies/settings.json +++ b/datasets/movies/settings.json @@ -1,7 +1,7 @@ { - "attribute_identifier": "id", - "attributes_searchable": ["title", "overview"], - "attributes_displayed": [ + "identifier": "id", + "searchable_attributes": ["title", "overview"], + "displayed_attributes": [ "id", "title", "overview", diff --git a/meilisearch-core/Cargo.toml b/meilisearch-core/Cargo.toml index 6bd3a4789..54d0efd3a 100644 --- 
a/meilisearch-core/Cargo.toml +++ b/meilisearch-core/Cargo.toml @@ -32,7 +32,7 @@ serde_json = "1.0.41" siphasher = "0.3.1" slice-group-by = "0.2.6" zerocopy = "0.2.8" -regex = "1" +regex = "1.3.1" [dev-dependencies] assert_matches = "1.3" diff --git a/meilisearch-core/examples/from_file.rs b/meilisearch-core/examples/from_file.rs index 8870b999c..3762c99f0 100644 --- a/meilisearch-core/examples/from_file.rs +++ b/meilisearch-core/examples/from_file.rs @@ -123,7 +123,7 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box Result<(), Box< }; let attr = schema - .get_id(filter) + .id(filter) .expect("Could not find filtered attribute"); builder.with_filter(move |document_id| { @@ -390,7 +390,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box< for (name, text) in document.0 { print!("{}: ", name); - let attr = schema.get_id(&name).unwrap(); + let attr = schema.id(&name).unwrap(); let highlights = doc .highlights .iter() @@ -410,7 +410,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box< let mut matching_attributes = HashSet::new(); for highlight in doc.highlights { let attr = FieldId::new(highlight.attribute); - let name = schema.get_name(attr); + let name = schema.name(attr); matching_attributes.insert(name); } diff --git a/meilisearch-core/src/bucket_sort.rs b/meilisearch-core/src/bucket_sort.rs index 0fd4d0c58..005df09f0 100644 --- a/meilisearch-core/src/bucket_sort.rs +++ b/meilisearch-core/src/bucket_sort.rs @@ -161,11 +161,13 @@ where debug!("criterion loop took {:.02?}", before_criterion_loop.elapsed()); debug!("proximity evaluation called {} times", proximity_count.load(Ordering::Relaxed)); + let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?; let iter = raw_documents.into_iter().skip(range.start).take(range.len()); - let iter = iter.map(|rd| Document::from_raw(rd, &queries_kinds, &arena, searchable_attrs.as_ref())); + let iter = iter.map(|rd| Document::from_raw(rd, &automatons, &arena, searchable_attrs.as_ref(), &schema)); let documents = iter.collect(); debug!("bucket sort took {:.02?}", before_bucket_sort.elapsed()); + Ok(documents) } @@ -330,7 +332,7 @@ where // once we classified the documents related to the current // automatons we save that as the next valid result let mut seen = BufferedDistinctMap::new(&mut distinct_map); - let schema = main_store.schema(reader)?.unwrap(); + let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?; let mut documents = Vec::with_capacity(range.len()); for raw_document in raw_documents.into_iter().skip(distinct_raw_offset) { diff --git a/meilisearch-core/src/criterion/sort_by_attr.rs b/meilisearch-core/src/criterion/sort_by_attr.rs index 7183c9b49..621acca7a 100644 --- a/meilisearch-core/src/criterion/sort_by_attr.rs +++ b/meilisearch-core/src/criterion/sort_by_attr.rs @@ -68,12 +68,12 @@ impl<'a> SortByAttr<'a> { attr_name: &str, reversed: bool, ) -> Result, SortByAttrError> { - let field_id = match schema.get_id(attr_name) { + let field_id = match schema.id(attr_name) { Some(field_id) => field_id, None => return Err(SortByAttrError::AttributeNotFound), }; - if !schema.id_is_ranked(field_id) { + if !schema.is_ranked(field_id) { return Err(SortByAttrError::AttributeNotRegisteredForRanking); } diff --git a/meilisearch-core/src/database.rs b/meilisearch-core/src/database.rs index c99e6702e..a728c6ebd 100644 --- a/meilisearch-core/src/database.rs +++ b/meilisearch-core/src/database.rs @@ -353,7 +353,6 @@ impl Database { 
#[cfg(test)] mod tests { - use super::*; use crate::criterion::{self, CriteriaBuilder}; @@ -381,13 +380,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description"], - "attributesDisplayed": ["name", "description"] + "identifier": "id", + "searchableAttributes": ["name", "description"], + "displayedAttributes": ["name", "description"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut update_writer = db.update_write_txn().unwrap(); @@ -441,13 +440,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description"], - "attributesDisplayed": ["name", "description"] + "identifier": "id", + "searchableAttributes": ["name", "description"], + "displayedAttributes": ["name", "description"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut update_writer = db.update_write_txn().unwrap(); @@ -500,13 +499,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name"], - "attributesDisplayed": ["name"] + "identifier": "id", + "searchableAttributes": ["name"], + "displayedAttributes": ["name"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut update_writer = db.update_write_txn().unwrap(); @@ -552,13 +551,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description"], - "attributesDisplayed": ["name", "description"] + "identifier": "id", + "searchableAttributes": ["name", "description"], + "displayedAttributes": ["name", "description"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut update_writer = db.update_write_txn().unwrap(); @@ -586,17 +585,16 @@ mod tests { let _update_id = additions.finalize(&mut update_writer).unwrap(); update_writer.commit().unwrap(); - let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description", "age", "sex"], - "attributesDisplayed": ["name", "description", "age", "sex"] + "identifier": "id", + "searchableAttributes": ["name", "description", "age", "sex"], + "displayedAttributes": ["name", "description", "age", "sex"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut writer = db.update_write_txn().unwrap(); @@ -657,13 +655,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description", "city", "age", "sex"], - "attributesDisplayed": ["name", "description", "city", "age", "sex"] + "identifier": "id", + "searchableAttributes": ["name", "description", "city", "age", "sex"], + "displayedAttributes": ["name", "description", "city", "age", "sex"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut writer = db.update_write_txn().unwrap(); @@ -696,13 +694,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description"], - "attributesDisplayed": ["name", "description"] + "identifier": "id", + "searchableAttributes": ["name", "description"], + 
"displayedAttributes": ["name", "description"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut writer = db.update_write_txn().unwrap(); @@ -773,13 +771,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description"], - "attributesDisplayed": ["name", "description", "id"] + "identifier": "id", + "searchableAttributes": ["name", "description"], + "displayedAttributes": ["name", "description", "id"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut writer = db.update_write_txn().unwrap(); @@ -909,13 +907,13 @@ mod tests { let settings = { let data = r#" { - "attributeIdentifier": "id", - "attributesSearchable": ["name", "description"], - "attributesDisplayed": ["name", "description"] + "identifier": "id", + "searchableAttributes": ["name", "description"], + "displayedAttributes": ["name", "description"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut writer = db.update_write_txn().unwrap(); @@ -982,13 +980,13 @@ mod tests { "_exact", "dsc(release_date)" ], - "attributeIdentifier": "id", - "attributesSearchable": ["name", "release_date"], - "attributesDisplayed": ["name", "release_date"] + "identifier": "id", + "searchableAttributes": ["name", "release_date"], + "displayedAttributes": ["name", "release_date"] } "#; let settings: Settings = serde_json::from_str(data).unwrap(); - settings.into() + settings.into_update().unwrap() }; let mut writer = db.update_write_txn().unwrap(); diff --git a/meilisearch-core/src/error.rs b/meilisearch-core/src/error.rs index 739b8281a..177eecadc 100644 --- a/meilisearch-core/src/error.rs +++ b/meilisearch-core/src/error.rs @@ -8,7 +8,7 @@ pub type MResult = Result; pub enum Error { Io(io::Error), IndexAlreadyExists, - MissingSchemaIdentifier, + MissingIdentifier, SchemaMissing, WordIndexMissing, MissingDocumentId, @@ -83,7 +83,7 @@ impl fmt::Display for Error { match self { Io(e) => write!(f, "{}", e), IndexAlreadyExists => write!(f, "index already exists"), - MissingSchemaIdentifier => write!(f, "schema cannot be build without identifier"), + MissingIdentifier => write!(f, "schema cannot be build without identifier"), SchemaMissing => write!(f, "this index does not have a schema"), WordIndexMissing => write!(f, "this index does not have a word index"), MissingDocumentId => write!(f, "document id is missing"), diff --git a/meilisearch-core/src/fields_map.rs b/meilisearch-core/src/fields_map.rs deleted file mode 100644 index cca52bc46..000000000 --- a/meilisearch-core/src/fields_map.rs +++ /dev/null @@ -1,94 +0,0 @@ -use std::io::{Read, Write}; -use std::collections::HashMap; - -use serde::{Deserialize, Serialize}; -use crate::{MResult, Error}; - - -#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct FieldsMap { - name_map: HashMap, - id_map: HashMap, - next_id: u16 -} - -impl FieldsMap { - pub fn len(&self) -> usize { - self.name_map.len() - } - - pub fn is_empty(&self) -> bool { - self.name_map.is_empty() - } - - pub fn insert(&mut self, name: T) -> MResult { - let name = name.to_string(); - if let Some(id) = self.name_map.get(&name) { - return Ok(*id) - } - let id = self.next_id; - if self.next_id.checked_add(1).is_none() { - return Err(Error::MaxFieldsLimitExceeded) - } else { - self.next_id += 1; - } - 
self.name_map.insert(name.clone(), id); - self.id_map.insert(id, name); - Ok(id) - } - - pub fn remove(&mut self, name: T) { - let name = name.to_string(); - if let Some(id) = self.name_map.get(&name) { - self.id_map.remove(&id); - } - self.name_map.remove(&name); - } - - pub fn get_id(&self, name: T) -> Option<&u16> { - let name = name.to_string(); - self.name_map.get(&name) - } - - pub fn get_name(&self, id: u16) -> Option<&String> { - self.id_map.get(&id) - } - - pub fn read_from_bin(reader: R) -> bincode::Result { - bincode::deserialize_from(reader) - } - - pub fn write_to_bin(&self, writer: W) -> bincode::Result<()> { - bincode::serialize_into(writer, &self) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn fields_map() { - let mut fields_map = FieldsMap::default(); - - assert_eq!(fields_map.insert("id").unwrap(), 0); - assert_eq!(fields_map.insert("title").unwrap(), 1); - assert_eq!(fields_map.insert("descritpion").unwrap(), 2); - assert_eq!(fields_map.insert("id").unwrap(), 0); - assert_eq!(fields_map.insert("title").unwrap(), 1); - assert_eq!(fields_map.insert("descritpion").unwrap(), 2); - assert_eq!(fields_map.get_id("id"), Some(&0)); - assert_eq!(fields_map.get_id("title"), Some(&1)); - assert_eq!(fields_map.get_id("descritpion"), Some(&2)); - assert_eq!(fields_map.get_id("date"), None); - assert_eq!(fields_map.len(), 3); - assert_eq!(fields_map.get_name(0), Some(&"id".to_owned())); - assert_eq!(fields_map.get_name(1), Some(&"title".to_owned())); - assert_eq!(fields_map.get_name(2), Some(&"descritpion".to_owned())); - assert_eq!(fields_map.get_name(4), None); - fields_map.remove("title"); - assert_eq!(fields_map.get_id("title"), None); - assert_eq!(fields_map.insert("title").unwrap(), 3); - assert_eq!(fields_map.len(), 3); - } -} diff --git a/meilisearch-core/src/lib.rs b/meilisearch-core/src/lib.rs index 2ed0ceeed..ccedb0578 100644 --- a/meilisearch-core/src/lib.rs +++ b/meilisearch-core/src/lib.rs @@ -86,7 +86,7 @@ fn highlights_from_raw_document<'a, 'tag, 'txn>( Some(field_id) => field_id.0, None => { error!("Cannot convert indexed_pos {} to field_id", attribute); - trace!("Schema is compronized; {:?}", schema); + trace!("Schema is compromized; {:?}", schema); continue } }; @@ -164,7 +164,7 @@ impl Document { Some(field_id) => field_id.0, None => { error!("Cannot convert indexed_pos {} to field_id", attribute); - trace!("Schema is compronized; {:?}", schema); + trace!("Schema is compromized; {:?}", schema); continue } }; diff --git a/meilisearch-core/src/ranked_map.rs b/meilisearch-core/src/ranked_map.rs index 964e37375..48858c78c 100644 --- a/meilisearch-core/src/ranked_map.rs +++ b/meilisearch-core/src/ranked_map.rs @@ -19,16 +19,16 @@ impl RankedMap { self.0.is_empty() } - pub fn insert(&mut self, document: DocumentId, attribute: FieldId, number: Number) { - self.0.insert((document, attribute), number); + pub fn insert(&mut self, document: DocumentId, field: FieldId, number: Number) { + self.0.insert((document, field), number); } - pub fn remove(&mut self, document: DocumentId, attribute: FieldId) { - self.0.remove(&(document, attribute)); + pub fn remove(&mut self, document: DocumentId, field: FieldId) { + self.0.remove(&(document, field)); } - pub fn get(&self, document: DocumentId, attribute: FieldId) -> Option { - self.0.get(&(document, attribute)).cloned() + pub fn get(&self, document: DocumentId, field: FieldId) -> Option { + self.0.get(&(document, field)).cloned() } pub fn read_from_bin(reader: R) -> bincode::Result { diff --git 
a/meilisearch-core/src/raw_indexer.rs b/meilisearch-core/src/raw_indexer.rs index f20088ac0..8ed5966bb 100644 --- a/meilisearch-core/src/raw_indexer.rs +++ b/meilisearch-core/src/raw_indexer.rs @@ -178,7 +178,6 @@ fn token_to_docindex(id: DocumentId, indexed_pos: IndexedPos, token: Token) -> O #[cfg(test)] mod tests { - use super::*; use meilisearch_schema::IndexedPos; diff --git a/meilisearch-core/src/serde/deserializer.rs b/meilisearch-core/src/serde/deserializer.rs index 1ef0b85c8..e5e02a4d6 100644 --- a/meilisearch-core/src/serde/deserializer.rs +++ b/meilisearch-core/src/serde/deserializer.rs @@ -54,7 +54,7 @@ pub struct Deserializer<'a> { pub reader: &'a heed::RoTxn, pub documents_fields: DocumentsFields, pub schema: &'a Schema, - pub attributes: Option<&'a HashSet>, + pub fields: Option<&'a HashSet>, } impl<'de, 'a, 'b> de::Deserializer<'de> for &'b mut Deserializer<'a> { @@ -92,9 +92,9 @@ impl<'de, 'a, 'b> de::Deserializer<'de> for &'b mut Deserializer<'a> { } }; - let is_displayed = self.schema.id_is_displayed(attr); - if is_displayed && self.attributes.map_or(true, |f| f.contains(&attr)) { - if let Some(attribute_name) = self.schema.get_name(attr) { + let is_displayed = self.schema.is_displayed(attr); + if is_displayed && self.fields.map_or(true, |f| f.contains(&attr)) { + if let Some(attribute_name) = self.schema.name(attr) { let cursor = Cursor::new(value.to_owned()); let ioread = SerdeJsonIoRead::new(cursor); let value = Value(SerdeJsonDeserializer::new(ioread)); diff --git a/meilisearch-core/src/serde/extract_document_id.rs b/meilisearch-core/src/serde/extract_document_id.rs index 01427c5da..f8672da19 100644 --- a/meilisearch-core/src/serde/extract_document_id.rs +++ b/meilisearch-core/src/serde/extract_document_id.rs @@ -2,7 +2,7 @@ use std::hash::{Hash, Hasher}; use crate::DocumentId; use serde::{ser, Serialize}; -use serde_json::Value; +use serde_json::{Value, Number}; use siphasher::sip::SipHasher; use super::{ConvertToString, SerializerError}; @@ -18,18 +18,27 @@ where document.serialize(serializer) } +fn validate_number(value: &Number) -> Option { + if value.is_f64() { + return None + } + return Some(value.to_string()) +} + +fn validate_string(value: &String) -> Option { + if value.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') { + Some(value.to_string()) + } else { + None + } +} + pub fn value_to_string(value: &Value) -> Option { match value { Value::Null => None, Value::Bool(_) => None, - Value::Number(value) => Some(value.to_string()), - Value::String(value) => { - if value.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') { - Some(value.to_string()) - } else { - None - } - }, + Value::Number(value) => validate_number(value), + Value::String(value) => validate_string(value), Value::Array(_) => None, Value::Object(_) => None, } diff --git a/meilisearch-core/src/serde/indexer.rs b/meilisearch-core/src/serde/indexer.rs index 67599ef7f..c8b6abeaf 100644 --- a/meilisearch-core/src/serde/indexer.rs +++ b/meilisearch-core/src/serde/indexer.rs @@ -1,4 +1,4 @@ -use meilisearch_schema::{IndexedPos}; +use meilisearch_schema::IndexedPos; use serde::ser; use serde::Serialize; diff --git a/meilisearch-core/src/serde/mod.rs b/meilisearch-core/src/serde/mod.rs index 9a32d7843..f550728fa 100644 --- a/meilisearch-core/src/serde/mod.rs +++ b/meilisearch-core/src/serde/mod.rs @@ -57,7 +57,7 @@ impl fmt::Display for SerializerError { f.write_str("serialized document does not have an id according to the schema") } SerializerError::InvalidDocumentIdType 
=> { - f.write_str("document identifier can only be of type number or string (A-Z, a-z, 0-9, -_)") + f.write_str("documents identifiers can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).") } SerializerError::Zlmdb(e) => write!(f, "heed related error: {}", e), SerializerError::SerdeJson(e) => write!(f, "serde json error: {}", e), diff --git a/meilisearch-core/src/serde/serializer.rs b/meilisearch-core/src/serde/serializer.rs index 2c1d124ca..02c68b7bb 100644 --- a/meilisearch-core/src/serde/serializer.rs +++ b/meilisearch-core/src/serde/serializer.rs @@ -305,7 +305,7 @@ pub fn serialize_value<'a, T: ?Sized>( where T: ser::Serialize, { - let field_id = schema.get_or_create(attribute.clone())?; + let field_id = schema.get_or_create(&attribute)?; serialize_value_with_id( txn, @@ -337,7 +337,7 @@ where let serialized = serde_json::to_vec(value)?; document_store.put_document_field(txn, document_id, field_id, &serialized)?; - if let Some(indexed_pos) = schema.id_is_indexed(field_id) { + if let Some(indexed_pos) = schema.is_indexed(field_id) { let indexer = Indexer { pos: *indexed_pos, indexer, @@ -353,7 +353,7 @@ where } } - if schema.id_is_ranked(field_id) { + if schema.is_ranked(field_id) { let number = value.serialize(ConvertToNumber)?; ranked_map.insert(document_id, field_id, number); } diff --git a/meilisearch-core/src/settings.rs b/meilisearch-core/src/settings.rs index efc5ec493..8895fb876 100644 --- a/meilisearch-core/src/settings.rs +++ b/meilisearch-core/src/settings.rs @@ -1,16 +1,14 @@ -use std::sync::Mutex; use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::str::FromStr; use serde::{Deserialize, Deserializer, Serialize}; use once_cell::sync::Lazy; -static RANKING_RULE_REGEX: Lazy> = Lazy::new(|| { +static RANKING_RULE_REGEX: Lazy = Lazy::new(|| { let regex = regex::Regex::new(r"(asc|dsc)\(([a-zA-Z0-9-_]*)\)").unwrap(); - Mutex::new(regex) + regex }); - #[derive(Default, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct Settings { @@ -19,11 +17,11 @@ pub struct Settings { #[serde(default, deserialize_with = "deserialize_some")] pub ranking_distinct: Option>, #[serde(default, deserialize_with = "deserialize_some")] - pub attribute_identifier: Option>, + pub identifier: Option>, #[serde(default, deserialize_with = "deserialize_some")] - pub attributes_searchable: Option>>, + pub searchable_attributes: Option>>, #[serde(default, deserialize_with = "deserialize_some")] - pub attributes_displayed: Option>>, + pub displayed_attributes: Option>>, #[serde(default, deserialize_with = "deserialize_some")] pub stop_words: Option>>, #[serde(default, deserialize_with = "deserialize_some")] @@ -40,34 +38,32 @@ fn deserialize_some<'de, T, D>(deserializer: D) -> Result, D::Error> Deserialize::deserialize(deserializer).map(Some) } -impl Into for Settings { - fn into(self) -> SettingsUpdate { +impl Settings { + pub fn into_update(&self) -> Result { let settings = self.clone(); let ranking_rules = match settings.ranking_rules { - Some(Some(rules)) => UpdateState::Update(RankingRule::from_vec(rules)), + Some(Some(rules)) => UpdateState::Update(RankingRule::from_vec(rules.iter().map(|m| m.as_ref()).collect())?), Some(None) => UpdateState::Clear, None => UpdateState::Nothing, }; - SettingsUpdate { + Ok(SettingsUpdate { ranking_rules: ranking_rules, ranking_distinct: settings.ranking_distinct.into(), - attribute_identifier: settings.attribute_identifier.into(), - attributes_searchable: 
settings.attributes_searchable.into(), - attributes_displayed: settings.attributes_displayed.into(), + identifier: settings.identifier.into(), + searchable_attributes: settings.searchable_attributes.into(), + displayed_attributes: settings.displayed_attributes.into(), stop_words: settings.stop_words.into(), synonyms: settings.synonyms.into(), index_new_fields: settings.index_new_fields.into(), - } + }) } } #[derive(Debug, Clone, Serialize, Deserialize)] pub enum UpdateState { Update(T), - Add(T), - Delete(T), Clear, Nothing, } @@ -82,15 +78,6 @@ impl From>> for UpdateState { } } -impl UpdateState { - pub fn is_changed(&self) -> bool { - match self { - UpdateState::Nothing => false, - _ => true, - } - } -} - #[derive(Debug, Clone)] pub struct RankingRuleConversionError; @@ -139,10 +126,10 @@ impl FromStr for RankingRule { "_words_position" => RankingRule::WordsPosition, "_exact" => RankingRule::Exact, _ => { - let captures = RANKING_RULE_REGEX.lock().unwrap().captures(s).unwrap(); - match captures[1].as_ref() { - "asc" => RankingRule::Asc(captures[2].to_string()), - "dsc" => RankingRule::Dsc(captures[2].to_string()), + let captures = RANKING_RULE_REGEX.captures(s).ok_or(RankingRuleConversionError)?; + match (captures.get(1).map(|m| m.as_str()), captures.get(2)) { + (Some("asc"), Some(field)) => RankingRule::Asc(field.as_str().to_string()), + (Some("dsc"), Some(field)) => RankingRule::Dsc(field.as_str().to_string()), _ => return Err(RankingRuleConversionError) } } @@ -152,17 +139,16 @@ impl FromStr for RankingRule { } impl RankingRule { - pub fn get_field(&self) -> Option { + pub fn get_field(&self) -> Option<&str> { match self { - RankingRule::Asc(field) | RankingRule::Dsc(field) => Some((*field).clone()), + RankingRule::Asc(field) | RankingRule::Dsc(field) => Some(field), _ => None, } } - pub fn from_vec(rules: Vec) -> Vec { + pub fn from_vec(rules: Vec<&str>) -> Result, RankingRuleConversionError> { rules.iter() - .map(|s| RankingRule::from_str(s.as_str())) - .filter_map(Result::ok) + .map(|s| RankingRule::from_str(s)) .collect() } } @@ -171,9 +157,9 @@ impl RankingRule { pub struct SettingsUpdate { pub ranking_rules: UpdateState>, pub ranking_distinct: UpdateState, - pub attribute_identifier: UpdateState, - pub attributes_searchable: UpdateState>, - pub attributes_displayed: UpdateState>, + pub identifier: UpdateState, + pub searchable_attributes: UpdateState>, + pub displayed_attributes: UpdateState>, pub stop_words: UpdateState>, pub synonyms: UpdateState>>, pub index_new_fields: UpdateState, @@ -184,9 +170,9 @@ impl Default for SettingsUpdate { Self { ranking_rules: UpdateState::Nothing, ranking_distinct: UpdateState::Nothing, - attribute_identifier: UpdateState::Nothing, - attributes_searchable: UpdateState::Nothing, - attributes_displayed: UpdateState::Nothing, + identifier: UpdateState::Nothing, + searchable_attributes: UpdateState::Nothing, + displayed_attributes: UpdateState::Nothing, stop_words: UpdateState::Nothing, synonyms: UpdateState::Nothing, index_new_fields: UpdateState::Nothing, diff --git a/meilisearch-core/src/store/documents_fields.rs b/meilisearch-core/src/store/documents_fields.rs index 05c2b7089..1dcad8488 100644 --- a/meilisearch-core/src/store/documents_fields.rs +++ b/meilisearch-core/src/store/documents_fields.rs @@ -16,10 +16,10 @@ impl DocumentsFields { self, writer: &mut heed::RwTxn, document_id: DocumentId, - attribute: FieldId, + field: FieldId, value: &[u8], ) -> ZResult<()> { - let key = DocumentFieldStoredKey::new(document_id, attribute); + let key = 
DocumentFieldStoredKey::new(document_id, field); self.documents_fields.put(writer, &key, value) } @@ -41,9 +41,9 @@ impl DocumentsFields { self, reader: &'txn heed::RoTxn, document_id: DocumentId, - attribute: FieldId, + field: FieldId, ) -> ZResult> { - let key = DocumentFieldStoredKey::new(document_id, attribute); + let key = DocumentFieldStoredKey::new(document_id, field); self.documents_fields.get(reader, &key) } diff --git a/meilisearch-core/src/store/main.rs b/meilisearch-core/src/store/main.rs index 573686bbc..1aa3d5d2a 100644 --- a/meilisearch-core/src/store/main.rs +++ b/meilisearch-core/src/store/main.rs @@ -11,11 +11,11 @@ use crate::RankedMap; use crate::settings::RankingRule; const CREATED_AT_KEY: &str = "created-at"; -const RANKING_RULES_KEY: &str = "ranking-rules-key"; -const RANKING_DISTINCT_KEY: &str = "ranking-distinct-key"; -const STOP_WORDS_KEY: &str = "stop-words-key"; -const SYNONYMS_KEY: &str = "synonyms-key"; -const CUSTOMS_KEY: &str = "customs-key"; +const RANKING_RULES_KEY: &str = "ranking-rules"; +const RANKING_DISTINCT_KEY: &str = "ranking-distinct"; +const STOP_WORDS_KEY: &str = "stop-words"; +const SYNONYMS_KEY: &str = "synonyms"; +const CUSTOMS_KEY: &str = "customs"; const FIELDS_FREQUENCY_KEY: &str = "fields-frequency"; const NAME_KEY: &str = "name"; const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents"; @@ -188,7 +188,7 @@ impl Main { } } - pub fn ranking_rules<'txn>(&self, reader: &'txn heed::RoTxn) -> ZResult>> { + pub fn ranking_rules(&self, reader: &heed::RoTxn) -> ZResult>> { self.main.get::<_, Str, SerdeBincode>>(reader, RANKING_RULES_KEY) } @@ -200,7 +200,7 @@ impl Main { self.main.delete::<_, Str>(writer, RANKING_RULES_KEY) } - pub fn ranking_distinct<'txn>(&self, reader: &'txn heed::RoTxn) -> ZResult> { + pub fn ranking_distinct(&self, reader: &heed::RoTxn) -> ZResult> { self.main.get::<_, Str, SerdeBincode>(reader, RANKING_DISTINCT_KEY) } diff --git a/meilisearch-core/src/store/mod.rs b/meilisearch-core/src/store/mod.rs index cbc5ecb44..f451ba929 100644 --- a/meilisearch-core/src/store/mod.rs +++ b/meilisearch-core/src/store/mod.rs @@ -223,7 +223,7 @@ impl Index { let schema = schema.ok_or(Error::SchemaMissing)?; let attributes = match attributes { - Some(attributes) => Some(attributes.iter().filter_map(|name| schema.get_id(*name)).collect()), + Some(attributes) => Some(attributes.iter().filter_map(|name| schema.id(*name)).collect()), None => None, }; @@ -232,7 +232,7 @@ impl Index { reader, documents_fields: self.documents_fields, schema: &schema, - attributes: attributes.as_ref(), + fields: attributes.as_ref(), }; Ok(Option::::deserialize(&mut deserializer)?) 
diff --git a/meilisearch-core/src/update/documents_addition.rs b/meilisearch-core/src/update/documents_addition.rs index 2844839c2..0d1cb480b 100644 --- a/meilisearch-core/src/update/documents_addition.rs +++ b/meilisearch-core/src/update/documents_addition.rs @@ -158,8 +158,6 @@ pub fn apply_documents_addition<'a, 'b>( document.serialize(serializer)?; } - - write_documents_addition_index( writer, index, @@ -199,7 +197,7 @@ pub fn apply_documents_partial_addition<'a, 'b>( reader: writer, documents_fields: index.documents_fields, schema: &schema, - attributes: None, + fields: None, }; // retrieve the old document and @@ -246,8 +244,6 @@ pub fn apply_documents_partial_addition<'a, 'b>( document.serialize(serializer)?; } - - write_documents_addition_index( writer, index, diff --git a/meilisearch-core/src/update/documents_deletion.rs b/meilisearch-core/src/update/documents_deletion.rs index 5627ee0fd..9152c4e5b 100644 --- a/meilisearch-core/src/update/documents_deletion.rs +++ b/meilisearch-core/src/update/documents_deletion.rs @@ -101,12 +101,12 @@ pub fn apply_documents_deletion( }; // collect the ranked attributes according to the schema - let ranked_attrs = schema.get_ranked(); + let ranked_fields = schema.ranked(); let mut words_document_ids = HashMap::new(); for id in idset { // remove all the ranked attributes from the ranked_map - for ranked_attr in &ranked_attrs { + for ranked_attr in &ranked_fields { ranked_map.remove(id, *ranked_attr); } diff --git a/meilisearch-core/src/update/mod.rs b/meilisearch-core/src/update/mod.rs index 7e4c9500a..f13429917 100644 --- a/meilisearch-core/src/update/mod.rs +++ b/meilisearch-core/src/update/mod.rs @@ -4,7 +4,6 @@ mod documents_addition; mod documents_deletion; mod settings_update; - pub use self::clear_all::{apply_clear_all, push_clear_all}; pub use self::customs_update::{apply_customs_update, push_customs_update}; pub use self::documents_addition::{ diff --git a/meilisearch-core/src/update/settings_update.rs b/meilisearch-core/src/update/settings_update.rs index 2c57ae02b..43c17a9bd 100644 --- a/meilisearch-core/src/update/settings_update.rs +++ b/meilisearch-core/src/update/settings_update.rs @@ -35,27 +35,27 @@ pub fn apply_settings_update( let mut schema = match index.main.schema(writer)? 
{ Some(schema) => schema, None => { - match settings.attribute_identifier.clone() { - UpdateState::Update(id) => Schema::with_identifier(id), - _ => return Err(Error::MissingSchemaIdentifier) + match settings.identifier.clone() { + UpdateState::Update(id) => Schema::with_identifier(&id), + _ => return Err(Error::MissingIdentifier) } } }; match settings.ranking_rules { UpdateState::Update(v) => { - let ranked_field: Vec = v.iter().filter_map(RankingRule::get_field).collect(); + let ranked_field: Vec<&str> = v.iter().filter_map(RankingRule::get_field).collect(); schema.update_ranked(ranked_field)?; index.main.put_ranking_rules(writer, v)?; must_reindex = true; }, UpdateState::Clear => { - let clear: Vec = Vec::new(); + let clear: Vec<&str> = Vec::new(); schema.update_ranked(clear)?; index.main.delete_ranking_rules(writer)?; must_reindex = true; }, - _ => (), + UpdateState::Nothing => (), } match settings.ranking_distinct { @@ -65,65 +65,43 @@ pub fn apply_settings_update( UpdateState::Clear => { index.main.delete_ranking_distinct(writer)?; }, - _ => (), + UpdateState::Nothing => (), } match settings.index_new_fields { UpdateState::Update(v) => { - schema.set_must_index_new_fields(v); + schema.set_index_new_fields(v); }, UpdateState::Clear => { - schema.set_must_index_new_fields(true); + schema.set_index_new_fields(true); }, - _ => (), + UpdateState::Nothing => (), } - match settings.attributes_searchable.clone() { + match settings.searchable_attributes.clone() { UpdateState::Update(v) => { schema.update_indexed(v)?; must_reindex = true; }, UpdateState::Clear => { - let clear: Vec = Vec::new(); + let clear: Vec<&str> = Vec::new(); schema.update_indexed(clear)?; must_reindex = true; }, UpdateState::Nothing => (), - UpdateState::Add(attrs) => { - for attr in attrs { - schema.set_indexed(attr)?; - } - must_reindex = true; - }, - UpdateState::Delete(attrs) => { - for attr in attrs { - schema.remove_indexed(attr); - } - must_reindex = true; - } }; - match settings.attributes_displayed.clone() { + match settings.displayed_attributes.clone() { UpdateState::Update(v) => schema.update_displayed(v)?, UpdateState::Clear => { - let clear: Vec = Vec::new(); + let clear: Vec<&str> = Vec::new(); schema.update_displayed(clear)?; }, UpdateState::Nothing => (), - UpdateState::Add(attrs) => { - for attr in attrs { - schema.set_displayed(attr)?; - } - }, - UpdateState::Delete(attrs) => { - for attr in attrs { - schema.remove_displayed(attr); - } - } }; - match settings.attribute_identifier.clone() { + match settings.identifier.clone() { UpdateState::Update(v) => { - schema.set_identifier(v)?; + schema.set_identifier(v.as_ref())?; index.main.put_schema(writer, &schema)?; must_reindex = true; }, @@ -168,7 +146,7 @@ pub fn apply_settings_update( docs_words_store, )?; } - if let UpdateState::Clear = settings.attribute_identifier { + if let UpdateState::Clear = settings.identifier { index.main.delete_schema(writer)?; } Ok(()) @@ -189,8 +167,8 @@ pub fn apply_stop_words_update( .stream() .into_strs().unwrap().into_iter().collect(); - let deletion: BTreeSet = old_stop_words.clone().difference(&stop_words).cloned().collect(); - let addition: BTreeSet = stop_words.clone().difference(&old_stop_words).cloned().collect(); + let deletion: BTreeSet = old_stop_words.difference(&stop_words).cloned().collect(); + let addition: BTreeSet = stop_words.difference(&old_stop_words).cloned().collect(); if !addition.is_empty() { apply_stop_words_addition( @@ -201,11 +179,12 @@ pub fn apply_stop_words_update( } if !deletion.is_empty() { 
- must_reindex = apply_stop_words_deletion( + apply_stop_words_deletion( writer, index, deletion )?; + must_reindex = true; } Ok(must_reindex) @@ -275,7 +254,7 @@ fn apply_stop_words_deletion( writer: &mut heed::RwTxn, index: &store::Index, deletion: BTreeSet, -) -> MResult { +) -> MResult<()> { let main_store = index.main; @@ -306,17 +285,7 @@ fn apply_stop_words_deletion( .and_then(fst::Set::from_bytes) .unwrap(); - main_store.put_stop_words_fst(writer, &stop_words_fst)?; - - // now that we have setup the stop words - // lets reindex everything... - if let Ok(number) = main_store.number_of_documents(writer) { - if number > 0 { - return Ok(true) - } - } - - Ok(false) + Ok(main_store.put_stop_words_fst(writer, &stop_words_fst)?) } pub fn apply_synonyms_update( diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 47472c8e6..ee322b43c 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -14,7 +14,7 @@ name = "meilisearch" path = "src/main.rs" [dependencies] -async-std = { version = "1.0.1", features = ["unstable", "attributes"] } +async-std = { version = "1.0.1", features = ["attributes"] } bincode = "1.2.0" chrono = { version = "0.4.9", features = ["serde"] } crossbeam-channel = "0.4.0" @@ -35,12 +35,13 @@ serde_qs = "0.5.1" siphasher = "0.3.1" structopt = "0.3.3" sysinfo = "0.9.5" -tide = "0.5.1" +tide = "0.6.0" ureq = { version = "0.11.2", features = ["tls"], default-features = false } walkdir = "2.2.9" whoami = "0.6" http-service = "0.4.0" futures = "0.3.1" +mime = "0.3.16" [dev-dependencies] http-service-mock = "0.4.0" diff --git a/meilisearch-http/src/cors.rs b/meilisearch-http/src/cors.rs deleted file mode 100644 index 0f48e4310..000000000 --- a/meilisearch-http/src/cors.rs +++ /dev/null @@ -1,424 +0,0 @@ -//! Cors middleware - -use futures::future::BoxFuture; -use http::header::HeaderValue; -use http::{header, Method, StatusCode}; -use http_service::Body; - -use tide::middleware::{Middleware, Next}; -use tide::{Request, Response}; - -/// Middleware for CORS -/// -/// # Example -/// -/// ```no_run -/// use http::header::HeaderValue; -/// use tide::middleware::{Cors, Origin}; -/// -/// Cors::new() -/// .allow_methods(HeaderValue::from_static("GET, POST, OPTIONS")) -/// .allow_origin(Origin::from("*")) -/// .allow_credentials(false); -/// ``` -#[derive(Clone, Debug, Hash)] -pub struct Cors { - allow_credentials: Option, - allow_headers: HeaderValue, - allow_methods: HeaderValue, - allow_origin: Origin, - expose_headers: Option, - max_age: HeaderValue, -} - -pub const DEFAULT_MAX_AGE: &str = "86400"; -pub const DEFAULT_METHODS: &str = "GET, POST, OPTIONS"; -pub const WILDCARD: &str = "*"; - -impl Cors { - /// Creates a new Cors middleware. 
- pub fn new() -> Self { - Self { - allow_credentials: None, - allow_headers: HeaderValue::from_static(WILDCARD), - allow_methods: HeaderValue::from_static(DEFAULT_METHODS), - allow_origin: Origin::Any, - expose_headers: None, - max_age: HeaderValue::from_static(DEFAULT_MAX_AGE), - } - } - - /// Set allow_credentials and return new Cors - pub fn allow_credentials(mut self, allow_credentials: bool) -> Self { - self.allow_credentials = match HeaderValue::from_str(&allow_credentials.to_string()) { - Ok(header) => Some(header), - Err(_) => None, - }; - self - } - - /// Set allow_headers and return new Cors - pub fn allow_headers>(mut self, headers: T) -> Self { - self.allow_headers = headers.into(); - self - } - - /// Set max_age and return new Cors - pub fn max_age>(mut self, max_age: T) -> Self { - self.max_age = max_age.into(); - self - } - - /// Set allow_methods and return new Cors - pub fn allow_methods>(mut self, methods: T) -> Self { - self.allow_methods = methods.into(); - self - } - - /// Set allow_origin and return new Cors - pub fn allow_origin>(mut self, origin: T) -> Self { - self.allow_origin = origin.into(); - self - } - - /// Set expose_headers and return new Cors - pub fn expose_headers>(mut self, headers: T) -> Self { - self.expose_headers = Some(headers.into()); - self - } - - fn build_preflight_response(&self, origin: &HeaderValue) -> http::response::Response { - let mut response = http::Response::builder() - .status(StatusCode::OK) - .header::<_, HeaderValue>(header::ACCESS_CONTROL_ALLOW_ORIGIN, origin.clone()) - .header( - header::ACCESS_CONTROL_ALLOW_METHODS, - self.allow_methods.clone(), - ) - .header( - header::ACCESS_CONTROL_ALLOW_HEADERS, - self.allow_headers.clone(), - ) - .header(header::ACCESS_CONTROL_MAX_AGE, self.max_age.clone()) - .body(Body::empty()) - .unwrap(); - - if let Some(allow_credentials) = self.allow_credentials.clone() { - response - .headers_mut() - .append(header::ACCESS_CONTROL_ALLOW_CREDENTIALS, allow_credentials); - } - - if let Some(expose_headers) = self.expose_headers.clone() { - response - .headers_mut() - .append(header::ACCESS_CONTROL_EXPOSE_HEADERS, expose_headers); - } - - response - } - - /// Look at origin of request and determine allow_origin - fn response_origin>(&self, origin: T) -> Option { - let origin = origin.into(); - if !self.is_valid_origin(origin.clone()) { - return None; - } - - match self.allow_origin { - Origin::Any => Some(HeaderValue::from_static(WILDCARD)), - _ => Some(origin), - } - } - - /// Determine if origin is appropriate - fn is_valid_origin>(&self, origin: T) -> bool { - let origin = match origin.into().to_str() { - Ok(s) => s.to_string(), - Err(_) => return false, - }; - - match &self.allow_origin { - Origin::Any => true, - Origin::Exact(s) => s == &origin, - Origin::List(list) => list.contains(&origin), - } - } -} - -impl Middleware for Cors { - fn handle<'a>(&'a self, req: Request, next: Next<'a, State>) -> BoxFuture<'a, Response> { - Box::pin(async move { - let origin = req - .headers() - .get(header::ORIGIN) - .cloned() - .unwrap_or_else(|| HeaderValue::from_static("")); - - if !self.is_valid_origin(&origin) { - return http::Response::builder() - .status(StatusCode::UNAUTHORIZED) - .body(Body::empty()) - .unwrap() - .into(); - } - - // Return results immediately upon preflight request - if req.method() == Method::OPTIONS { - return self.build_preflight_response(&origin).into(); - } - - let mut response: http_service::Response = next.run(req).await.into(); - let headers = response.headers_mut(); - - 
headers.append( - header::ACCESS_CONTROL_ALLOW_ORIGIN, - self.response_origin(origin).unwrap(), - ); - - if let Some(allow_credentials) = self.allow_credentials.clone() { - headers.append(header::ACCESS_CONTROL_ALLOW_CREDENTIALS, allow_credentials); - } - - if let Some(expose_headers) = self.expose_headers.clone() { - headers.append(header::ACCESS_CONTROL_EXPOSE_HEADERS, expose_headers); - } - response.into() - }) - } -} - -impl Default for Cors { - fn default() -> Self { - Self::new() - } -} - -/// allow_origin enum -#[derive(Clone, Debug, Hash, PartialEq)] -pub enum Origin { - /// Wildcard. Accept all origin requests - Any, - /// Set a single allow_origin target - Exact(String), - /// Set multiple allow_origin targets - List(Vec), -} - -impl From for Origin { - fn from(s: String) -> Self { - if s == "*" { - return Origin::Any; - } - Origin::Exact(s) - } -} - -impl From<&str> for Origin { - fn from(s: &str) -> Self { - Origin::from(s.to_string()) - } -} - -impl From> for Origin { - fn from(list: Vec) -> Self { - if list.len() == 1 { - return Self::from(list[0].clone()); - } - - Origin::List(list) - } -} - -impl From> for Origin { - fn from(list: Vec<&str>) -> Self { - Origin::from(list.iter().map(|s| s.to_string()).collect::>()) - } -} - -#[cfg(test)] -mod test { - use super::*; - use http::header::HeaderValue; - use http_service::Body; - use http_service_mock::make_server; - - const ALLOW_ORIGIN: &str = "example.com"; - const ALLOW_METHODS: &str = "GET, POST, OPTIONS, DELETE"; - const EXPOSE_HEADER: &str = "X-My-Custom-Header"; - - const ENDPOINT: &str = "/cors"; - - fn app() -> tide::Server<()> { - let mut app = tide::Server::new(); - app.at(ENDPOINT).get(|_| async move { "Hello World" }); - - app - } - - fn request() -> http::Request { - http::Request::get(ENDPOINT) - .header(http::header::ORIGIN, ALLOW_ORIGIN) - .method(http::method::Method::GET) - .body(Body::empty()) - .unwrap() - } - - #[test] - fn preflight_request() { - let mut app = app(); - app.middleware( - Cors::new() - .allow_origin(Origin::from(ALLOW_ORIGIN)) - .allow_methods(HeaderValue::from_static(ALLOW_METHODS)) - .expose_headers(HeaderValue::from_static(EXPOSE_HEADER)) - .allow_credentials(true), - ); - - let mut server = make_server(app.into_http_service()).unwrap(); - - let req = http::Request::get(ENDPOINT) - .header(http::header::ORIGIN, ALLOW_ORIGIN) - .method(http::method::Method::OPTIONS) - .body(Body::empty()) - .unwrap(); - - let res = server.simulate(req).unwrap(); - - assert_eq!(res.status(), 200); - - assert_eq!( - res.headers().get("access-control-allow-origin").unwrap(), - ALLOW_ORIGIN - ); - assert_eq!( - res.headers().get("access-control-allow-methods").unwrap(), - ALLOW_METHODS - ); - assert_eq!( - res.headers().get("access-control-allow-headers").unwrap(), - WILDCARD - ); - assert_eq!( - res.headers().get("access-control-max-age").unwrap(), - DEFAULT_MAX_AGE - ); - - assert_eq!( - res.headers() - .get("access-control-allow-credentials") - .unwrap(), - "true" - ); - } - #[test] - fn default_cors_middleware() { - let mut app = app(); - app.middleware(Cors::new()); - - let mut server = make_server(app.into_http_service()).unwrap(); - let res = server.simulate(request()).unwrap(); - - assert_eq!(res.status(), 200); - - assert_eq!( - res.headers().get("access-control-allow-origin").unwrap(), - "*" - ); - } - - #[test] - fn custom_cors_middleware() { - let mut app = app(); - app.middleware( - Cors::new() - .allow_origin(Origin::from(ALLOW_ORIGIN)) - .allow_credentials(false) - 
.allow_methods(HeaderValue::from_static(ALLOW_METHODS)) - .expose_headers(HeaderValue::from_static(EXPOSE_HEADER)), - ); - - let mut server = make_server(app.into_http_service()).unwrap(); - let res = server.simulate(request()).unwrap(); - - assert_eq!(res.status(), 200); - assert_eq!( - res.headers().get("access-control-allow-origin").unwrap(), - ALLOW_ORIGIN - ); - } - - #[test] - fn credentials_true() { - let mut app = app(); - app.middleware(Cors::new().allow_credentials(true)); - - let mut server = make_server(app.into_http_service()).unwrap(); - let res = server.simulate(request()).unwrap(); - - assert_eq!(res.status(), 200); - assert_eq!( - res.headers() - .get("access-control-allow-credentials") - .unwrap(), - "true" - ); - } - - #[test] - fn set_allow_origin_list() { - let mut app = app(); - let origins = vec![ALLOW_ORIGIN, "foo.com", "bar.com"]; - app.middleware(Cors::new().allow_origin(origins.clone())); - let mut server = make_server(app.into_http_service()).unwrap(); - - for origin in origins { - let request = http::Request::get(ENDPOINT) - .header(http::header::ORIGIN, origin) - .method(http::method::Method::GET) - .body(Body::empty()) - .unwrap(); - - let res = server.simulate(request).unwrap(); - - assert_eq!(res.status(), 200); - assert_eq!( - res.headers().get("access-control-allow-origin").unwrap(), - origin - ); - } - } - - #[test] - fn not_set_origin_header() { - let mut app = app(); - app.middleware(Cors::new()); - - let request = http::Request::get(ENDPOINT) - .method(http::method::Method::GET) - .body(Body::empty()) - .unwrap(); - - let mut server = make_server(app.into_http_service()).unwrap(); - let res = server.simulate(request).unwrap(); - - assert_eq!(res.status(), 200); - } - - #[test] - fn unauthorized_origin() { - let mut app = app(); - app.middleware(Cors::new().allow_origin(ALLOW_ORIGIN)); - - let request = http::Request::get(ENDPOINT) - .header(http::header::ORIGIN, "unauthorize-origin.net") - .method(http::method::Method::GET) - .body(Body::empty()) - .unwrap(); - - let mut server = make_server(app.into_http_service()).unwrap(); - let res = server.simulate(request).unwrap(); - - assert_eq!(res.status(), 401); - } -} diff --git a/meilisearch-http/src/data.rs b/meilisearch-http/src/data.rs index b80d338f0..dd915e882 100644 --- a/meilisearch-http/src/data.rs +++ b/meilisearch-http/src/data.rs @@ -92,7 +92,7 @@ impl DataInner { // convert attributes to their names let frequency: HashMap<_, _> = fields_frequency .into_iter() - .map(|(a, c)| (schema.get_name(a).unwrap(), c)) + .map(|(a, c)| (schema.name(a).unwrap().to_string(), c)) .collect(); index diff --git a/meilisearch-http/src/error.rs b/meilisearch-http/src/error.rs index aae988816..3c1b7d706 100644 --- a/meilisearch-http/src/error.rs +++ b/meilisearch-http/src/error.rs @@ -127,6 +127,12 @@ fn error(message: String, status: StatusCode) -> Response { .unwrap() } +impl From for ResponseError { + fn from(err: serde_json::Error) -> ResponseError { + ResponseError::internal(err) + } +} + impl From for ResponseError { fn from(err: meilisearch_core::Error) -> ResponseError { ResponseError::internal(err) @@ -151,11 +157,16 @@ impl From for ResponseError { } } +impl From for ResponseError { + fn from(err: meilisearch_core::settings::RankingRuleConversionError) -> ResponseError { + ResponseError::internal(err) + } +} + pub trait IntoInternalError { fn into_internal_error(self) -> SResult; } -/// Must be used only impl IntoInternalError for Option { fn into_internal_error(self) -> SResult { match self { diff --git 
a/meilisearch-http/src/helpers/meilisearch.rs b/meilisearch-http/src/helpers/meilisearch.rs index 563823423..db1ea3709 100644 --- a/meilisearch-http/src/helpers/meilisearch.rs +++ b/meilisearch-http/src/helpers/meilisearch.rs @@ -63,6 +63,12 @@ impl From for Error { } } +impl From for Error { + fn from(error: heed::Error) -> Self { + Error::Internal(error.to_string()) + } +} + pub trait IndexSearchExt { fn new_search(&self, query: String) -> SearchBuilder; } @@ -171,7 +177,7 @@ impl<'a> SearchBuilder<'a> { let ref_index = &self.index; let value = value.trim().to_lowercase(); - let attr = match schema.get_id(attr) { + let attr = match schema.id(attr) { Some(attr) => attr, None => return Err(Error::UnknownFilteredAttribute), }; @@ -271,7 +277,7 @@ impl<'a> SearchBuilder<'a> { ranked_map: &'a RankedMap, schema: &Schema, ) -> Result>, Error> { - let ranking_rules = self.index.main.ranking_rules(reader).unwrap(); + let ranking_rules = self.index.main.ranking_rules(reader)?; if let Some(ranking_rules) = ranking_rules { let mut builder = CriteriaBuilder::with_capacity(7 + ranking_rules.len()); @@ -283,10 +289,18 @@ impl<'a> SearchBuilder<'a> { RankingRule::Attribute => builder.push(Attribute), RankingRule::WordsPosition => builder.push(WordsPosition), RankingRule::Exact => builder.push(Exact), - RankingRule::Asc(field) => builder - .push(SortByAttr::lower_is_better(&ranked_map, &schema, &field).unwrap()), - RankingRule::Dsc(field) => builder - .push(SortByAttr::higher_is_better(&ranked_map, &schema, &field).unwrap()), + RankingRule::Asc(field) => { + match SortByAttr::lower_is_better(&ranked_map, &schema, &field) { + Ok(rule) => builder.push(rule), + Err(err) => error!("Error during criteria builder; {:?}", err), + } + } + RankingRule::Dsc(field) => { + match SortByAttr::higher_is_better(&ranked_map, &schema, &field) { + Ok(rule) => builder.push(rule), + Err(err) => error!("Error during criteria builder; {:?}", err), + } + } }; } builder.push(DocumentId); @@ -334,8 +348,6 @@ pub struct SearchResult { pub limit: usize, pub processing_time_ms: usize, pub query: String, - // pub parsed_query: String, - // pub params: Option, } fn crop_text( @@ -369,7 +381,7 @@ fn crop_document( matches.sort_unstable_by_key(|m| (m.char_index, m.char_length)); for (field, length) in fields { - let attribute = match schema.get_id(field) { + let attribute = match schema.id(field) { Some(attribute) => attribute, None => continue, }; @@ -398,16 +410,16 @@ fn calculate_matches( ) -> MatchesInfos { let mut matches_result: HashMap> = HashMap::new(); for m in matches.iter() { - if let Some(attribute) = schema.get_name(FieldId::new(m.attribute)) { + if let Some(attribute) = schema.name(FieldId::new(m.attribute)) { if let Some(attributes_to_retrieve) = attributes_to_retrieve.clone() { - if !attributes_to_retrieve.contains(attribute.as_str()) { + if !attributes_to_retrieve.contains(attribute) { continue; } }; - if !schema.get_displayed_name().contains(attribute.as_str()) { + if !schema.displayed_name().contains(attribute) { continue; } - if let Some(pos) = matches_result.get_mut(&attribute) { + if let Some(pos) = matches_result.get_mut(attribute) { pos.push(MatchPosition { start: m.char_index as usize, length: m.char_length as usize, @@ -418,7 +430,7 @@ fn calculate_matches( start: m.char_index as usize, length: m.char_length as usize, }); - matches_result.insert(attribute, positions); + matches_result.insert(attribute.to_string(), positions); } } } diff --git a/meilisearch-http/src/helpers/tide.rs 
b/meilisearch-http/src/helpers/tide.rs index 132396a5c..3091bb3b3 100644 --- a/meilisearch-http/src/helpers/tide.rs +++ b/meilisearch-http/src/helpers/tide.rs @@ -88,7 +88,7 @@ impl RequestExt for Request { fn url_param(&self, name: &str) -> SResult { let param = self .param::(name) - .map_err(|_| ResponseError::bad_parameter("identifier", ""))?; + .map_err(|_| ResponseError::bad_parameter("identifier", name))?; Ok(param) } @@ -105,7 +105,7 @@ impl RequestExt for Request { fn identifier(&self) -> SResult { let name = self .param::("identifier") - .map_err(|_| ResponseError::bad_parameter("identifier", ""))?; + .map_err(|_| ResponseError::bad_parameter("identifier", "identifier"))?; Ok(name) } diff --git a/meilisearch-http/src/main.rs b/meilisearch-http/src/main.rs index 0517c3875..9fc5d4c8e 100644 --- a/meilisearch-http/src/main.rs +++ b/meilisearch-http/src/main.rs @@ -5,17 +5,14 @@ use async_std::task; use log::info; use main_error::MainError; use structopt::StructOpt; -use tide::middleware::RequestLogger; +use tide::middleware::{Cors, RequestLogger}; use meilisearch_http::data::Data; use meilisearch_http::option::Opt; use meilisearch_http::routes; use meilisearch_http::routes::index::index_update_callback; -use cors::Cors; - mod analytics; -mod cors; #[cfg(target_os = "linux")] #[global_allocator] @@ -40,15 +37,11 @@ pub fn main() -> Result<(), MainError> { app.middleware(Cors::new()); app.middleware(RequestLogger::new()); - // app.middleware(tide_compression::Compression::new()); - // app.middleware(tide_compression::Decompression::new()); routes::load_routes(&mut app); info!("Server HTTP enabled"); - task::block_on(async { - app.listen(opt.http_addr).await.unwrap(); - }); + task::block_on(app.listen(opt.http_addr))?; Ok(()) } diff --git a/meilisearch-http/src/routes/document.rs b/meilisearch-http/src/routes/document.rs index 4c90c329b..d3577d6db 100644 --- a/meilisearch-http/src/routes/document.rs +++ b/meilisearch-http/src/routes/document.rs @@ -30,7 +30,7 @@ pub async fn get_document(ctx: Request) -> SResult { return Err(ResponseError::document_not_found(identifier)); } - Ok(tide::Response::new(200).body_json(&response).unwrap()) + Ok(tide::Response::new(200).body_json(&response)?) } #[derive(Default, Serialize)] @@ -54,7 +54,7 @@ pub async fn delete_document(ctx: Request) -> SResult { update_writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } #[derive(Default, Deserialize)] @@ -106,7 +106,7 @@ pub async fn get_all_documents(ctx: Request) -> SResult { } } - Ok(tide::Response::new(200).body_json(&response_body).unwrap()) + Ok(tide::Response::new(200).body_json(&response_body)?) } fn find_identifier(document: &IndexMap) -> Option { @@ -146,10 +146,10 @@ async fn update_multiple_documents(mut ctx: Request, is_partial: bool) -> }, }; let settings = Settings { - attribute_identifier: Some(Some(id)), + identifier: Some(Some(id)), ..Settings::default() }; - index.settings_update(&mut update_writer, settings.into())?; + index.settings_update(&mut update_writer, settings.into_update()?)?; } let mut document_addition = if is_partial { @@ -166,7 +166,7 @@ async fn update_multiple_documents(mut ctx: Request, is_partial: bool) -> update_writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) 
} pub async fn add_or_replace_multiple_documents(ctx: Request) -> SResult { @@ -200,7 +200,7 @@ pub async fn delete_multiple_documents(mut ctx: Request) -> SResult) -> SResult { @@ -215,5 +215,5 @@ pub async fn clear_all_documents(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } diff --git a/meilisearch-http/src/routes/index.rs b/meilisearch-http/src/routes/index.rs index 78f3ea22e..baadc6346 100644 --- a/meilisearch-http/src/routes/index.rs +++ b/meilisearch-http/src/routes/index.rs @@ -55,7 +55,7 @@ pub async fn list_indexes(ctx: Request) -> SResult { } } - Ok(tide::Response::new(200).body_json(&response_body).unwrap()) + Ok(tide::Response::new(200).body_json(&response_body)?) } #[derive(Debug, Serialize)] @@ -87,7 +87,7 @@ pub async fn get_index(ctx: Request) -> SResult { updated_at, }; - Ok(tide::Response::new(200).body_json(&response_body).unwrap()) + Ok(tide::Response::new(200).body_json(&response_body)?) } #[derive(Debug, Deserialize)] @@ -95,7 +95,7 @@ pub async fn get_index(ctx: Request) -> SResult { struct IndexCreateRequest { name: Option, uid: Option, - attribute_identifier: Option, + identifier: Option, } #[derive(Debug, Serialize)] @@ -150,10 +150,10 @@ pub async fn create_index(mut ctx: Request) -> SResult { .updated_at(&writer)? .into_internal_error()?; - if let Some(id) = body.attribute_identifier { + if let Some(id) = body.identifier { created_index .main - .put_schema(&mut writer, &Schema::with_identifier(id))?; + .put_schema(&mut writer, &Schema::with_identifier(&id))?; } writer.commit()?; @@ -165,7 +165,7 @@ pub async fn create_index(mut ctx: Request) -> SResult { updated_at, }; - Ok(tide::Response::new(201).body_json(&response_body).unwrap()) + Ok(tide::Response::new(201).body_json(&response_body)?) } #[derive(Debug, Deserialize)] @@ -214,7 +214,7 @@ pub async fn update_index(mut ctx: Request) -> SResult { updated_at, }; - Ok(tide::Response::new(200).body_json(&response_body).unwrap()) + Ok(tide::Response::new(200).body_json(&response_body)?) 
} pub async fn get_update_status(ctx: Request) -> SResult { diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index 2d0db22f9..bdb0b9329 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -23,175 +23,131 @@ async fn into_response( } pub fn load_routes(app: &mut tide::Server) { - app.at("").nest(|router| { - // expose the web interface static files - router.at("/").get(|_| { - async move { - let response = include_str!("../../public/interface.html"); - response - } - }); - router.at("/bulma.min.css").get(|_| { - async { - let response = include_str!("../../public/bulma.min.css"); - response - } - }); - - router.at("/indexes").nest(|router| { - router - .at("/") - .get(|ctx| into_response(index::list_indexes(ctx))) - .post(|ctx| into_response(index::create_index(ctx))); - - router - .at("/search") - .post(|ctx| into_response(search::search_multi_index(ctx))); - - router.at("/:index").nest(|router| { - router - .at("/search") - .get(|ctx| into_response(search::search_with_url_query(ctx))); - - router.at("/updates").nest(|router| { - router - .at("/") - .get(|ctx| into_response(index::get_all_updates_status(ctx))); - - router - .at("/:update_id") - .get(|ctx| into_response(index::get_update_status(ctx))); - }); - - router - .at("/") - .get(|ctx| into_response(index::get_index(ctx))) - .put(|ctx| into_response(index::update_index(ctx))) - .delete(|ctx| into_response(index::delete_index(ctx))); - - router.at("/documents").nest(|router| { - router - .at("/") - .get(|ctx| into_response(document::get_all_documents(ctx))) - .post(|ctx| into_response(document::add_or_replace_multiple_documents(ctx))) - .put(|ctx| into_response(document::add_or_update_multiple_documents(ctx))) - .delete(|ctx| into_response(document::clear_all_documents(ctx))); - - router.at("/:identifier").nest(|router| { - router - .at("/") - .get(|ctx| into_response(document::get_document(ctx))) - .delete(|ctx| into_response(document::delete_document(ctx))); - }); - - router - .at("/delete-batch") - .post(|ctx| into_response(document::delete_multiple_documents(ctx))); - }); - - router.at("/settings").nest(|router| { - router - .get(|ctx| into_response(setting::get_all(ctx))) - .post(|ctx| into_response(setting::update_all(ctx))) - .delete(|ctx| into_response(setting::delete_all(ctx))); - - router.at("/ranking").nest(|router| { - router - .get(|ctx| into_response(setting::get_ranking(ctx))) - .post(|ctx| into_response(setting::update_ranking(ctx))) - .delete(|ctx| into_response(setting::delete_ranking(ctx))); - - router - .at("/rules") - .get(|ctx| into_response(setting::get_rules(ctx))) - .post(|ctx| into_response(setting::update_rules(ctx))) - .delete(|ctx| into_response(setting::delete_rules(ctx))); - - router - .at("/distinct") - .get(|ctx| into_response(setting::get_distinct(ctx))) - .post(|ctx| into_response(setting::update_distinct(ctx))) - .delete(|ctx| into_response(setting::delete_distinct(ctx))); - }); - - router.at("/attributes").nest(|router| { - router - .get(|ctx| into_response(setting::get_attributes(ctx))) - .post(|ctx| into_response(setting::update_attributes(ctx))) - .delete(|ctx| into_response(setting::delete_attributes(ctx))); - - router - .at("/identifier") - .get(|ctx| into_response(setting::get_identifier(ctx))); - - router - .at("/searchable") - .get(|ctx| into_response(setting::get_searchable(ctx))) - .post(|ctx| into_response(setting::update_searchable(ctx))) - .delete(|ctx| into_response(setting::delete_searchable(ctx))); - - 
router - .at("/displayed") - .get(|ctx| into_response(setting::get_displayed(ctx))) - .post(|ctx| into_response(setting::update_displayed(ctx))) - .delete(|ctx| into_response(setting::delete_displayed(ctx))); - }); - - router - .at("/index-new-fields") - .get(|ctx| into_response(setting::get_index_new_fields(ctx))) - .post(|ctx| into_response(setting::update_index_new_fields(ctx))); - - router - .at("/synonyms") - .get(|ctx| into_response(synonym::get(ctx))) - .post(|ctx| into_response(synonym::update(ctx))) - .delete(|ctx| into_response(synonym::delete(ctx))); - - router - .at("/stop-words") - .get(|ctx| into_response(stop_words::get(ctx))) - .post(|ctx| into_response(stop_words::update(ctx))) - .delete(|ctx| into_response(stop_words::delete(ctx))); - }); - - router - .at("/stats") - .get(|ctx| into_response(stats::index_stat(ctx))); - }); - }); - - router.at("/keys").nest(|router| { - router - .at("/") - .get(|ctx| into_response(key::list(ctx))) - .post(|ctx| into_response(key::create(ctx))); - - router - .at("/:key") - .get(|ctx| into_response(key::get(ctx))) - .put(|ctx| into_response(key::update(ctx))) - .delete(|ctx| into_response(key::delete(ctx))); - }); + app.at("/").get(|_| { + async move { + tide::Response::new(200) + .body_string(include_str!("../../public/interface.html").to_string()) + .set_mime(mime::TEXT_HTML_UTF_8) + } + }); + app.at("/bulma.min.css").get(|_| { + async { + tide::Response::new(200) + .body_string(include_str!("../../public/bulma.min.css").to_string()) + .set_mime(mime::TEXT_CSS_UTF_8) + } }); - app.at("").nest(|router| { - router - .at("/health") - .get(|ctx| into_response(health::get_health(ctx))) - .put(|ctx| into_response(health::change_healthyness(ctx))); + app.at("/indexes/") + .get(|ctx| into_response(index::list_indexes(ctx))) + .post(|ctx| into_response(index::create_index(ctx))); - router - .at("/stats") - .get(|ctx| into_response(stats::get_stats(ctx))); - router - .at("/version") - .get(|ctx| into_response(stats::get_version(ctx))); - router - .at("/sys-info") - .get(|ctx| into_response(stats::get_sys_info(ctx))); - router - .at("/sys-info/pretty") - .get(|ctx| into_response(stats::get_sys_info_pretty(ctx))); - }); + app.at("/indexes/search") + .post(|ctx| into_response(search::search_multi_index(ctx))); + + app.at("/indexes/:index") + .get(|ctx| into_response(index::get_index(ctx))) + .put(|ctx| into_response(index::update_index(ctx))) + .delete(|ctx| into_response(index::delete_index(ctx))); + + app.at("/indexes/:index/search") + .get(|ctx| into_response(search::search_with_url_query(ctx))); + + app.at("/indexes/:index/updates") + .get(|ctx| into_response(index::get_all_updates_status(ctx))); + + app.at("/indexes/:index/updates/:update_id") + .get(|ctx| into_response(index::get_update_status(ctx))); + + app.at("/indexes/:index/documents") + .get(|ctx| into_response(document::get_all_documents(ctx))) + .post(|ctx| into_response(document::add_or_replace_multiple_documents(ctx))) + .put(|ctx| into_response(document::add_or_update_multiple_documents(ctx))) + .delete(|ctx| into_response(document::clear_all_documents(ctx))); + + app.at("/indexes/:index/documents/:identifier") + .get(|ctx| into_response(document::get_document(ctx))) + .delete(|ctx| into_response(document::delete_document(ctx))); + + app.at("/indexes/:index/documents/:identifier/delete-batch") + .post(|ctx| into_response(document::delete_multiple_documents(ctx))); + + app.at("/indexes/:index/settings") + .get(|ctx| into_response(setting::get_all(ctx))) + .post(|ctx| 
into_response(setting::update_all(ctx))) + .delete(|ctx| into_response(setting::delete_all(ctx))); + app.at("/indexes/:index/settings/ranking") + .get(|ctx| into_response(setting::get_ranking(ctx))) + .post(|ctx| into_response(setting::update_ranking(ctx))) + .delete(|ctx| into_response(setting::delete_ranking(ctx))); + + app.at("/indexes/:index/settings/ranking/rules") + .get(|ctx| into_response(setting::get_rules(ctx))) + .post(|ctx| into_response(setting::update_rules(ctx))) + .delete(|ctx| into_response(setting::delete_rules(ctx))); + + app.at("/indexes/:index/settings/ranking/distinct") + .get(|ctx| into_response(setting::get_distinct(ctx))) + .post(|ctx| into_response(setting::update_distinct(ctx))) + .delete(|ctx| into_response(setting::delete_distinct(ctx))); + + app.at("/indexes/:index/settings/attributes") + .get(|ctx| into_response(setting::get_attributes(ctx))) + .post(|ctx| into_response(setting::update_attributes(ctx))) + .delete(|ctx| into_response(setting::delete_attributes(ctx))); + + app.at("/indexes/:index/settings/attributes/identifier") + .get(|ctx| into_response(setting::get_identifier(ctx))); + + app.at("/indexes/:index/settings/attributes/searchable") + .get(|ctx| into_response(setting::get_searchable(ctx))) + .post(|ctx| into_response(setting::update_searchable(ctx))) + .delete(|ctx| into_response(setting::delete_searchable(ctx))); + + app.at("/indexes/:index/settings/attributes/displayed") + .get(|ctx| into_response(setting::displayed(ctx))) + .post(|ctx| into_response(setting::update_displayed(ctx))) + .delete(|ctx| into_response(setting::delete_displayed(ctx))); + + app.at("/indexes/:index/settings/index-new-field") + .get(|ctx| into_response(setting::get_index_new_fields(ctx))) + .post(|ctx| into_response(setting::update_index_new_fields(ctx))); + + app.at("/indexes/:index/settings/synonyms") + .get(|ctx| into_response(synonym::get(ctx))) + .post(|ctx| into_response(synonym::update(ctx))) + .delete(|ctx| into_response(synonym::delete(ctx))); + + app.at("/indexes/:index/settings/stop_words") + .get(|ctx| into_response(stop_words::get(ctx))) + .post(|ctx| into_response(stop_words::update(ctx))) + .delete(|ctx| into_response(stop_words::delete(ctx))); + + app.at("/indexes/:index/stats") + .get(|ctx| into_response(stats::index_stat(ctx))); + + app.at("/keys/") + .get(|ctx| into_response(key::list(ctx))) + .post(|ctx| into_response(key::create(ctx))); + + app.at("/keys/:key") + .get(|ctx| into_response(key::get(ctx))) + .put(|ctx| into_response(key::update(ctx))) + .delete(|ctx| into_response(key::delete(ctx))); + + app.at("/health") + .get(|ctx| into_response(health::get_health(ctx))) + .put(|ctx| into_response(health::change_healthyness(ctx))); + + app.at("/stats") + .get(|ctx| into_response(stats::get_stats(ctx))); + + app.at("/version") + .get(|ctx| into_response(stats::get_version(ctx))); + + app.at("/sys-info") + .get(|ctx| into_response(stats::get_sys_info(ctx))); + + app.at("/sys-info/pretty") + .get(|ctx| into_response(stats::get_sys_info_pretty(ctx))); } diff --git a/meilisearch-http/src/routes/search.rs b/meilisearch-http/src/routes/search.rs index 34280ba7d..2ed423334 100644 --- a/meilisearch-http/src/routes/search.rs +++ b/meilisearch-http/src/routes/search.rs @@ -62,7 +62,7 @@ pub async fn search_with_url_query(ctx: Request) -> SResult { let crop_length = query.crop_length.unwrap_or(200); if attributes_to_crop == "*" { let attributes_to_crop = schema - .get_displayed_name() + .displayed_name() .iter() .map(|attr| (attr.to_string(), crop_length)) 
.collect(); @@ -78,11 +78,15 @@ pub async fn search_with_url_query(ctx: Request) -> SResult { if let Some(attributes_to_highlight) = query.attributes_to_highlight { let attributes_to_highlight = if attributes_to_highlight == "*" { - schema.get_displayed_name() + schema + .displayed_name() + .iter() + .map(|s| s.to_string()) + .collect() } else { attributes_to_highlight .split(',') - .map(ToString::to_string) + .map(|s| s.to_string()) .collect() }; diff --git a/meilisearch-http/src/routes/setting.rs b/meilisearch-http/src/routes/setting.rs index 3c2472b96..0b7ed176a 100644 --- a/meilisearch-http/src/routes/setting.rs +++ b/meilisearch-http/src/routes/setting.rs @@ -18,7 +18,7 @@ pub async fn get_all(ctx: Request) -> SResult { let stop_words_fst = index.main.stop_words_fst(&reader)?; let stop_words = stop_words_fst.unwrap_or_default().stream().into_strs()?; let stop_words: BTreeSet = stop_words.into_iter().collect(); - let stop_words = if stop_words.len() > 0 { + let stop_words = if !stop_words.is_empty() { Some(stop_words) } else { None @@ -40,7 +40,7 @@ pub async fn get_all(ctx: Request) -> SResult { } } - let synonyms = if synonyms.len() > 0 { + let synonyms = if !synonyms.is_empty() { Some(synonyms) } else { None @@ -54,17 +54,21 @@ pub async fn get_all(ctx: Request) -> SResult { let schema = index.main.schema(&reader)?; - let attribute_identifier = schema.clone().map(|s| s.identifier()); - let attributes_searchable = schema.clone().map(|s| s.get_indexed_name()); - let attributes_displayed = schema.clone().map(|s| s.get_displayed_name()); - let index_new_fields = schema.map(|s| s.must_index_new_fields()); + let identifier = schema.clone().map(|s| s.identifier().to_owned()); + let searchable_attributes = schema + .clone() + .map(|s| s.indexed_name().iter().map(|s| s.to_string()).collect()); + let displayed_attributes = schema + .clone() + .map(|s| s.displayed_name().iter().map(|s| s.to_string()).collect()); + let index_new_fields = schema.map(|s| s.index_new_fields()); let settings = Settings { ranking_rules: Some(ranking_rules), ranking_distinct: Some(ranking_distinct), - attribute_identifier: Some(attribute_identifier), - attributes_searchable: Some(attributes_searchable), - attributes_displayed: Some(attributes_displayed), + identifier: Some(identifier), + searchable_attributes: Some(searchable_attributes), + displayed_attributes: Some(displayed_attributes), stop_words: Some(stop_words), synonyms: Some(synonyms), index_new_fields: Some(index_new_fields), @@ -78,9 +82,9 @@ pub async fn get_all(ctx: Request) -> SResult { pub struct UpdateSettings { pub ranking_rules: Option>, pub ranking_distinct: Option, - pub attribute_identifier: Option, - pub attributes_searchable: Option>, - pub attributes_displayed: Option>, + pub identifier: Option, + pub searchable_attributes: Option>, + pub displayed_attributes: Option>, pub stop_words: Option>, pub synonyms: Option>>, pub index_new_fields: Option, @@ -96,20 +100,20 @@ pub async fn update_all(mut ctx: Request) -> SResult { let settings = Settings { ranking_rules: Some(settings_update.ranking_rules), ranking_distinct: Some(settings_update.ranking_distinct), - attribute_identifier: Some(settings_update.attribute_identifier), - attributes_searchable: Some(settings_update.attributes_searchable), - attributes_displayed: Some(settings_update.attributes_displayed), + identifier: Some(settings_update.identifier), + searchable_attributes: Some(settings_update.searchable_attributes), + displayed_attributes: Some(settings_update.displayed_attributes),
stop_words: Some(settings_update.stop_words), synonyms: Some(settings_update.synonyms), index_new_fields: Some(settings_update.index_new_fields), }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_all(ctx: Request) -> SResult { @@ -121,9 +125,9 @@ pub async fn delete_all(ctx: Request) -> SResult { let settings = SettingsUpdate { ranking_rules: UpdateState::Clear, ranking_distinct: UpdateState::Clear, - attribute_identifier: UpdateState::Clear, - attributes_searchable: UpdateState::Clear, - attributes_displayed: UpdateState::Clear, + identifier: UpdateState::Clear, + searchable_attributes: UpdateState::Clear, + displayed_attributes: UpdateState::Clear, stop_words: UpdateState::Clear, synonyms: UpdateState::Clear, index_new_fields: UpdateState::Clear, @@ -134,12 +138,12 @@ pub async fn delete_all(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } #[derive(Default, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct GetRankingSettings { +pub struct RankingSettings { pub ranking_rules: Option>, pub ranking_distinct: Option, } @@ -156,7 +160,7 @@ pub async fn get_ranking(ctx: Request) -> SResult { }; let ranking_distinct = index.main.ranking_distinct(&reader)?; - let settings = GetRankingSettings { + let settings = RankingSettings { ranking_rules, ranking_distinct, }; @@ -164,17 +168,10 @@ pub async fn get_ranking(ctx: Request) -> SResult { Ok(tide::Response::new(200).body_json(&settings).unwrap()) } -#[derive(Default, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct SetRankingSettings { - pub ranking_rules: Option>, - pub ranking_distinct: Option, -} - pub async fn update_ranking(mut ctx: Request) -> SResult { ctx.is_allowed(SettingsWrite)?; let index = ctx.index()?; - let settings: SetRankingSettings = ctx.body_json().await.map_err(ResponseError::bad_request)?; + let settings: RankingSettings = ctx.body_json().await.map_err(ResponseError::bad_request)?; let db = &ctx.state().db; let settings = Settings { @@ -184,11 +181,11 @@ pub async fn update_ranking(mut ctx: Request) -> SResult { }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_ranking(ctx: Request) -> SResult { @@ -208,7 +205,7 @@ pub async fn delete_ranking(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) 
} pub async fn get_rules(ctx: Request) -> SResult { @@ -238,11 +235,11 @@ pub async fn update_rules(mut ctx: Request) -> SResult { }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_rules(ctx: Request) -> SResult { @@ -261,13 +258,7 @@ pub async fn delete_rules(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) -} - -#[derive(Default, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct GetRankingDistinctSettings { - pub ranking_distinct: Option, + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn get_distinct(ctx: Request) -> SResult { @@ -283,12 +274,6 @@ pub async fn get_distinct(ctx: Request) -> SResult { .unwrap()) } -#[derive(Default, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct SetRankingDistinctSettings { - pub ranking_distinct: Option, -} - pub async fn update_distinct(mut ctx: Request) -> SResult { ctx.is_allowed(SettingsWrite)?; let index = ctx.index()?; @@ -302,11 +287,11 @@ pub async fn update_distinct(mut ctx: Request) -> SResult { }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_distinct(ctx: Request) -> SResult { @@ -325,15 +310,15 @@ pub async fn delete_distinct(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) 
} #[derive(Default, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct GetAttributesSettings { - pub attribute_identifier: Option, - pub attributes_searchable: Option>, - pub attributes_displayed: Option>, +pub struct AttributesSettings { + pub identifier: Option, + pub searchable_attributes: Option>, + pub displayed_attributes: Option>, } pub async fn get_attributes(ctx: Request) -> SResult { @@ -344,47 +329,42 @@ pub async fn get_attributes(ctx: Request) -> SResult { let schema = index.main.schema(&reader)?; - let attribute_identifier = schema.clone().map(|s| s.identifier()); - let attributes_searchable = schema.clone().map(|s| s.get_indexed_name()); - let attributes_displayed = schema.clone().map(|s| s.get_displayed_name()); + let identifier = schema.clone().map(|s| s.identifier().to_string()); + let searchable_attributes = schema + .clone() + .map(|s| s.indexed_name().iter().map(|s| s.to_string()).collect()); + let displayed_attributes = schema + .clone() + .map(|s| s.displayed_name().iter().map(|s| s.to_string()).collect()); - let settings = GetAttributesSettings { - attribute_identifier, - attributes_searchable, - attributes_displayed, + let settings = AttributesSettings { + identifier, + searchable_attributes, + displayed_attributes, }; Ok(tide::Response::new(200).body_json(&settings).unwrap()) } -#[derive(Default, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct SetAttributesSettings { - pub attribute_identifier: Option, - pub attributes_searchable: Option>, - pub attributes_displayed: Option>, -} - pub async fn update_attributes(mut ctx: Request) -> SResult { ctx.is_allowed(SettingsWrite)?; let index = ctx.index()?; - let settings: SetAttributesSettings = - ctx.body_json().await.map_err(ResponseError::bad_request)?; + let settings: AttributesSettings = ctx.body_json().await.map_err(ResponseError::bad_request)?; let db = &ctx.state().db; let settings = Settings { - attribute_identifier: Some(settings.attribute_identifier), - attributes_searchable: Some(settings.attributes_searchable), - attributes_displayed: Some(settings.attributes_displayed), + identifier: Some(settings.identifier), + searchable_attributes: Some(settings.searchable_attributes), + displayed_attributes: Some(settings.displayed_attributes), ..Settings::default() }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_attributes(ctx: Request) -> SResult { @@ -393,8 +373,8 @@ pub async fn delete_attributes(ctx: Request) -> SResult { let db = &ctx.state().db; let settings = SettingsUpdate { - attributes_searchable: UpdateState::Clear, - attributes_displayed: UpdateState::Clear, + searchable_attributes: UpdateState::Clear, + displayed_attributes: UpdateState::Clear, ..SettingsUpdate::default() }; @@ -403,7 +383,7 @@ pub async fn delete_attributes(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) 
} pub async fn get_identifier(ctx: Request) -> SResult { @@ -414,11 +394,9 @@ pub async fn get_identifier(ctx: Request) -> SResult { let schema = index.main.schema(&reader)?; - let attribute_identifier = schema.map(|s| s.identifier()); + let identifier = schema.map(|s| s.identifier().to_string()); - Ok(tide::Response::new(200) - .body_json(&attribute_identifier) - .unwrap()) + Ok(tide::Response::new(200).body_json(&identifier).unwrap()) } pub async fn get_searchable(ctx: Request) -> SResult { @@ -429,37 +407,32 @@ pub async fn get_searchable(ctx: Request) -> SResult { let schema = index.main.schema(&reader)?; - let attributes_searchable = schema.map(|s| s.get_indexed_name()); + let searchable_attributes: Option> = + schema.map(|s| s.indexed_name().iter().map(|i| i.to_string()).collect()); Ok(tide::Response::new(200) - .body_json(&attributes_searchable) + .body_json(&searchable_attributes) .unwrap()) } -#[derive(Default, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct SetAttributesSearchableSettings { - pub attributes_searchable: Option>, -} - pub async fn update_searchable(mut ctx: Request) -> SResult { ctx.is_allowed(SettingsWrite)?; let index = ctx.index()?; - let attributes_searchable: Option> = + let searchable_attributes: Option> = ctx.body_json().await.map_err(ResponseError::bad_request)?; let db = &ctx.state().db; let settings = Settings { - attributes_searchable: Some(attributes_searchable), + searchable_attributes: Some(searchable_attributes), ..Settings::default() }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_searchable(ctx: Request) -> SResult { @@ -468,7 +441,7 @@ pub async fn delete_searchable(ctx: Request) -> SResult { let db = &ctx.state().db; let settings = SettingsUpdate { - attributes_searchable: UpdateState::Clear, + searchable_attributes: UpdateState::Clear, ..SettingsUpdate::default() }; @@ -477,10 +450,10 @@ pub async fn delete_searchable(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) 
} -pub async fn get_displayed(ctx: Request) -> SResult { +pub async fn displayed(ctx: Request) -> SResult { ctx.is_allowed(SettingsRead)?; let index = ctx.index()?; let db = &ctx.state().db; @@ -488,31 +461,32 @@ pub async fn get_displayed(ctx: Request) -> SResult { let schema = index.main.schema(&reader)?; - let attributes_displayed = schema.map(|s| s.get_displayed_name()); + let displayed_attributes: Option> = + schema.map(|s| s.displayed_name().iter().map(|i| i.to_string()).collect()); Ok(tide::Response::new(200) - .body_json(&attributes_displayed) + .body_json(&displayed_attributes) .unwrap()) } pub async fn update_displayed(mut ctx: Request) -> SResult { ctx.is_allowed(SettingsWrite)?; let index = ctx.index()?; - let attributes_displayed: Option> = + let displayed_attributes: Option> = ctx.body_json().await.map_err(ResponseError::bad_request)?; let db = &ctx.state().db; let settings = Settings { - attributes_displayed: Some(attributes_displayed), + displayed_attributes: Some(displayed_attributes), ..Settings::default() }; let mut writer = db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings.into())?; + let update_id = index.settings_update(&mut writer, settings.into_update()?)?; writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete_displayed(ctx: Request) -> SResult { @@ -521,7 +495,7 @@ pub async fn delete_displayed(ctx: Request) -> SResult { let db = &ctx.state().db; let settings = SettingsUpdate { - attributes_displayed: UpdateState::Clear, + displayed_attributes: UpdateState::Clear, ..SettingsUpdate::default() }; @@ -530,7 +504,7 @@ pub async fn delete_displayed(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn get_index_new_fields(ctx: Request) -> SResult { @@ -541,7 +515,7 @@ pub async fn get_index_new_fields(ctx: Request) -> SResult { let schema = index.main.schema(&reader)?; - let index_new_fields = schema.map(|s| s.must_index_new_fields()); + let index_new_fields = schema.map(|s| s.index_new_fields()); Ok(tide::Response::new(200) .body_json(&index_new_fields) @@ -561,9 +535,9 @@ pub async fn update_index_new_fields(mut ctx: Request) -> SResult) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } pub async fn delete(ctx: Request) -> SResult { @@ -59,5 +59,5 @@ pub async fn delete(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } diff --git a/meilisearch-http/src/routes/synonym.rs b/meilisearch-http/src/routes/synonym.rs index 3aff02525..e407d1a61 100644 --- a/meilisearch-http/src/routes/synonym.rs +++ b/meilisearch-http/src/routes/synonym.rs @@ -57,7 +57,7 @@ pub async fn update(mut ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) 
} pub async fn delete(ctx: Request) -> SResult { @@ -78,5 +78,5 @@ pub async fn delete(ctx: Request) -> SResult { writer.commit()?; let response_body = IndexUpdateResponse { update_id }; - Ok(tide::Response::new(202).body_json(&response_body).unwrap()) + Ok(tide::Response::new(202).body_json(&response_body)?) } diff --git a/meilisearch-http/tests/common.rs b/meilisearch-http/tests/common.rs index 80c24067f..93e806507 100644 --- a/meilisearch-http/tests/common.rs +++ b/meilisearch-http/tests/common.rs @@ -1,4 +1,3 @@ -#![allow(dead_code)] use serde_json::Value; use std::error::Error; use std::time::Duration; @@ -64,8 +63,8 @@ pub fn enrich_server_with_movies_settings( "dsc(vote_average)", ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "title", "tagline", "overview", @@ -75,7 +74,7 @@ pub fn enrich_server_with_movies_settings( "production_companies", "genres", ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "director", "producer", diff --git a/meilisearch-http/tests/index.rs b/meilisearch-http/tests/index.rs index c328ac5a5..b87c56a2d 100644 --- a/meilisearch-http/tests/index.rs +++ b/meilisearch-http/tests/index.rs @@ -3,7 +3,6 @@ use async_std::task::block_on; use http_service::Body; use serde_json::json; use serde_json::Value; -use std::convert::Into; mod common; diff --git a/meilisearch-http/tests/search.rs b/meilisearch-http/tests/search.rs index e4ef41efa..cfbf6dad9 100644 --- a/meilisearch-http/tests/search.rs +++ b/meilisearch-http/tests/search.rs @@ -636,8 +636,8 @@ fn search_with_settings_basic() { "dsc(vote_average)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "title", "tagline", "overview", @@ -647,7 +647,7 @@ fn search_with_settings_basic() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "director", "producer", @@ -742,8 +742,8 @@ fn search_with_settings_stop_words() { "dsc(vote_average)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "title", "tagline", "overview", @@ -753,7 +753,7 @@ fn search_with_settings_stop_words() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "director", "producer", @@ -849,8 +849,8 @@ fn search_with_settings_synonyms() { "dsc(vote_average)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "title", "tagline", "overview", @@ -860,7 +860,7 @@ fn search_with_settings_synonyms() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "director", "producer", @@ -961,8 +961,8 @@ fn search_with_settings_ranking_rules() { "dsc(popularity)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "title", "tagline", "overview", @@ -972,7 +972,7 @@ fn search_with_settings_ranking_rules() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "director", "producer", @@ -1052,7 +1052,7 @@ fn search_with_settings_ranking_rules() { } #[test] -fn search_with_settings_attributes_searchable() { +fn search_with_settings_searchable_attributes() { let mut server = common::setup_server().unwrap(); 
common::enrich_server_with_movies_index(&mut server).unwrap(); @@ -1068,8 +1068,8 @@ fn search_with_settings_attributes_searchable() { "dsc(vote_average)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "tagline", "overview", "cast", @@ -1078,7 +1078,7 @@ fn search_with_settings_attributes_searchable() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "director", "producer", @@ -1158,7 +1158,7 @@ fn search_with_settings_attributes_searchable() { } #[test] -fn search_with_settings_attributes_displayed() { +fn search_with_settings_displayed_attributes() { let mut server = common::setup_server().unwrap(); common::enrich_server_with_movies_index(&mut server).unwrap(); @@ -1174,8 +1174,8 @@ fn search_with_settings_attributes_displayed() { "dsc(vote_average)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "title", "tagline", "overview", @@ -1185,7 +1185,7 @@ fn search_with_settings_attributes_displayed() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "tagline", "id", @@ -1229,7 +1229,7 @@ fn search_with_settings_attributes_displayed() { } #[test] -fn search_with_settings_attributes_searchable_2() { +fn search_with_settings_searchable_attributes_2() { let mut server = common::setup_server().unwrap(); common::enrich_server_with_movies_index(&mut server).unwrap(); @@ -1245,8 +1245,8 @@ fn search_with_settings_attributes_searchable_2() { "dsc(vote_average)" ], "rankingDistinct": null, - "attributeIdentifier": "id", - "attributesSearchable": [ + "identifier": "id", + "searchableAttributes": [ "tagline", "overview", "title", @@ -1256,7 +1256,7 @@ fn search_with_settings_attributes_searchable_2() { "production_companies", "genres" ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "tagline", "id", diff --git a/meilisearch-http/tests/settings.rs b/meilisearch-http/tests/settings.rs index 1a2c5767b..6094e7d67 100644 --- a/meilisearch-http/tests/settings.rs +++ b/meilisearch-http/tests/settings.rs @@ -50,8 +50,8 @@ fn write_all_and_delete() { "dsc(rank)", ], "rankingDistinct": "movie_id", - "attributeIdentifier": "uid", - "attributesSearchable": [ + "identifier": "uid", + "searchableAttributes": [ "uid", "movie_id", "title", @@ -60,7 +60,7 @@ fn write_all_and_delete() { "release_date", "rank", ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "description", "poster", @@ -128,9 +128,9 @@ fn write_all_and_delete() { let json = json!({ "rankingRules": null, "rankingDistinct": null, - "attributeIdentifier": null, - "attributesSearchable": null, - "attributesDisplayed": null, + "identifier": null, + "searchableAttributes": null, + "displayedAttributes": null, "stopWords": null, "synonyms": null, "indexNewFields": null, @@ -179,8 +179,8 @@ fn write_all_and_update() { "dsc(rank)", ], "rankingDistinct": "movie_id", - "attributeIdentifier": "uid", - "attributesSearchable": [ + "identifier": "uid", + "searchableAttributes": [ "uid", "movie_id", "title", @@ -189,7 +189,7 @@ fn write_all_and_update() { "release_date", "rank", ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "description", "poster", @@ -244,13 +244,13 @@ fn write_all_and_update() { "_exact", "dsc(release_date)", ], - "attributeIdentifier": "uid", - "attributesSearchable": [ + "identifier": "uid", + "searchableAttributes": [ 
"title", "description", "uid", ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "description", "release_date", @@ -299,13 +299,13 @@ fn write_all_and_update() { "dsc(release_date)", ], "rankingDistinct": null, - "attributeIdentifier": "uid", - "attributesSearchable": [ + "identifier": "uid", + "searchableAttributes": [ "title", "description", "uid", ], - "attributesDisplayed": [ + "displayedAttributes": [ "title", "description", "release_date", diff --git a/meilisearch-http/tests/settings_ranking.rs b/meilisearch-http/tests/settings_ranking.rs index 2ae0401b9..b8e837978 100644 --- a/meilisearch-http/tests/settings_ranking.rs +++ b/meilisearch-http/tests/settings_ranking.rs @@ -1,4 +1,3 @@ -use std::convert::Into; use std::time::Duration; use assert_json_diff::assert_json_eq; @@ -26,7 +25,7 @@ fn write_all_and_delete() { let body = json!({ "uid": "movies", - "attributeIdentifier": "uid", + "identifier": "uid", }) .to_string() .into_bytes(); @@ -123,7 +122,7 @@ fn write_all_and_update() { let body = json!({ "uid": "movies", - "attributeIdentifier": "uid", + "identifier": "uid", }) .to_string() .into_bytes(); diff --git a/meilisearch-schema/src/error.rs b/meilisearch-schema/src/error.rs index 57f73050a..29c4dd035 100644 --- a/meilisearch-schema/src/error.rs +++ b/meilisearch-schema/src/error.rs @@ -13,8 +13,8 @@ impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::Error::*; match self { - FieldNameNotFound(field) => write!(f, "The field {} doesn't exist", field), - MaxFieldsLimitExceeded => write!(f, "The maximum of possible reatributed field id has been reached"), + FieldNameNotFound(field) => write!(f, "The field {:?} doesn't exist", field), + MaxFieldsLimitExceeded => write!(f, "The maximum of possible reattributed field id has been reached"), } } } diff --git a/meilisearch-schema/src/fields_map.rs b/meilisearch-schema/src/fields_map.rs index d81a6d245..a8b73c573 100644 --- a/meilisearch-schema/src/fields_map.rs +++ b/meilisearch-schema/src/fields_map.rs @@ -1,11 +1,9 @@ -use std::io::{Read, Write}; use std::collections::HashMap; use serde::{Deserialize, Serialize}; use crate::{SResult, FieldId}; - #[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct FieldsMap { name_map: HashMap, @@ -22,41 +20,30 @@ impl FieldsMap { self.name_map.is_empty() } - pub fn insert>(&mut self, name: S) -> SResult { - let name = name.into(); - if let Some(id) = self.name_map.get(&name) { + pub fn insert(&mut self, name: &str) -> SResult { + if let Some(id) = self.name_map.get(name) { return Ok(*id) } let id = self.next_id.into(); self.next_id = self.next_id.next()?; - self.name_map.insert(name.clone(), id); - self.id_map.insert(id, name); + self.name_map.insert(name.to_string(), id); + self.id_map.insert(id, name.to_string()); Ok(id) } - pub fn remove>(&mut self, name: S) { - let name = name.into(); - if let Some(id) = self.name_map.get(&name) { + pub fn remove(&mut self, name: &str) { + if let Some(id) = self.name_map.get(name) { self.id_map.remove(&id); } - self.name_map.remove(&name); + self.name_map.remove(name); } - pub fn get_id>(&self, name: S) -> Option { - let name = name.into(); - self.name_map.get(&name).map(|s| *s) + pub fn id(&self, name: &str) -> Option { + self.name_map.get(name).map(|s| *s) } - pub fn get_name>(&self, id: I) -> Option { - self.id_map.get(&id.into()).map(|s| s.to_string()) - } - - pub fn read_from_bin(reader: R) -> bincode::Result { - bincode::deserialize_from(reader) - } - - pub fn 
write_to_bin(&self, writer: W) -> bincode::Result<()> { - bincode::serialize_into(writer, &self) + pub fn name>(&self, id: I) -> Option<&str> { + self.id_map.get(&id.into()).map(|s| s.as_str()) } } @@ -73,17 +60,17 @@ mod tests { assert_eq!(fields_map.insert("id").unwrap(), 0.into()); assert_eq!(fields_map.insert("title").unwrap(), 1.into()); assert_eq!(fields_map.insert("descritpion").unwrap(), 2.into()); - assert_eq!(fields_map.get_id("id"), Some(0.into())); - assert_eq!(fields_map.get_id("title"), Some(1.into())); - assert_eq!(fields_map.get_id("descritpion"), Some(2.into())); - assert_eq!(fields_map.get_id("date"), None); + assert_eq!(fields_map.id("id"), Some(0.into())); + assert_eq!(fields_map.id("title"), Some(1.into())); + assert_eq!(fields_map.id("descritpion"), Some(2.into())); + assert_eq!(fields_map.id("date"), None); assert_eq!(fields_map.len(), 3); - assert_eq!(fields_map.get_name(0), Some("id".to_owned())); - assert_eq!(fields_map.get_name(1), Some("title".to_owned())); - assert_eq!(fields_map.get_name(2), Some("descritpion".to_owned())); - assert_eq!(fields_map.get_name(4), None); + assert_eq!(fields_map.name(0), Some("id")); + assert_eq!(fields_map.name(1), Some("title")); + assert_eq!(fields_map.name(2), Some("descritpion")); + assert_eq!(fields_map.name(4), None); fields_map.remove("title"); - assert_eq!(fields_map.get_id("title"), None); + assert_eq!(fields_map.id("title"), None); assert_eq!(fields_map.insert("title").unwrap(), 3.into()); assert_eq!(fields_map.len(), 3); } diff --git a/meilisearch-schema/src/lib.rs b/meilisearch-schema/src/lib.rs index 2b77b4336..c56ac151e 100644 --- a/meilisearch-schema/src/lib.rs +++ b/meilisearch-schema/src/lib.rs @@ -22,14 +22,6 @@ impl IndexedPos { pub const fn max() -> IndexedPos { IndexedPos(u16::max_value()) } - - pub fn next(self) -> SResult { - self.0.checked_add(1).map(IndexedPos).ok_or(Error::MaxFieldsLimitExceeded) - } - - pub fn prev(self) -> SResult { - self.0.checked_sub(1).map(IndexedPos).ok_or(Error::MaxFieldsLimitExceeded) - } } impl From for IndexedPos { @@ -44,7 +36,6 @@ impl Into for IndexedPos { } } - #[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)] pub struct FieldId(pub u16); @@ -64,10 +55,6 @@ impl FieldId { pub fn next(self) -> SResult { self.0.checked_add(1).map(FieldId).ok_or(Error::MaxFieldsLimitExceeded) } - - pub fn prev(self) -> SResult { - self.0.checked_sub(1).map(FieldId).ok_or(Error::MaxFieldsLimitExceeded) - } } impl From for FieldId { diff --git a/meilisearch-schema/src/schema.rs b/meilisearch-schema/src/schema.rs index 8fe3e2b23..22bb1e1f7 100644 --- a/meilisearch-schema/src/schema.rs +++ b/meilisearch-schema/src/schema.rs @@ -15,12 +15,11 @@ pub struct Schema { indexed: Vec, indexed_map: HashMap, - must_index_new_fields: bool, + index_new_fields: bool, } impl Schema { - - pub fn with_identifier>(name: S) -> Schema { + pub fn with_identifier(name: &str) -> Schema { let mut fields_map = FieldsMap::default(); let field_id = fields_map.insert(name.into()).unwrap(); @@ -31,47 +30,47 @@ impl Schema { displayed: HashSet::new(), indexed: Vec::new(), indexed_map: HashMap::new(), - must_index_new_fields: true, + index_new_fields: true, } } - pub fn identifier(&self) -> String { - self.fields_map.get_name(self.identifier).unwrap().to_string() + pub fn identifier(&self) -> &str { + self.fields_map.name(self.identifier).unwrap() } - pub fn set_identifier(&mut self, id: String) -> SResult<()> { - match self.get_id(id.clone()) { + pub fn set_identifier(&mut 
self, id: &str) -> SResult<()> { + match self.id(id) { Some(id) => { self.identifier = id; Ok(()) }, - None => Err(Error::FieldNameNotFound(id)) + None => Err(Error::FieldNameNotFound(id.to_string())) } } - pub fn get_id>(&self, name: S) -> Option { - self.fields_map.get_id(name) + pub fn id(&self, name: &str) -> Option { + self.fields_map.id(name) } - pub fn get_name>(&self, id: I) -> Option { - self.fields_map.get_name(id) + pub fn name>(&self, id: I) -> Option<&str> { + self.fields_map.name(id) } - pub fn contains>(&self, name: S) -> bool { - self.fields_map.get_id(name.into()).is_some() + pub fn contains(&self, name: &str) -> bool { + self.fields_map.id(name.into()).is_some() } - pub fn get_or_create_empty>(&mut self, name: S) -> SResult { + pub fn get_or_create_empty(&mut self, name: &str) -> SResult { self.fields_map.insert(name) } - pub fn get_or_create + std::clone::Clone>(&mut self, name: S) -> SResult { - match self.fields_map.get_id(name.clone()) { + pub fn get_or_create(&mut self, name: &str) -> SResult { + match self.fields_map.id(name.clone()) { Some(id) => { Ok(id) } None => { - if self.must_index_new_fields { + if self.index_new_fields { self.set_indexed(name.clone())?; self.set_displayed(name) } else { @@ -81,43 +80,43 @@ impl Schema { } } - pub fn get_ranked(&self) -> HashSet { + pub fn ranked(&self) -> HashSet { self.ranked.clone() } - pub fn get_ranked_name(&self) -> HashSet { - self.ranked.iter().filter_map(|a| self.get_name(*a)).collect() + pub fn ranked_name(&self) -> HashSet<&str> { + self.ranked.iter().filter_map(|a| self.name(*a)).collect() } - pub fn get_displayed(&self) -> HashSet { + pub fn displayed(&self) -> HashSet { self.displayed.clone() } - pub fn get_displayed_name(&self) -> HashSet { - self.displayed.iter().filter_map(|a| self.get_name(*a)).collect() + pub fn displayed_name(&self) -> HashSet<&str> { + self.displayed.iter().filter_map(|a| self.name(*a)).collect() } - pub fn get_indexed(&self) -> Vec { + pub fn indexed(&self) -> Vec { self.indexed.clone() } - pub fn get_indexed_name(&self) -> Vec { - self.indexed.iter().filter_map(|a| self.get_name(*a)).collect() + pub fn indexed_name(&self) -> Vec<&str> { + self.indexed.iter().filter_map(|a| self.name(*a)).collect() } - pub fn set_ranked>(&mut self, name: S) -> SResult { + pub fn set_ranked(&mut self, name: &str) -> SResult { let id = self.fields_map.insert(name.into())?; self.ranked.insert(id); Ok(id) } - pub fn set_displayed>(&mut self, name: S) -> SResult { + pub fn set_displayed(&mut self, name: &str) -> SResult { let id = self.fields_map.insert(name.into())?; self.displayed.insert(id); Ok(id) } - pub fn set_indexed>(&mut self, name: S) -> SResult<(FieldId, IndexedPos)> { + pub fn set_indexed(&mut self, name: &str) -> SResult<(FieldId, IndexedPos)> { let id = self.fields_map.insert(name.into())?; if let Some(indexed_pos) = self.indexed_map.get(&id) { return Ok((id, *indexed_pos)) @@ -128,55 +127,34 @@ impl Schema { Ok((id, pos.into())) } - pub fn remove_ranked>(&mut self, name: S) { - if let Some(id) = self.fields_map.get_id(name.into()) { + pub fn remove_ranked(&mut self, name: &str) { + if let Some(id) = self.fields_map.id(name.into()) { self.ranked.remove(&id); } } - pub fn remove_displayed>(&mut self, name: S) { - if let Some(id) = self.fields_map.get_id(name.into()) { + pub fn remove_displayed(&mut self, name: &str) { + if let Some(id) = self.fields_map.id(name.into()) { self.displayed.remove(&id); } } - pub fn remove_indexed>(&mut self, name: S) { - if let Some(id) = 
self.fields_map.get_id(name.into()) { + pub fn remove_indexed(&mut self, name: &str) { + if let Some(id) = self.fields_map.id(name.into()) { self.indexed_map.remove(&id); self.indexed.retain(|x| *x != id); } } - pub fn is_ranked>(&self, name: S) -> Option { - match self.fields_map.get_id(name.into()) { - Some(id) => self.ranked.get(&id).map(|s| *s), - None => None, - } - } - - pub fn is_displayed>(&self, name: S) -> Option { - match self.fields_map.get_id(name.into()) { - Some(id) => self.displayed.get(&id).map(|s| *s), - None => None, - } - } - - pub fn is_indexed>(&self, name: S) -> Option { - match self.fields_map.get_id(name.into()) { - Some(id) => self.indexed_map.get(&id).map(|s| *s), - None => None, - } - } - - pub fn id_is_ranked(&self, id: FieldId) -> bool { + pub fn is_ranked(&self, id: FieldId) -> bool { self.ranked.get(&id).is_some() } - pub fn id_is_displayed(&self, id: FieldId) -> bool { + pub fn is_displayed(&self, id: FieldId) -> bool { self.displayed.get(&id).is_some() } - pub fn id_is_indexed(&self, id: FieldId) -> Option<&IndexedPos> { + pub fn is_indexed(&self, id: FieldId) -> Option<&IndexedPos> { self.indexed_map.get(&id) } @@ -189,36 +167,36 @@ impl Schema { } } - pub fn update_ranked>(&mut self, data: impl IntoIterator) -> SResult<()> { + pub fn update_ranked>(&mut self, data: impl IntoIterator) -> SResult<()> { self.ranked = HashSet::new(); for name in data { - self.set_ranked(name)?; + self.set_ranked(name.as_ref())?; } Ok(()) } - pub fn update_displayed>(&mut self, data: impl IntoIterator) -> SResult<()> { + pub fn update_displayed>(&mut self, data: impl IntoIterator) -> SResult<()> { self.displayed = HashSet::new(); for name in data { - self.set_displayed(name)?; + self.set_displayed(name.as_ref())?; } Ok(()) } - pub fn update_indexed>(&mut self, data: Vec) -> SResult<()> { + pub fn update_indexed>(&mut self, data: Vec) -> SResult<()> { self.indexed = Vec::new(); self.indexed_map = HashMap::new(); for name in data { - self.set_indexed(name)?; + self.set_indexed(name.as_ref())?; } Ok(()) } - pub fn must_index_new_fields(&self) -> bool { - self.must_index_new_fields + pub fn index_new_fields(&self) -> bool { + self.index_new_fields } - pub fn set_must_index_new_fields(&mut self, value: bool) { - self.must_index_new_fields = value; + pub fn set_index_new_fields(&mut self, value: bool) { + self.index_new_fields = value; } }
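
Note: the `From` conversion added at the top of helpers/meilisearch.rs appears with its generic parameter stripped in this dump; given the `fn from(error: heed::Error)` signature it is presumably `impl From<heed::Error> for Error`, which is what lets the subsequent hunks replace `.unwrap()` with `?`. A self-contained sketch of that pattern (here `std::io::Error` stands in for `heed::Error` and `read_len` is an invented example function, so the snippet compiles on its own — it is not MeiliSearch code):

```rust
use std::io;

// Stand-in for the crate's error type; the real enum has more variants.
#[derive(Debug)]
enum Error {
    Internal(String),
}

// Same shape as the impl added in the diff (there the source type is heed::Error).
impl From<io::Error> for Error {
    fn from(error: io::Error) -> Self {
        Error::Internal(error.to_string())
    }
}

// With the conversion in place, `?` can replace the previous .unwrap() calls.
fn read_len(path: &str) -> Result<usize, Error> {
    let bytes = std::fs::read(path)?; // io::Error -> Error via From
    Ok(bytes.len())
}

fn main() {
    match read_len("Cargo.toml") {
        Ok(len) => println!("{} bytes", len),
        Err(err) => println!("error: {:?}", err),
    }
}
```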
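
Note: routes/mod.rs moves from nested `router.at(...).nest(...)` closures to flat `app.at(path)` registrations, and main.rs now propagates the `listen` error with `?` instead of unwrapping inside `block_on`. A minimal standalone sketch of that registration style, assuming the tide 0.6 API pinned in the lockfile (the `health` handler and bind address are illustrative stand-ins; the real handlers return `SResult` values wrapped through `into_response`):

```rust
use tide::{Request, Response};

// Stand-in endpoint returning a Response directly so the sketch stays standalone.
async fn health(_req: Request<()>) -> Response {
    Response::new(200)
}

fn main() -> std::io::Result<()> {
    let mut app = tide::Server::new();

    // Flat registration replaces the nested .nest(...) closures.
    app.at("/health").get(health);

    // Listen errors bubble up instead of being unwrapped.
    async_std::task::block_on(app.listen("127.0.0.1:8080"))
}
```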
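
Note: the meilisearch-schema changes above replace the `S: Into<String>` setters and `get_*` accessors with `&str`-based methods (`with_identifier`, `id`, `name`, `indexed_name`, `displayed_name`, `index_new_fields`), so lookups hand back borrowed strings instead of allocating. A short usage sketch against the modified crate — the `"title"` field is just an example, and this is illustrative code, not part of the patch:

```rust
use meilisearch_schema::Schema;

fn main() {
    // The identifier is now passed as a borrowed &str (previously S: Into<String>).
    let mut schema = Schema::with_identifier("id");

    // Field registration also takes &str.
    let _ = schema.set_indexed("title");
    let _ = schema.set_displayed("title");

    // get_id / get_name become id / name and return borrowed data.
    let title_id = schema.id("title").expect("title was inserted above");
    assert_eq!(schema.name(title_id), Some("title"));
    assert_eq!(schema.identifier(), "id");
    assert_eq!(schema.indexed_name(), vec!["title"]);

    // must_index_new_fields() is renamed to index_new_fields().
    assert!(schema.index_new_fields());
}
```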