diff --git a/CHANGELOG.md b/CHANGELOG.md index c2cbc38b0..7144ab2f1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ ## v0.10.2 - - Change the HTTP framework, moving from tide to actix-web #601 + - Change the HTTP framework, moving from tide to actix-web (#601) - Bump sentry version to 0.18.1 (#690) - Enable max payload size override (#684) - Disable sentry in debug (#681) @@ -18,6 +18,7 @@ - Update sentry features to remove openssl (#702) - Add SSL support (#669) - Rename fieldsFrequency into fieldsDistribution in stats (#719) + - Add support for error code reporting (#703) ## v0.10.1 diff --git a/Cargo.lock b/Cargo.lock index a2b38b19a..ea62dfe64 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1653,6 +1653,7 @@ dependencies = [ "jemallocator", "levenshtein_automata", "log", + "meilisearch-error", "meilisearch-schema", "meilisearch-tokenizer", "meilisearch-types", @@ -1673,6 +1674,13 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "meilisearch-error" +version = "0.10.1" +dependencies = [ + "actix-http", +] + [[package]] name = "meilisearch-http" version = "0.10.1" @@ -1690,7 +1698,6 @@ dependencies = [ "crossbeam-channel", "env_logger", "futures", - "heed", "http 0.1.21", "http-service", "http-service-mock", @@ -1699,6 +1706,7 @@ dependencies = [ "log", "main_error", "meilisearch-core", + "meilisearch-error", "meilisearch-schema", "meilisearch-tokenizer", "mime", @@ -1729,6 +1737,7 @@ version = "0.10.1" dependencies = [ "bincode", "indexmap", + "meilisearch-error", "serde", "serde_json", "toml", diff --git a/meilisearch-core/Cargo.toml b/meilisearch-core/Cargo.toml index 912210007..4d092d8af 100644 --- a/meilisearch-core/Cargo.toml +++ b/meilisearch-core/Cargo.toml @@ -24,6 +24,7 @@ intervaltree = "0.2.5" itertools = "0.9.0" levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] } log = "0.4.8" +meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" } meilisearch-schema = { path = "../meilisearch-schema", version 
= "0.10.1" } meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.10.1" } meilisearch-types = { path = "../meilisearch-types", version = "0.10.1" } diff --git a/meilisearch-core/examples/from_file.rs b/meilisearch-core/examples/from_file.rs index 821f23fcc..af41efb6c 100644 --- a/meilisearch-core/examples/from_file.rs +++ b/meilisearch-core/examples/from_file.rs @@ -126,9 +126,7 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box) @@ -175,10 +173,9 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box; + type ArcSwapFn = arc_swap::ArcSwapOption; +type SerdeDatetime = SerdeBincode>; + +pub type MainWriter<'a> = heed::RwTxn<'a, MainT>; +pub type MainReader = heed::RoTxn; + +pub type UpdateWriter<'a> = heed::RwTxn<'a, UpdateT>; +pub type UpdateReader = heed::RoTxn; + +const UNHEALTHY_KEY: &str = "_is_unhealthy"; +const LAST_UPDATE_KEY: &str = "last-update"; + pub struct MainT; pub struct UpdateT; @@ -241,6 +254,13 @@ impl Database { } } + pub fn is_indexing(&self, reader: &UpdateReader, index: &str) -> MResult> { + match self.open_index(&index) { + Some(index) => index.current_update_id(&reader).map(|u| Some(u.is_some())), + None => Ok(None), + } + } + pub fn create_index(&self, name: impl AsRef) -> MResult { let name = name.as_ref(); let mut indexes_lock = self.indexes.write().unwrap(); @@ -319,23 +339,73 @@ impl Database { self.update_fn.swap(None); } - pub fn main_read_txn(&self) -> heed::Result> { - self.env.typed_read_txn::() + pub fn main_read_txn(&self) -> MResult { + Ok(self.env.typed_read_txn::()?) } - pub fn main_write_txn(&self) -> heed::Result> { - self.env.typed_write_txn::() + pub(crate) fn main_write_txn(&self) -> MResult { + Ok(self.env.typed_write_txn::()?) } - pub fn update_read_txn(&self) -> heed::Result> { - self.update_env.typed_read_txn::() + /// Calls f providing it with a writer to the main database. After f is called, makes sure the + /// transaction is commited. 
Returns whatever result f returns. + pub fn main_write(&self, f: F) -> Result + where + F: FnOnce(&mut MainWriter) -> Result, + E: From, + { + let mut writer = self.main_write_txn()?; + let result = f(&mut writer)?; + writer.commit().map_err(Error::Heed)?; + Ok(result) } - pub fn update_read_txn(&self) -> heed::Result> { - self.update_env.typed_read_txn::() + /// provides a context with a reader to the main database. experimental. + pub fn main_read(&self, f: F) -> Result + where + F: FnOnce(&MainReader) -> Result, + E: From, + { + let reader = self.main_read_txn()?; + let result = f(&reader)?; + reader.abort().map_err(Error::Heed)?; + Ok(result) } - pub fn update_write_txn(&self) -> heed::Result> { - self.update_env.typed_write_txn::() + pub fn update_read_txn(&self) -> MResult { + Ok(self.update_env.typed_read_txn::()?) + } + + pub(crate) fn update_write_txn(&self) -> MResult> { + Ok(self.update_env.typed_write_txn::()?) + } + + /// Calls f providing it with a writer to the update database. After f is called, makes sure the + /// transaction is committed. Returns whatever result f returns. + pub fn update_write(&self, f: F) -> Result + where + F: FnOnce(&mut UpdateWriter) -> Result, + E: From, + { + let mut writer = self.update_write_txn()?; + let result = f(&mut writer)?; + writer.commit().map_err(Error::Heed)?; + Ok(result) + } + + /// provides a context with a reader to the update database. experimental. 
+ pub fn update_read(&self, f: F) -> Result + where + F: FnOnce(&UpdateReader) -> Result, + E: From, + { + let reader = self.update_read_txn()?; + let result = f(&reader)?; + reader.abort().map_err(Error::Heed)?; + Ok(result) + } + + pub fn copy_and_compact_to_path>(&self, path: P) -> MResult<(File, File)> { let path = path.as_ref(); let env_path = path.join("main"); @@ -352,7 +422,7 @@ impl Database { Ok(update_env_file) => Ok((env_file, update_env_file)), Err(e) => { fs::remove_file(env_path)?; - Err(e) + Err(e.into()) }, } } @@ -362,9 +432,78 @@ impl Database { indexes.keys().cloned().collect() } - pub fn common_store(&self) -> heed::PolyDatabase { + pub(crate) fn common_store(&self) -> heed::PolyDatabase { self.common_store } + + pub fn last_update(&self, reader: &heed::RoTxn) -> MResult>> { + match self.common_store() + .get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)? { + Some(datetime) => Ok(Some(datetime)), + None => Ok(None), + } + } + + pub fn set_last_update(&self, writer: &mut heed::RwTxn, time: &DateTime) -> MResult<()> { + self.common_store() + .put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, time)?; + Ok(()) + } + + pub fn set_healthy(&self, writer: &mut heed::RwTxn) -> MResult<()> { + let common_store = self.common_store(); + common_store.delete::<_, Str>(writer, UNHEALTHY_KEY)?; + Ok(()) + } + + pub fn set_unhealthy(&self, writer: &mut heed::RwTxn) -> MResult<()> { + let common_store = self.common_store(); + common_store.put::<_, Str, Unit>(writer, UNHEALTHY_KEY, &())?; + Ok(()) + } + + pub fn get_health(&self, reader: &heed::RoTxn) -> MResult> { + let common_store = self.common_store(); + Ok(common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY)?) 
+ } + + pub fn compute_stats(&self, writer: &mut MainWriter, index_uid: &str) -> MResult<()> { + let index = match self.open_index(&index_uid) { + Some(index) => index, + None => { + error!("Impossible to retrieve index {}", index_uid); + return Ok(()); + } + }; + + let schema = match index.main.schema(&writer)? { + Some(schema) => schema, + None => return Ok(()), + }; + + let all_documents_fields = index + .documents_fields_counts + .all_documents_fields_counts(&writer)?; + + // count fields frequencies + let mut fields_frequency = HashMap::<_, usize>::new(); + for result in all_documents_fields { + let (_, attr, _) = result?; + if let Some(field_id) = schema.indexed_pos_to_field_id(attr) { + *fields_frequency.entry(field_id).or_default() += 1; + } + } + + // convert attributes to their names + let frequency: HashMap<_, _> = fields_frequency + .into_iter() + .filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c))) + .collect(); + + index + .main + .put_fields_distribution(writer, &frequency) + } } #[cfg(test)] diff --git a/meilisearch-core/src/error.rs b/meilisearch-core/src/error.rs index 7990f691f..62840c2a8 100644 --- a/meilisearch-core/src/error.rs +++ b/meilisearch-core/src/error.rs @@ -9,6 +9,8 @@ pub use fst::Error as FstError; pub use heed::Error as HeedError; pub use pest::error as pest_error; +use meilisearch_error::{ErrorCode, Code}; + pub type MResult = Result; #[derive(Debug)] @@ -21,17 +23,41 @@ pub enum Error { MissingDocumentId, MaxFieldsLimitExceeded, Schema(meilisearch_schema::Error), - Zlmdb(heed::Error), + Heed(heed::Error), Fst(fst::Error), SerdeJson(SerdeJsonError), Bincode(bincode::Error), Serializer(SerializerError), Deserializer(DeserializerError), - UnsupportedOperation(UnsupportedOperation), FilterParseError(PestError), FacetError(FacetError), } +impl ErrorCode for Error { + fn error_code(&self) -> Code { + use Error::*; + + match self { + FacetError(_) => Code::Facet, + FilterParseError(_) => Code::Filter, + 
IndexAlreadyExists => Code::IndexAlreadyExists, + MissingPrimaryKey => Code::InvalidState, + MissingDocumentId => Code::MissingDocumentId, + MaxFieldsLimitExceeded => Code::MaxFieldsLimitExceeded, + Schema(s) => s.error_code(), + WordIndexMissing + | SchemaMissing => Code::InvalidState, + Heed(_) + | Fst(_) + | SerdeJson(_) + | Bincode(_) + | Serializer(_) + | Deserializer(_) + | Io(_) => Code::Internal, + } + } +} + impl From for Error { fn from(error: io::Error) -> Error { Error::Io(error) @@ -74,7 +100,7 @@ impl From for Error { impl From for Error { fn from(error: HeedError) -> Error { - Error::Zlmdb(error) + Error::Heed(error) } } @@ -108,12 +134,6 @@ impl From for Error { } } -impl From for Error { - fn from(op: UnsupportedOperation) -> Error { - Error::UnsupportedOperation(op) - } -} - impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::Error::*; @@ -126,13 +146,12 @@ impl fmt::Display for Error { MissingDocumentId => write!(f, "document id is missing"), MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"), Schema(e) => write!(f, "schema error; {}", e), - Zlmdb(e) => write!(f, "heed error; {}", e), + Heed(e) => write!(f, "heed error; {}", e), Fst(e) => write!(f, "fst error; {}", e), SerdeJson(e) => write!(f, "serde json error; {}", e), Bincode(e) => write!(f, "bincode error; {}", e), Serializer(e) => write!(f, "serializer error; {}", e), Deserializer(e) => write!(f, "deserializer error; {}", e), - UnsupportedOperation(op) => write!(f, "unsupported operation; {}", op), FilterParseError(e) => write!(f, "error parsing filter; {}", e), FacetError(e) => write!(f, "error processing facet filter: {}", e), } @@ -141,27 +160,17 @@ impl fmt::Display for Error { impl error::Error for Error {} -#[derive(Debug)] -pub enum UnsupportedOperation { - SchemaAlreadyExists, - CannotUpdateSchemaPrimaryKey, - CannotReorderSchemaAttribute, - CanOnlyIntroduceNewSchemaAttributesAtEnd, - 
CannotRemoveSchemaAttribute, -} +struct FilterParseError(PestError); -impl fmt::Display for UnsupportedOperation { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use self::UnsupportedOperation::*; - match self { - SchemaAlreadyExists => write!(f, "Cannot update index which already have a schema"), - CannotUpdateSchemaPrimaryKey => write!(f, "Cannot update the primary key of a schema"), - CannotReorderSchemaAttribute => write!(f, "Cannot reorder the attributes of a schema"), - CanOnlyIntroduceNewSchemaAttributesAtEnd => { - write!(f, "Can only introduce new attributes at end of a schema") - } - CannotRemoveSchemaAttribute => write!(f, "Cannot remove attributes from a schema"), - } +impl fmt::Display for FilterParseError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use crate::pest_error::LineColLocation::*; + + let (line, column) = match self.0.line_col { + Span((line, _), (column, _)) => (line, column), + Pos((line, column)) => (line, column), + }; + write!(f, "parsing error on line {} at column {}: {}", line, column, self.0.variant.message()) } } diff --git a/meilisearch-core/src/facets.rs b/meilisearch-core/src/facets.rs index dc8654915..cc1737eaf 100644 --- a/meilisearch-core/src/facets.rs +++ b/meilisearch-core/src/facets.rs @@ -13,7 +13,7 @@ use meilisearch_schema::{FieldId, Schema}; use meilisearch_types::DocumentId; use crate::database::MainT; -use crate::error::{FacetError, Error}; +use crate::error::{FacetError, MResult}; use crate::store::BEU16; /// Data structure used to represent a boolean expression in the form of nested arrays. 
@@ -34,14 +34,13 @@ impl FacetFilter { s: &str, schema: &Schema, attributes_for_faceting: &[FieldId], - ) -> Result { - + ) -> MResult { let parsed = serde_json::from_str::(s).map_err(|e| FacetError::ParsingError(e.to_string()))?; let mut filter = Vec::new(); match parsed { Value::Array(and_exprs) => { if and_exprs.is_empty() { - return Err(FacetError::EmptyArray); + return Err(FacetError::EmptyArray.into()); } for expr in and_exprs { match expr { @@ -51,7 +50,7 @@ impl FacetFilter { } Value::Array(or_exprs) => { if or_exprs.is_empty() { - return Err(FacetError::EmptyArray); + return Err(FacetError::EmptyArray.into()); } let mut inner = Vec::new(); for expr in or_exprs { @@ -60,17 +59,17 @@ impl FacetFilter { let key = FacetKey::from_str( &s, schema, attributes_for_faceting)?; inner.push(key); } - bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value)), + bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value).into()), } } filter.push(Either::Left(inner)); } - bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value)), + bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value).into()), } } return Ok(Self(filter)); } - bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value)), + bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value).into()), } } } @@ -183,7 +182,7 @@ pub fn facet_map_from_docids( index: &crate::Index, document_ids: &[DocumentId], attributes_for_facetting: &[FieldId], -) -> Result>, Error> { +) -> MResult>> { let mut facet_map = HashMap::new(); for document_id in document_ids { for result in index @@ -210,7 +209,7 @@ pub fn facet_map_from_docs( schema: &Schema, documents: &HashMap>, attributes_for_facetting: &[FieldId], -) -> Result>, Error> { +) -> MResult>> { let mut facet_map = HashMap::new(); let attributes_for_facetting = attributes_for_facetting .iter() diff --git a/meilisearch-core/src/lib.rs 
b/meilisearch-core/src/lib.rs index a9938bb73..e468a794e 100644 --- a/meilisearch-core/src/lib.rs +++ b/meilisearch-core/src/lib.rs @@ -26,7 +26,7 @@ pub mod settings; pub mod store; pub mod update; -pub use self::database::{BoxUpdateFn, Database, DatabaseOptions, MainT, UpdateT}; +pub use self::database::{BoxUpdateFn, Database, DatabaseOptions, MainT, UpdateT, MainWriter, MainReader, UpdateWriter, UpdateReader}; pub use self::error::{Error, HeedError, FstError, MResult, pest_error, FacetError}; pub use self::filters::Filter; pub use self::number::{Number, ParseNumberError}; diff --git a/meilisearch-core/src/query_builder.rs b/meilisearch-core/src/query_builder.rs index c9ed1933e..72accfcde 100644 --- a/meilisearch-core/src/query_builder.rs +++ b/meilisearch-core/src/query_builder.rs @@ -268,7 +268,7 @@ mod tests { let alternatives = self .index .synonyms - .synonyms(&writer, word.as_bytes()) + .synonyms_fst(&writer, word.as_bytes()) .unwrap(); let new = sdset_into_fstset(&new); diff --git a/meilisearch-core/src/query_tree.rs b/meilisearch-core/src/query_tree.rs index 2687028a0..4a3a622b2 100644 --- a/meilisearch-core/src/query_tree.rs +++ b/meilisearch-core/src/query_tree.rs @@ -147,7 +147,7 @@ fn split_best_frequency<'a>(reader: &heed::RoTxn, ctx: &Context, word: &' fn fetch_synonyms(reader: &heed::RoTxn, ctx: &Context, words: &[&str]) -> MResult>> { let words = normalize_str(&words.join(" ")); - let set = ctx.synonyms.synonyms(reader, words.as_bytes())?; + let set = ctx.synonyms.synonyms_fst(reader, words.as_bytes())?; let mut strings = Vec::new(); let mut stream = set.stream(); diff --git a/meilisearch-core/src/store/documents_fields_counts.rs b/meilisearch-core/src/store/documents_fields_counts.rs index 69bacd5f7..f0d23c99b 100644 --- a/meilisearch-core/src/store/documents_fields_counts.rs +++ b/meilisearch-core/src/store/documents_fields_counts.rs @@ -4,6 +4,7 @@ use crate::DocumentId; use heed::types::OwnedType; use heed::Result as ZResult; use 
meilisearch_schema::IndexedPos; +use crate::MResult; #[derive(Copy, Clone)] pub struct DocumentsFieldsCounts { @@ -60,7 +61,7 @@ impl DocumentsFieldsCounts { Ok(DocumentFieldsCountsIter { iter }) } - pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult> { + pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn) -> MResult> { let iter = self.documents_fields_counts.iter(reader)?; Ok(DocumentsIdsIter { last_seen_id: None, @@ -102,7 +103,7 @@ pub struct DocumentsIdsIter<'txn> { } impl Iterator for DocumentsIdsIter<'_> { - type Item = ZResult; + type Item = MResult; fn next(&mut self) -> Option { for result in &mut self.iter { @@ -114,7 +115,7 @@ impl Iterator for DocumentsIdsIter<'_> { return Some(Ok(document_id)); } } - Err(e) => return Some(Err(e)), + Err(e) => return Some(Err(e.into())), } } None diff --git a/meilisearch-core/src/store/main.rs b/meilisearch-core/src/store/main.rs index 864970320..f1ef6fc5e 100644 --- a/meilisearch-core/src/store/main.rs +++ b/meilisearch-core/src/store/main.rs @@ -2,14 +2,13 @@ use std::borrow::Cow; use std::collections::HashMap; use chrono::{DateTime, Utc}; -use heed::Result as ZResult; use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str}; use meilisearch_schema::{FieldId, Schema}; use meilisearch_types::DocumentId; use sdset::Set; use crate::database::MainT; -use crate::RankedMap; +use crate::{RankedMap, MResult}; use crate::settings::RankingRule; use crate::{FstSetCow, FstMapCow}; use super::{CowSet, DocumentsIds}; @@ -41,75 +40,73 @@ pub struct Main { } impl Main { - pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> { - self.main.clear(writer) + pub fn clear(self, writer: &mut heed::RwTxn) -> MResult<()> { + Ok(self.main.clear(writer)?) 
} - pub fn put_name(self, writer: &mut heed::RwTxn, name: &str) -> ZResult<()> { - self.main.put::<_, Str, Str>(writer, NAME_KEY, name) + pub fn put_name(self, writer: &mut heed::RwTxn, name: &str) -> MResult<()> { + Ok(self.main.put::<_, Str, Str>(writer, NAME_KEY, name)?) } - pub fn name(self, reader: &heed::RoTxn) -> ZResult> { + pub fn name(self, reader: &heed::RoTxn) -> MResult> { Ok(self .main .get::<_, Str, Str>(reader, NAME_KEY)? .map(|name| name.to_owned())) } - pub fn put_created_at(self, writer: &mut heed::RwTxn) -> ZResult<()> { - self.main - .put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now()) + pub fn put_created_at(self, writer: &mut heed::RwTxn) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now())?) } - pub fn created_at(self, reader: &heed::RoTxn) -> ZResult>> { - self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY) + pub fn created_at(self, reader: &heed::RoTxn) -> MResult>> { + Ok(self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY)?) } - pub fn put_updated_at(self, writer: &mut heed::RwTxn) -> ZResult<()> { - self.main - .put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now()) + pub fn put_updated_at(self, writer: &mut heed::RwTxn) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now())?) } - pub fn updated_at(self, reader: &heed::RoTxn) -> ZResult>> { - self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY) + pub fn updated_at(self, reader: &heed::RoTxn) -> MResult>> { + Ok(self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY)?) } - pub fn put_internal_docids(self, writer: &mut heed::RwTxn, ids: &sdset::Set) -> ZResult<()> { - self.main.put::<_, Str, DocumentsIds>(writer, INTERNAL_DOCIDS_KEY, ids) + pub fn put_internal_docids(self, writer: &mut heed::RwTxn, ids: &sdset::Set) -> MResult<()> { + Ok(self.main.put::<_, Str, DocumentsIds>(writer, INTERNAL_DOCIDS_KEY, ids)?) 
} - pub fn internal_docids<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult>> { + pub fn internal_docids<'txn>(self, reader: &'txn heed::RoTxn) -> MResult>> { match self.main.get::<_, Str, DocumentsIds>(reader, INTERNAL_DOCIDS_KEY)? { Some(ids) => Ok(ids), None => Ok(Cow::default()), } } - pub fn merge_internal_docids(self, writer: &mut heed::RwTxn, new_ids: &sdset::Set) -> ZResult<()> { + pub fn merge_internal_docids(self, writer: &mut heed::RwTxn, new_ids: &sdset::Set) -> MResult<()> { use sdset::SetOperation; // We do an union of the old and new internal ids. let internal_docids = self.internal_docids(writer)?; let internal_docids = sdset::duo::Union::new(&internal_docids, new_ids).into_set_buf(); - self.put_internal_docids(writer, &internal_docids) + Ok(self.put_internal_docids(writer, &internal_docids)?) } - pub fn remove_internal_docids(self, writer: &mut heed::RwTxn, ids: &sdset::Set) -> ZResult<()> { + pub fn remove_internal_docids(self, writer: &mut heed::RwTxn, ids: &sdset::Set) -> MResult<()> { use sdset::SetOperation; // We do a difference of the old and new internal ids. let internal_docids = self.internal_docids(writer)?; let internal_docids = sdset::duo::Difference::new(&internal_docids, ids).into_set_buf(); - self.put_internal_docids(writer, &internal_docids) + Ok(self.put_internal_docids(writer, &internal_docids)?) } - pub fn put_external_docids(self, writer: &mut heed::RwTxn, ids: &fst::Map) -> ZResult<()> + pub fn put_external_docids(self, writer: &mut heed::RwTxn, ids: &fst::Map) -> MResult<()> where A: AsRef<[u8]>, { - self.main.put::<_, Str, ByteSlice>(writer, EXTERNAL_DOCIDS_KEY, ids.as_fst().as_bytes()) + Ok(self.main.put::<_, Str, ByteSlice>(writer, EXTERNAL_DOCIDS_KEY, ids.as_fst().as_bytes())?) 
} - pub fn merge_external_docids(self, writer: &mut heed::RwTxn, new_docids: &fst::Map) -> ZResult<()> + pub fn merge_external_docids(self, writer: &mut heed::RwTxn, new_docids: &fst::Map) -> MResult<()> where A: AsRef<[u8]>, { use fst::{Streamer, IntoStreamer}; @@ -124,10 +121,10 @@ impl Main { drop(op); let external_docids = build.into_map(); - self.put_external_docids(writer, &external_docids) + Ok(self.put_external_docids(writer, &external_docids)?) } - pub fn remove_external_docids(self, writer: &mut heed::RwTxn, ids: &fst::Map) -> ZResult<()> + pub fn remove_external_docids(self, writer: &mut heed::RwTxn, ids: &fst::Map) -> MResult<()> where A: AsRef<[u8]>, { use fst::{Streamer, IntoStreamer}; @@ -145,74 +142,90 @@ impl Main { self.put_external_docids(writer, &external_docids) } - pub fn external_docids(self, reader: &heed::RoTxn) -> ZResult { + pub fn external_docids(self, reader: &heed::RoTxn) -> MResult { match self.main.get::<_, Str, ByteSlice>(reader, EXTERNAL_DOCIDS_KEY)? { Some(bytes) => Ok(fst::Map::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Map::default().map_data(Cow::Owned).unwrap()), } } - pub fn external_to_internal_docid(self, reader: &heed::RoTxn, external_docid: &str) -> ZResult> { + pub fn external_to_internal_docid(self, reader: &heed::RoTxn, external_docid: &str) -> MResult> { let external_ids = self.external_docids(reader)?; Ok(external_ids.get(external_docid).map(|id| DocumentId(id as u32))) } - pub fn put_words_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> { - self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes()) - } - - pub fn words_fst(self, reader: &heed::RoTxn) -> ZResult { + pub fn words_fst(self, reader: &heed::RoTxn) -> MResult { match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? 
{ Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } - pub fn put_schema(self, writer: &mut heed::RwTxn, schema: &Schema) -> ZResult<()> { - self.main.put::<_, Str, SerdeBincode>(writer, SCHEMA_KEY, schema) + pub fn put_words_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> MResult<()> { + Ok(self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes())?) } - pub fn schema(self, reader: &heed::RoTxn) -> ZResult> { - self.main.get::<_, Str, SerdeBincode>(reader, SCHEMA_KEY) + pub fn put_schema(self, writer: &mut heed::RwTxn, schema: &Schema) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeBincode>(writer, SCHEMA_KEY, schema)?) } - pub fn delete_schema(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, SCHEMA_KEY) + pub fn schema(self, reader: &heed::RoTxn) -> MResult> { + Ok(self.main.get::<_, Str, SerdeBincode>(reader, SCHEMA_KEY)?) } - pub fn put_ranked_map(self, writer: &mut heed::RwTxn, ranked_map: &RankedMap) -> ZResult<()> { - self.main.put::<_, Str, SerdeBincode>(writer, RANKED_MAP_KEY, &ranked_map) + pub fn delete_schema(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, SCHEMA_KEY)?) } - pub fn ranked_map(self, reader: &heed::RoTxn) -> ZResult> { - self.main.get::<_, Str, SerdeBincode>(reader, RANKED_MAP_KEY) + pub fn put_ranked_map(self, writer: &mut heed::RwTxn, ranked_map: &RankedMap) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeBincode>(writer, RANKED_MAP_KEY, &ranked_map)?) } - pub fn put_synonyms_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> { + pub fn ranked_map(self, reader: &heed::RoTxn) -> MResult> { + Ok(self.main.get::<_, Str, SerdeBincode>(reader, RANKED_MAP_KEY)?) 
+ } + + pub fn put_synonyms_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> MResult<()> { let bytes = fst.as_fst().as_bytes(); - self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes) + Ok(self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes)?) } - pub fn synonyms_fst(self, reader: &heed::RoTxn) -> ZResult { + pub(crate) fn synonyms_fst(self, reader: &heed::RoTxn) -> MResult { match self.main.get::<_, Str, ByteSlice>(reader, SYNONYMS_KEY)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } - pub fn put_stop_words_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> { - let bytes = fst.as_fst().as_bytes(); - self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes) + pub fn synonyms(self, reader: &heed::RoTxn) -> MResult> { + let synonyms = self + .synonyms_fst(&reader)? + .stream() + .into_strs()?; + Ok(synonyms) } - pub fn stop_words_fst(self, reader: &heed::RoTxn) -> ZResult { + pub fn put_stop_words_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> MResult<()> { + let bytes = fst.as_fst().as_bytes(); + Ok(self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)?) + } + + pub(crate) fn stop_words_fst(self, reader: &heed::RoTxn) -> MResult { match self.main.get::<_, Str, ByteSlice>(reader, STOP_WORDS_KEY)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } - pub fn put_number_of_documents(self, writer: &mut heed::RwTxn, f: F) -> ZResult + pub fn stop_words(self, reader: &heed::RoTxn) -> MResult> { + let stop_word_list = self + .stop_words_fst(reader)? 
+ .stream() + .into_strs()?; + Ok(stop_word_list) + } + + pub fn put_number_of_documents(self, writer: &mut heed::RwTxn, f: F) -> MResult where F: Fn(u64) -> u64, { @@ -222,11 +235,10 @@ impl Main { Ok(new) } - pub fn number_of_documents(self, reader: &heed::RoTxn) -> ZResult { + pub fn number_of_documents(self, reader: &heed::RoTxn) -> MResult { match self .main - .get::<_, Str, OwnedType>(reader, NUMBER_OF_DOCUMENTS_KEY)? - { + .get::<_, Str, OwnedType>(reader, NUMBER_OF_DOCUMENTS_KEY)? { Some(value) => Ok(value), None => Ok(0), } @@ -235,13 +247,12 @@ impl Main { pub fn put_fields_distribution( self, writer: &mut heed::RwTxn, - fields_distribution: &FreqsMap, - ) -> ZResult<()> { - self.main - .put::<_, Str, SerdeFreqsMap>(writer, FIELDS_DISTRIBUTION_KEY, fields_distribution) + fields_frequency: &FreqsMap, + ) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeFreqsMap>(writer, FIELDS_DISTRIBUTION_KEY, fields_frequency)?) } - pub fn fields_distribution(&self, reader: &heed::RoTxn) -> ZResult> { + pub fn fields_distribution(&self, reader: &heed::RoTxn) -> MResult> { match self .main .get::<_, Str, SerdeFreqsMap>(reader, FIELDS_DISTRIBUTION_KEY)? @@ -251,51 +262,50 @@ impl Main { } } - pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn) -> ZResult>>> { - self.main.get::<_, Str, CowSet>(reader, ATTRIBUTES_FOR_FACETING_KEY) + pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn) -> MResult>>> { + Ok(self.main.get::<_, Str, CowSet>(reader, ATTRIBUTES_FOR_FACETING_KEY)?) } - pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn, attributes: &Set) -> ZResult<()> { - self.main.put::<_, Str, CowSet>(writer, ATTRIBUTES_FOR_FACETING_KEY, attributes) + pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn, attributes: &Set) -> MResult<()> { + Ok(self.main.put::<_, Str, CowSet>(writer, ATTRIBUTES_FOR_FACETING_KEY, attributes)?) 
} - pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING_KEY) + pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING_KEY)?) } - pub fn ranking_rules(&self, reader: &heed::RoTxn) -> ZResult>> { - self.main.get::<_, Str, SerdeBincode>>(reader, RANKING_RULES_KEY) + pub fn ranking_rules(&self, reader: &heed::RoTxn) -> MResult>> { + Ok(self.main.get::<_, Str, SerdeBincode>>(reader, RANKING_RULES_KEY)?) } - pub fn put_ranking_rules(self, writer: &mut heed::RwTxn, value: &[RankingRule]) -> ZResult<()> { - self.main.put::<_, Str, SerdeBincode>>(writer, RANKING_RULES_KEY, &value.to_vec()) + pub fn put_ranking_rules(self, writer: &mut heed::RwTxn, value: &[RankingRule]) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeBincode>>(writer, RANKING_RULES_KEY, &value.to_vec())?) } - pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, RANKING_RULES_KEY) + pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, RANKING_RULES_KEY)?) } - pub fn distinct_attribute(&self, reader: &heed::RoTxn) -> ZResult> { + pub fn distinct_attribute(&self, reader: &heed::RoTxn) -> MResult> { if let Some(value) = self.main.get::<_, Str, Str>(reader, DISTINCT_ATTRIBUTE_KEY)? { return Ok(Some(value.to_owned())) } return Ok(None) } - pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn, value: &str) -> ZResult<()> { - self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value) + pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn, value: &str) -> MResult<()> { + Ok(self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value)?) 
} - pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY) + pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY)?) } - pub fn put_customs(self, writer: &mut heed::RwTxn, customs: &[u8]) -> ZResult<()> { - self.main - .put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs) + pub fn put_customs(self, writer: &mut heed::RwTxn, customs: &[u8]) -> MResult<()> { + Ok(self.main.put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs)?) } - pub fn customs<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult> { - self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY) + pub fn customs<'txn>(self, reader: &'txn heed::RoTxn) -> MResult> { + Ok(self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY)?) } } diff --git a/meilisearch-core/src/store/mod.rs b/meilisearch-core/src/store/mod.rs index d19d3f7d3..fa5baa831 100644 --- a/meilisearch-core/src/store/mod.rs +++ b/meilisearch-core/src/store/mod.rs @@ -31,7 +31,6 @@ use std::collections::HashSet; use std::convert::TryInto; use std::{mem, ptr}; -use heed::Result as ZResult; use heed::{BytesEncode, BytesDecode}; use meilisearch_schema::{IndexedPos, FieldId}; use sdset::{Set, SetBuf}; @@ -279,14 +278,14 @@ impl Index { } } - pub fn customs_update(&self, writer: &mut heed::RwTxn, customs: Vec) -> ZResult { + pub fn customs_update(&self, writer: &mut heed::RwTxn, customs: Vec) -> MResult { let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); - update::push_customs_update(writer, self.updates, self.updates_results, customs) + Ok(update::push_customs_update(writer, self.updates, self.updates_results, customs)?) 
} - pub fn settings_update(&self, writer: &mut heed::RwTxn, update: SettingsUpdate) -> ZResult { + pub fn settings_update(&self, writer: &mut heed::RwTxn, update: SettingsUpdate) -> MResult { let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); - update::push_settings_update(writer, self.updates, self.updates_results, update) + Ok(update::push_settings_update(writer, self.updates, self.updates_results, update)?) } pub fn documents_addition(&self) -> update::DocumentsAddition { diff --git a/meilisearch-core/src/store/synonyms.rs b/meilisearch-core/src/store/synonyms.rs index 1ec8d313c..bf7472f96 100644 --- a/meilisearch-core/src/store/synonyms.rs +++ b/meilisearch-core/src/store/synonyms.rs @@ -4,7 +4,7 @@ use heed::Result as ZResult; use heed::types::ByteSlice; use crate::database::MainT; -use crate::FstSetCow; +use crate::{FstSetCow, MResult}; #[derive(Copy, Clone)] pub struct Synonyms { @@ -27,10 +27,18 @@ impl Synonyms { self.synonyms.clear(writer) } - pub fn synonyms<'txn>(self, reader: &'txn heed::RoTxn, word: &[u8]) -> ZResult> { + pub(crate) fn synonyms_fst<'txn>(self, reader: &'txn heed::RoTxn, word: &[u8]) -> ZResult> { match self.synonyms.get(reader, word)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } + + pub fn synonyms(self, reader: &heed::RoTxn, word: &[u8]) -> MResult> { + let synonyms = self + .synonyms_fst(&reader, word)? 
+ .stream() + .into_strs()?; + Ok(synonyms) + } } diff --git a/meilisearch-core/src/update/customs_update.rs b/meilisearch-core/src/update/customs_update.rs index a413d13a6..a3a66e61d 100644 --- a/meilisearch-core/src/update/customs_update.rs +++ b/meilisearch-core/src/update/customs_update.rs @@ -1,14 +1,13 @@ -use heed::Result as ZResult; use crate::database::{MainT, UpdateT}; -use crate::store; +use crate::{store, MResult}; use crate::update::{next_update_id, Update}; pub fn apply_customs_update( writer: &mut heed::RwTxn, main_store: store::Main, customs: &[u8], -) -> ZResult<()> { +) -> MResult<()> { main_store.put_customs(writer, customs) } @@ -17,7 +16,7 @@ pub fn push_customs_update( updates_store: store::Updates, updates_results_store: store::UpdatesResults, customs: Vec, -) -> ZResult { +) -> MResult { let last_update_id = next_update_id(writer, updates_store, updates_results_store)?; let update = Update::customs(customs); diff --git a/meilisearch-error/Cargo.toml b/meilisearch-error/Cargo.toml new file mode 100644 index 000000000..ad77978b1 --- /dev/null +++ b/meilisearch-error/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "meilisearch-error" +version = "0.10.1" +authors = ["marin "] +edition = "2018" + +[dependencies] +actix-http = "1.0.1" diff --git a/meilisearch-error/src/lib.rs b/meilisearch-error/src/lib.rs new file mode 100644 index 000000000..1c8fc4e78 --- /dev/null +++ b/meilisearch-error/src/lib.rs @@ -0,0 +1,174 @@ +use std::fmt; + +use actix_http::http::StatusCode; + +pub trait ErrorCode: std::error::Error { + fn error_code(&self) -> Code; + + /// returns the HTTP status code associated with the error + fn http_status(&self) -> StatusCode { + self.error_code().http() + } + + /// returns the doc url associated with the error + fn error_url(&self) -> String { + self.error_code().url() + } + + /// returns error name, used as error code + fn error_name(&self) -> String { + self.error_code().name() + } + + /// return the error type + fn 
error_type(&self) -> String { + self.error_code().type_() + } +} + +enum ErrorType { + InternalError, + InvalidRequest, + Authentication, +} + +impl fmt::Display for ErrorType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use ErrorType::*; + + match self { + InternalError => write!(f, "internal_error"), + InvalidRequest => write!(f, "invalid_request"), + Authentication => write!(f, "authentication"), + } + } +} + +pub enum Code { + // index related error + CreateIndex, + IndexAlreadyExists, + IndexNotFound, + InvalidIndexUid, + OpenIndex, + + // invalid state error + InvalidState, + MissingPrimaryKey, + PrimaryKeyAlreadyPresent, + + MaxFieldsLimitExceeded, + MissingDocumentId, + + Facet, + Filter, + + BadParameter, + BadRequest, + DocumentNotFound, + Internal, + InvalidToken, + Maintenance, + MissingAuthorizationHeader, + MissingHeader, + NotFound, + PayloadTooLarge, + RetrieveDocument, + SearchDocuments, + UnsupportedMediaType, +} + +impl Code { + + /// associate a `Code` variant to the actual ErrCode + fn err_code(&self) -> ErrCode { + use Code::*; + + match self { + // index related errors + CreateIndex => ErrCode::invalid("create_index", StatusCode::BAD_REQUEST), + IndexAlreadyExists => ErrCode::invalid("existing_index", StatusCode::BAD_REQUEST), + IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND), InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST), + OpenIndex => ErrCode::internal("open_index", StatusCode::INTERNAL_SERVER_ERROR), + + // invalid state error + InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR), + MissingPrimaryKey => ErrCode::internal("missing_primary_key", StatusCode::INTERNAL_SERVER_ERROR), + PrimaryKeyAlreadyPresent => ErrCode::internal("primary_key_already_present", StatusCode::INTERNAL_SERVER_ERROR), + + // invalid document + MaxFieldsLimitExceeded => ErrCode::invalid("max_field_limit_exceeded", StatusCode::BAD_REQUEST), + 
MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST), + + Facet => ErrCode::invalid("invalid_facet", StatusCode::BAD_REQUEST), + Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST), + + BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST), + BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST), + DocumentNotFound => ErrCode::internal("document_not_found", StatusCode::NOT_FOUND), + Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR), + InvalidToken => ErrCode::authentication("invalid_token", StatusCode::UNAUTHORIZED), + Maintenance => ErrCode::internal("maintenance", StatusCode::SERVICE_UNAVAILABLE), + MissingAuthorizationHeader => ErrCode::authentication("missing_authorization_header", StatusCode::FORBIDDEN), + MissingHeader => ErrCode::authentication("missing_header", StatusCode::UNAUTHORIZED), + NotFound => ErrCode::invalid("not_found", StatusCode::NOT_FOUND), + PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE), + RetrieveDocument => ErrCode::internal("retrieve_document", StatusCode::BAD_REQUEST), + SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST), + UnsupportedMediaType => ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE), + } + } + + /// return the HTTP status code associated with the `Code` + fn http(&self) -> StatusCode { + self.err_code().status_code + } + + /// return error name, used as error code + fn name(&self) -> String { + self.err_code().error_name.to_string() + } + + /// return the error type + fn type_(&self) -> String { + self.err_code().error_type.to_string() + } + + /// return the doc url associated with the error + fn url(&self) -> String { + format!("https://docs.meilisearch.com/error/{}", self.name()) + } +} + +/// Internal structure providing a convenient way to create error codes + struct ErrCode { + status_code: StatusCode, + 
error_type: ErrorType, + error_name: &'static str, +} + +impl ErrCode { + fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::Authentication, + } + } + + fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::InternalError, + } + } + + fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::InvalidRequest, + } + } +} diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 368c8f649..abcbba9d2 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -29,12 +29,12 @@ chrono = { version = "0.4.11", features = ["serde"] } crossbeam-channel = "0.4.2" env_logger = "0.7.1" futures = "0.3.4" -heed = "0.8.0" http = "0.1.19" indexmap = { version = "1.3.2", features = ["serde-1"] } log = "0.4.8" main_error = "0.1.0" meilisearch-core = { path = "../meilisearch-core", version = "0.10.1" } +meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" } meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" } meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"} mime = "0.3.16" diff --git a/meilisearch-http/src/data.rs b/meilisearch-http/src/data.rs index 39b7f9b1e..3692b0b69 100644 --- a/meilisearch-http/src/data.rs +++ b/meilisearch-http/src/data.rs @@ -1,21 +1,13 @@ -use std::collections::HashMap; use std::ops::Deref; use std::sync::Arc; -use chrono::{DateTime, Utc}; -use heed::types::{SerdeBincode, Str}; -use log::error; -use meilisearch_core::{Database, DatabaseOptions, Error as MError, MResult, MainT, UpdateT}; +use meilisearch_core::{Database, DatabaseOptions}; use sha2::Digest; use sysinfo::Pid; use crate::index_update_callback; use crate::option::Opt; -const LAST_UPDATE_KEY: &str = "last-update"; - -type 
SerdeDatetime = SerdeBincode>; - #[derive(Clone)] pub struct Data { inner: Arc, @@ -62,72 +54,6 @@ impl ApiKeys { } } -impl DataInner { - pub fn is_indexing(&self, reader: &heed::RoTxn, index: &str) -> MResult> { - match self.db.open_index(&index) { - Some(index) => index.current_update_id(&reader).map(|u| Some(u.is_some())), - None => Ok(None), - } - } - - pub fn last_update(&self, reader: &heed::RoTxn) -> MResult>> { - match self - .db - .common_store() - .get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)? - { - Some(datetime) => Ok(Some(datetime)), - None => Ok(None), - } - } - - pub fn set_last_update(&self, writer: &mut heed::RwTxn) -> MResult<()> { - self.db - .common_store() - .put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now()) - .map_err(Into::into) - } - - pub fn compute_stats(&self, writer: &mut heed::RwTxn, index_uid: &str) -> MResult<()> { - let index = match self.db.open_index(&index_uid) { - Some(index) => index, - None => { - error!("Impossible to retrieve index {}", index_uid); - return Ok(()); - } - }; - - let schema = match index.main.schema(&writer)? 
{ - Some(schema) => schema, - None => return Ok(()), - }; - - let all_documents_fields = index - .documents_fields_counts - .all_documents_fields_counts(&writer)?; - - // count fields frequencies - let mut fields_distribution = HashMap::<_, usize>::new(); - for result in all_documents_fields { - let (_, attr, _) = result?; - if let Some(field_id) = schema.indexed_pos_to_field_id(attr) { - *fields_distribution.entry(field_id).or_default() += 1; - } - } - - // convert attributes to their names - let distribution: HashMap<_, _> = fields_distribution - .into_iter() - .filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c))) - .collect(); - - index - .main - .put_fields_distribution(writer, &distribution) - .map_err(MError::Zlmdb) - } -} - impl Data { pub fn new(opt: Opt) -> Data { let db_path = opt.db_path.clone(); diff --git a/meilisearch-http/src/error.rs b/meilisearch-http/src/error.rs index 27626c80b..3f57e3a92 100644 --- a/meilisearch-http/src/error.rs +++ b/meilisearch-http/src/error.rs @@ -1,13 +1,41 @@ +use std::error; use std::fmt; use actix_http::ResponseBuilder; use actix_web as aweb; +use actix_web::error::JsonPayloadError; use actix_web::http::StatusCode; use serde_json::json; -use actix_web::error::JsonPayloadError; + +use meilisearch_error::{ErrorCode, Code}; #[derive(Debug)] -pub enum ResponseError { +pub struct ResponseError { + inner: Box, +} + +impl error::Error for ResponseError {} + +impl ErrorCode for ResponseError { + fn error_code(&self) -> Code { + self.inner.error_code() + } +} + +impl fmt::Display for ResponseError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt(f) + } +} + +impl From for ResponseError { + fn from(error: Error) -> ResponseError { + ResponseError { inner: Box::new(error) } + } +} + +#[derive(Debug)] +pub enum Error { BadParameter(String, String), BadRequest(String), CreateIndex(String), @@ -21,15 +49,40 @@ pub enum ResponseError { MissingHeader(String), NotFound(String), 
OpenIndex(String), - FilterParsing(String), RetrieveDocument(u32, String), SearchDocuments(String), PayloadTooLarge, UnsupportedMediaType, - FacetExpression(String), - FacetCount(String), } +impl error::Error for Error {} + +impl ErrorCode for Error { + fn error_code(&self) -> Code { + use Error::*; + match self { + BadParameter(_, _) => Code::BadParameter, + BadRequest(_) => Code::BadRequest, + CreateIndex(_) => Code::CreateIndex, + DocumentNotFound(_) => Code::DocumentNotFound, + IndexNotFound(_) => Code::IndexNotFound, + Internal(_) => Code::Internal, + InvalidIndexUid => Code::InvalidIndexUid, + InvalidToken(_) => Code::InvalidToken, + Maintenance => Code::Maintenance, + MissingAuthorizationHeader => Code::MissingAuthorizationHeader, + MissingHeader(_) => Code::MissingHeader, + NotFound(_) => Code::NotFound, + OpenIndex(_) => Code::OpenIndex, + RetrieveDocument(_, _) => Code::RetrieveDocument, + SearchDocuments(_) => Code::SearchDocuments, + PayloadTooLarge => Code::PayloadTooLarge, + UnsupportedMediaType => Code::UnsupportedMediaType, + } + } +} + +#[derive(Debug)] pub enum FacetCountError { AttributeNotSet(String), SyntaxError(String), @@ -37,6 +90,14 @@ pub enum FacetCountError { NoFacetSet, } +impl error::Error for FacetCountError {} + +impl ErrorCode for FacetCountError { + fn error_code(&self) -> Code { + Code::BadRequest + } +} + impl FacetCountError { pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError { let found = found.to_string(); @@ -63,69 +124,69 @@ impl fmt::Display for FacetCountError { } } -impl ResponseError { - pub fn internal(err: impl fmt::Display) -> ResponseError { - ResponseError::Internal(err.to_string()) +impl Error { + pub fn internal(err: impl fmt::Display) -> Error { + Error::Internal(err.to_string()) } - pub fn bad_request(err: impl fmt::Display) -> ResponseError { - ResponseError::BadRequest(err.to_string()) + pub fn bad_request(err: impl fmt::Display) -> Error { + 
Error::BadRequest(err.to_string()) } - pub fn missing_authorization_header() -> ResponseError { - ResponseError::MissingAuthorizationHeader + pub fn missing_authorization_header() -> Error { + Error::MissingAuthorizationHeader } - pub fn invalid_token(err: impl fmt::Display) -> ResponseError { - ResponseError::InvalidToken(err.to_string()) + pub fn invalid_token(err: impl fmt::Display) -> Error { + Error::InvalidToken(err.to_string()) } - pub fn not_found(err: impl fmt::Display) -> ResponseError { - ResponseError::NotFound(err.to_string()) + pub fn not_found(err: impl fmt::Display) -> Error { + Error::NotFound(err.to_string()) } - pub fn index_not_found(err: impl fmt::Display) -> ResponseError { - ResponseError::IndexNotFound(err.to_string()) + pub fn index_not_found(err: impl fmt::Display) -> Error { + Error::IndexNotFound(err.to_string()) } - pub fn document_not_found(err: impl fmt::Display) -> ResponseError { - ResponseError::DocumentNotFound(err.to_string()) + pub fn document_not_found(err: impl fmt::Display) -> Error { + Error::DocumentNotFound(err.to_string()) } - pub fn missing_header(err: impl fmt::Display) -> ResponseError { - ResponseError::MissingHeader(err.to_string()) + pub fn missing_header(err: impl fmt::Display) -> Error { + Error::MissingHeader(err.to_string()) } - pub fn bad_parameter(param: impl fmt::Display, err: impl fmt::Display) -> ResponseError { - ResponseError::BadParameter(param.to_string(), err.to_string()) + pub fn bad_parameter(param: impl fmt::Display, err: impl fmt::Display) -> Error { + Error::BadParameter(param.to_string(), err.to_string()) } - pub fn open_index(err: impl fmt::Display) -> ResponseError { - ResponseError::OpenIndex(err.to_string()) + pub fn open_index(err: impl fmt::Display) -> Error { + Error::OpenIndex(err.to_string()) } - pub fn create_index(err: impl fmt::Display) -> ResponseError { - ResponseError::CreateIndex(err.to_string()) + pub fn create_index(err: impl fmt::Display) -> Error { + 
Error::CreateIndex(err.to_string()) } - pub fn invalid_index_uid() -> ResponseError { - ResponseError::InvalidIndexUid + pub fn invalid_index_uid() -> Error { + Error::InvalidIndexUid } - pub fn maintenance() -> ResponseError { - ResponseError::Maintenance + pub fn maintenance() -> Error { + Error::Maintenance } - pub fn retrieve_document(doc_id: u32, err: impl fmt::Display) -> ResponseError { - ResponseError::RetrieveDocument(doc_id, err.to_string()) + pub fn retrieve_document(doc_id: u32, err: impl fmt::Display) -> Error { + Error::RetrieveDocument(doc_id, err.to_string()) } - pub fn search_documents(err: impl fmt::Display) -> ResponseError { - ResponseError::SearchDocuments(err.to_string()) + pub fn search_documents(err: impl fmt::Display) -> Error { + Error::SearchDocuments(err.to_string()) } } -impl fmt::Display for ResponseError { +impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::BadParameter(param, err) => write!(f, "Url parameter {} error: {}", param, err), @@ -137,17 +198,14 @@ impl fmt::Display for ResponseError { Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."), Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err), Self::Maintenance => f.write_str("Server is in maintenance, please try again later"), - Self::FilterParsing(err) => write!(f, "parsing error: {}", err), Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"), Self::MissingHeader(header) => write!(f, "Header {} is missing", header), Self::NotFound(err) => write!(f, "{} not found", err), Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err), Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err), Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err), - 
Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e), Self::PayloadTooLarge => f.write_str("Payload to large"), Self::UnsupportedMediaType => f.write_str("Unsupported media type"), - Self::FacetCount(e) => write!(f, "error with facet count: {}", e), } } } @@ -156,101 +214,53 @@ impl aweb::error::ResponseError for ResponseError { fn error_response(&self) -> aweb::HttpResponse { ResponseBuilder::new(self.status_code()).json(json!({ "message": self.to_string(), + "errorCode": self.error_name(), + "errorType": self.error_type(), + "errorLink": self.error_url(), })) } fn status_code(&self) -> StatusCode { - match *self { - Self::BadParameter(_, _) - | Self::BadRequest(_) - | Self::CreateIndex(_) - | Self::InvalidIndexUid - | Self::OpenIndex(_) - | Self::RetrieveDocument(_, _) - | Self::FacetExpression(_) - | Self::SearchDocuments(_) - | Self::FacetCount(_) - | Self::FilterParsing(_) => StatusCode::BAD_REQUEST, - Self::DocumentNotFound(_) - | Self::IndexNotFound(_) - | Self::NotFound(_) => StatusCode::NOT_FOUND, - Self::InvalidToken(_) - | Self::MissingHeader(_) => StatusCode::UNAUTHORIZED, - Self::MissingAuthorizationHeader => StatusCode::FORBIDDEN, - Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, - Self::Maintenance => StatusCode::SERVICE_UNAVAILABLE, - Self::PayloadTooLarge => StatusCode::PAYLOAD_TOO_LARGE, - Self::UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE, - } - } -} - -impl From for ResponseError { - fn from(err: meilisearch_core::HeedError) -> ResponseError { - ResponseError::Internal(err.to_string()) - } -} - -impl From for ResponseError { - fn from(err: meilisearch_core::FstError) -> ResponseError { - ResponseError::Internal(err.to_string()) - } -} - -impl From for ResponseError { - fn from(error: meilisearch_core::FacetError) -> ResponseError { - ResponseError::FacetExpression(error.to_string()) + self.http_status() } } impl From for ResponseError { fn from(err: meilisearch_core::Error) -> ResponseError 
{ - use meilisearch_core::pest_error::LineColLocation::*; - match err { - meilisearch_core::Error::FilterParseError(e) => { - let (line, column) = match e.line_col { - Span((line, _), (column, _)) => (line, column), - Pos((line, column)) => (line, column), - }; - let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message()); - - ResponseError::FilterParsing(message) - }, - meilisearch_core::Error::FacetError(e) => ResponseError::FacetExpression(e.to_string()), - _ => ResponseError::Internal(err.to_string()), - } + ResponseError { inner: Box::new(err) } } } impl From for ResponseError { fn from(err: meilisearch_schema::Error) -> ResponseError { - ResponseError::Internal(err.to_string()) + ResponseError { inner: Box::new(err) } } } -impl From for ResponseError { - fn from(err: actix_http::Error) -> ResponseError { - ResponseError::Internal(err.to_string()) +impl From for Error { + fn from(err: actix_http::Error) -> Error { + Error::Internal(err.to_string()) } } impl From for ResponseError { - fn from(other: FacetCountError) -> ResponseError { - ResponseError::FacetCount(other.to_string()) + fn from(err: FacetCountError) -> ResponseError { + ResponseError { inner: Box::new(err) } } } -impl From for ResponseError { - fn from(err: JsonPayloadError) -> ResponseError { +impl From for Error { + fn from(err: JsonPayloadError) -> Error { match err { - JsonPayloadError::Deserialize(err) => ResponseError::BadRequest(format!("Invalid JSON: {}", err)), - JsonPayloadError::Overflow => ResponseError::PayloadTooLarge, - JsonPayloadError::ContentType => ResponseError::UnsupportedMediaType, - JsonPayloadError::Payload(err) => ResponseError::BadRequest(format!("Problem while decoding the request: {}", err)), + JsonPayloadError::Deserialize(err) => Error::BadRequest(format!("Invalid JSON: {}", err)), + JsonPayloadError::Overflow => Error::PayloadTooLarge, + JsonPayloadError::ContentType => Error::UnsupportedMediaType, + 
JsonPayloadError::Payload(err) => Error::BadRequest(format!("Problem while decoding the request: {}", err)), } } } pub fn json_error_handler(err: JsonPayloadError) -> ResponseError { - err.into() + let error = Error::from(err); + error.into() } diff --git a/meilisearch-http/src/helpers/authentication.rs b/meilisearch-http/src/helpers/authentication.rs index 894718d53..927665ffe 100644 --- a/meilisearch-http/src/helpers/authentication.rs +++ b/meilisearch-http/src/helpers/authentication.rs @@ -4,10 +4,10 @@ use std::rc::Rc; use std::task::{Context, Poll}; use actix_service::{Service, Transform}; -use actix_web::{dev::ServiceRequest, dev::ServiceResponse, Error}; +use actix_web::{dev::ServiceRequest, dev::ServiceResponse}; use futures::future::{err, ok, Future, Ready}; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::Data; #[derive(Clone)] @@ -19,13 +19,13 @@ pub enum Authentication { impl Transform for Authentication where - S: Service, Error = Error>, + S: Service, Error = actix_web::Error>, S::Future: 'static, B: 'static, { type Request = ServiceRequest; type Response = ServiceResponse; - type Error = Error; + type Error = actix_web::Error; type InitError = (); type Transform = LoggingMiddleware; type Future = Ready>; @@ -45,13 +45,13 @@ pub struct LoggingMiddleware { impl Service for LoggingMiddleware where - S: Service, Error = Error> + 'static, + S: Service, Error = actix_web::Error> + 'static, S::Future: 'static, B: 'static, { type Request = ServiceRequest; type Response = ServiceResponse; - type Error = Error; + type Error = actix_web::Error; type Future = Pin>>>; fn poll_ready(&mut self, cx: &mut Context) -> Poll> { @@ -71,10 +71,10 @@ where let auth_header = match req.headers().get("X-Meili-API-Key") { Some(auth) => match auth.to_str() { Ok(auth) => auth, - Err(_) => return Box::pin(err(ResponseError::MissingAuthorizationHeader.into())), + Err(_) => return 
Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into())), }, None => { - return Box::pin(err(ResponseError::MissingAuthorizationHeader.into())); + return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into())); } }; @@ -95,7 +95,7 @@ where Box::pin(svc.call(req)) } else { Box::pin(err( - ResponseError::InvalidToken(auth_header.to_string()).into() + ResponseError::from(Error::InvalidToken(auth_header.to_string())).into() )) } } diff --git a/meilisearch-http/src/helpers/meilisearch.rs b/meilisearch-http/src/helpers/meilisearch.rs index b6ae2c6b8..65b6da08f 100644 --- a/meilisearch-http/src/helpers/meilisearch.rs +++ b/meilisearch-http/src/helpers/meilisearch.rs @@ -5,11 +5,11 @@ use std::time::Instant; use indexmap::IndexMap; use log::error; -use meilisearch_core::Filter; +use meilisearch_core::{Filter, MainReader}; use meilisearch_core::facets::FacetFilter; use meilisearch_core::criterion::*; use meilisearch_core::settings::RankingRule; -use meilisearch_core::{Highlight, Index, MainT, RankedMap}; +use meilisearch_core::{Highlight, Index, RankedMap}; use meilisearch_schema::{FieldId, Schema}; use meilisearch_tokenizer::is_cjk; use serde::{Deserialize, Serialize}; @@ -17,7 +17,7 @@ use serde_json::Value; use siphasher::sip::SipHasher; use slice_group_by::GroupBy; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; pub trait IndexSearchExt { fn new_search(&self, query: String) -> SearchBuilder; @@ -107,12 +107,12 @@ impl<'a> SearchBuilder<'a> { self } - pub fn search(self, reader: &heed::RoTxn) -> Result { + pub fn search(self, reader: &MainReader) -> Result { let schema = self .index .main .schema(reader)? 
- .ok_or(ResponseError::internal("missing schema"))?; + .ok_or(Error::internal("missing schema"))?; let ranked_map = self.index.main.ranked_map(reader)?.unwrap_or_default(); @@ -159,7 +159,7 @@ impl<'a> SearchBuilder<'a> { let start = Instant::now(); let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit)); - let search_result = result.map_err(ResponseError::search_documents)?; + let search_result = result.map_err(Error::search_documents)?; let time_ms = start.elapsed().as_millis() as usize; let mut all_attributes: HashSet<&str> = HashSet::new(); @@ -194,8 +194,8 @@ impl<'a> SearchBuilder<'a> { let mut document: IndexMap = self .index .document(reader, Some(&all_attributes), doc.id) - .map_err(|e| ResponseError::retrieve_document(doc.id.0, e))? - .ok_or(ResponseError::internal( + .map_err(|e| Error::retrieve_document(doc.id.0, e))? + .ok_or(Error::internal( "Impossible to retrieve the document; Corrupted data", ))?; @@ -257,7 +257,7 @@ impl<'a> SearchBuilder<'a> { pub fn get_criteria( &self, - reader: &heed::RoTxn, + reader: &MainReader, ranked_map: &'a RankedMap, schema: &Schema, ) -> Result>, ResponseError> { diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs index e2e24d4d2..8e81d9221 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -7,14 +7,17 @@ pub mod models; pub mod option; pub mod routes; -pub use self::data::Data; -use self::error::json_error_handler; use actix_http::Error; use actix_service::ServiceFactory; use actix_web::{dev, web, App}; +use chrono::Utc; use log::error; + use meilisearch_core::ProcessedUpdateResult; +pub use self::data::Data; +use self::error::{json_error_handler, ResponseError}; + pub fn create_app( data: &Data, ) -> App< @@ -55,28 +58,23 @@ pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpda if let Some(index) = data.db.open_index(&index_uid) { let db = &data.db; - let mut writer = match db.main_write_txn() { - 
Ok(writer) => writer, - Err(e) => { - error!("Impossible to get write_txn; {}", e); - return; + let res = db.main_write::<_, _, ResponseError>(|mut writer| { + if let Err(e) = data.db.compute_stats(&mut writer, &index_uid) { + error!("Impossible to compute stats; {}", e) } - }; - if let Err(e) = data.compute_stats(&mut writer, &index_uid) { - error!("Impossible to compute stats; {}", e) - } + if let Err(e) = data.db.set_last_update(&mut writer, &Utc::now()) { + error!("Impossible to update last_update; {}", e) + } - if let Err(e) = data.set_last_update(&mut writer) { - error!("Impossible to update last_update; {}", e) - } - - if let Err(e) = index.main.put_updated_at(&mut writer) { - error!("Impossible to update updated_at; {}", e) - } - - if let Err(e) = writer.commit() { - error!("Impossible to get write_txn; {}", e); + if let Err(e) = index.main.put_updated_at(&mut writer) { + error!("Impossible to update updated_at; {}", e) + } + Ok(()) + }); + match res { + Ok(_) => (), + Err(e) => error!("{}", e), } } } diff --git a/meilisearch-http/src/routes/document.rs b/meilisearch-http/src/routes/document.rs index eca88a590..e7ad3801b 100644 --- a/meilisearch-http/src/routes/document.rs +++ b/meilisearch-http/src/routes/document.rs @@ -7,10 +7,10 @@ use meilisearch_core::update; use serde::Deserialize; use serde_json::Value; -use crate::error::ResponseError; +use crate::Data; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; -use crate::Data; type Document = IndexMap; @@ -41,18 +41,19 @@ async fn get_document( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; + let internal_id = index.main .external_to_internal_docid(&reader, &path.document_id)? 
- .ok_or(ResponseError::document_not_found(&path.document_id))?; + .ok_or(Error::document_not_found(&path.document_id))?; - let response: Document = index + let document: Document = index .document(&reader, None, internal_id)? - .ok_or(ResponseError::document_not_found(&path.document_id))?; + .ok_or(Error::document_not_found(&path.document_id))?; - Ok(HttpResponse::Ok().json(response)) + Ok(HttpResponse::Ok().json(document)) } #[delete( @@ -66,16 +67,12 @@ async fn delete_document( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; - - let mut update_writer = data.db.update_write_txn()?; + .ok_or(Error::index_not_found(&path.index_uid))?; let mut documents_deletion = index.documents_deletion(); documents_deletion.delete_document_by_external_docid(path.document_id.clone()); - let update_id = documents_deletion.finalize(&mut update_writer)?; - - update_writer.commit()?; + let update_id = data.db.update_write(|w| documents_deletion.finalize(w))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -97,13 +94,12 @@ async fn get_all_documents( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let offset = params.offset.unwrap_or(0); let limit = params.limit.unwrap_or(20); let reader = data.db.main_read_txn()?; - let documents_ids: Result, _> = index .documents_fields_counts .documents_ids(&reader)? @@ -111,23 +107,21 @@ async fn get_all_documents( .take(limit) .collect(); - let documents_ids = documents_ids?; - let attributes: Option> = params .attributes_to_retrieve .as_ref() .map(|a| a.split(',').collect()); - let mut response = Vec::new(); - for document_id in documents_ids { + let mut documents = Vec::new(); + for document_id in documents_ids? 
{ if let Ok(Some(document)) = index.document::(&reader, attributes.as_ref(), document_id) { - response.push(document); + documents.push(document); } } - Ok(HttpResponse::Ok().json(response)) + Ok(HttpResponse::Ok().json(documents)) } fn find_primary_key(document: &IndexMap) -> Option { @@ -155,14 +149,14 @@ async fn update_multiple_documents( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; let mut schema = index .main .schema(&reader)? - .ok_or(ResponseError::internal("Impossible to retrieve the schema"))?; + .ok_or(Error::internal("Impossible to retrieve the schema"))?; if schema.primary_key().is_none() { let id = match ¶ms.primary_key { @@ -170,16 +164,14 @@ async fn update_multiple_documents( None => body .first() .and_then(find_primary_key) - .ok_or(ResponseError::bad_request("Could not infer a primary key"))?, + .ok_or(Error::bad_request("Could not infer a primary key"))?, }; - let mut writer = data.db.main_write_txn()?; - schema .set_primary_key(&id) - .map_err(ResponseError::bad_request)?; - index.main.put_schema(&mut writer, &schema)?; - writer.commit()?; + .map_err(Error::bad_request)?; + + data.db.main_write(|w| index.main.put_schema(w, &schema))?; } let mut document_addition = if is_partial { @@ -192,9 +184,7 @@ async fn update_multiple_documents( document_addition.update_document(document); } - let mut update_writer = data.db.update_write_txn()?; - let update_id = document_addition.finalize(&mut update_writer)?; - update_writer.commit()?; + let update_id = data.db.update_write(|w| document_addition.finalize(w))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -231,9 +221,8 @@ async fn delete_documents( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; - let mut 
writer = data.db.update_write_txn()?; let mut documents_deletion = index.documents_deletion(); @@ -242,9 +231,7 @@ async fn delete_documents( documents_deletion.delete_document_by_external_docid(document_id); } - let update_id = documents_deletion.finalize(&mut writer)?; - - writer.commit()?; + let update_id = data.db.update_write(|w| documents_deletion.finalize(w))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -257,13 +244,9 @@ async fn clear_all_documents( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; - - let update_id = index.clear_all(&mut writer)?; - - writer.commit()?; + let update_id = data.db.update_write(|w| index.clear_all(w))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/src/routes/health.rs b/meilisearch-http/src/routes/health.rs index 826e8ee02..0f0a3a203 100644 --- a/meilisearch-http/src/routes/health.rs +++ b/meilisearch-http/src/routes/health.rs @@ -1,14 +1,11 @@ use actix_web::{web, HttpResponse}; use actix_web_macros::{get, put}; -use heed::types::{Str, Unit}; use serde::Deserialize; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::Data; -const UNHEALTHY_KEY: &str = "_is_unhealthy"; - pub fn services(cfg: &mut web::ServiceConfig) { cfg.service(get_health).service(change_healthyness); } @@ -16,31 +13,19 @@ pub fn services(cfg: &mut web::ServiceConfig) { #[get("/health", wrap = "Authentication::Private")] async fn get_health(data: web::Data) -> Result { let reader = data.db.main_read_txn()?; - - let common_store = data.db.common_store(); - - if let Ok(Some(_)) = common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY) { - return Err(ResponseError::Maintenance); + if let Ok(Some(_)) = data.db.get_health(&reader) { + 
return Err(Error::Maintenance.into()); } - Ok(HttpResponse::Ok().finish()) } async fn set_healthy(data: web::Data) -> Result { - let mut writer = data.db.main_write_txn()?; - let common_store = data.db.common_store(); - common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY)?; - writer.commit()?; - + data.db.main_write(|w| data.db.set_healthy(w))?; Ok(HttpResponse::Ok().finish()) } async fn set_unhealthy(data: web::Data) -> Result { - let mut writer = data.db.main_write_txn()?; - let common_store = data.db.common_store(); - common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &())?; - writer.commit()?; - + data.db.main_write(|w| data.db.set_unhealthy(w))?; Ok(HttpResponse::Ok().finish()) } diff --git a/meilisearch-http/src/routes/index.rs b/meilisearch-http/src/routes/index.rs index d43967bdd..f12179902 100644 --- a/meilisearch-http/src/routes/index.rs +++ b/meilisearch-http/src/routes/index.rs @@ -5,7 +5,7 @@ use log::error; use rand::seq::SliceRandom; use serde::{Deserialize, Serialize}; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::IndexParam; use crate::Data; @@ -42,28 +42,27 @@ struct IndexResponse { #[get("/indexes", wrap = "Authentication::Private")] async fn list_indexes(data: web::Data) -> Result { let reader = data.db.main_read_txn()?; - - let mut response = Vec::new(); + let mut indexes = Vec::new(); for index_uid in data.db.indexes_uids() { let index = data.db.open_index(&index_uid); match index { Some(index) => { - let name = index.main.name(&reader)?.ok_or(ResponseError::internal( - "Impossible to get the name of an index", + let name = index.main.name(&reader)?.ok_or(Error::internal( + "Impossible to get the name of an index", ))?; let created_at = index .main .created_at(&reader)? 
- .ok_or(ResponseError::internal( - "Impossible to get the create date of an index", + .ok_or(Error::internal( + "Impossible to get the create date of an index", ))?; let updated_at = index .main .updated_at(&reader)? - .ok_or(ResponseError::internal( - "Impossible to get the last update date of an index", + .ok_or(Error::internal( + "Impossible to get the last update date of an index", ))?; let primary_key = match index.main.schema(&reader) { @@ -81,7 +80,7 @@ async fn list_indexes(data: web::Data) -> Result error!( "Index {} is referenced in the indexes list but cannot be found", @@ -90,7 +89,7 @@ async fn list_indexes(data: web::Data) -> Result None, }; - - Ok(HttpResponse::Ok().json(IndexResponse { + let index_response = IndexResponse { name, uid: path.index_uid.clone(), created_at, updated_at, primary_key, - })) + }; + + Ok(HttpResponse::Ok().json(index_response)) } #[derive(Debug, Deserialize)] @@ -152,9 +151,9 @@ async fn create_index( body: web::Json, ) -> Result { if let (None, None) = (body.name.clone(), body.uid.clone()) { - return Err(ResponseError::bad_request( + return Err(Error::bad_request( "Index creation must have an uid", - )); + ).into()); } let uid = match &body.uid { @@ -165,7 +164,7 @@ async fn create_index( { uid.to_owned() } else { - return Err(ResponseError::InvalidIndexUid); + return Err(Error::InvalidIndexUid.into()); } } None => loop { @@ -179,41 +178,41 @@ async fn create_index( let created_index = data .db .create_index(&uid) - .map_err(ResponseError::create_index)?; + .map_err(Error::create_index)?; - let mut writer = data.db.main_write_txn()?; + let index_response = data.db.main_write::<_, _, ResponseError>(|mut writer| { + let name = body.name.as_ref().unwrap_or(&uid); + created_index.main.put_name(&mut writer, name)?; - let name = body.name.as_ref().unwrap_or(&uid); - created_index.main.put_name(&mut writer, name)?; + let created_at = created_index + .main + .created_at(&writer)? 
+ .ok_or(Error::internal("Impossible to read created at"))?; - let created_at = created_index - .main - .created_at(&writer)? - .ok_or(ResponseError::internal("Impossible to read created at"))?; + let updated_at = created_index + .main + .updated_at(&writer)? + .ok_or(Error::internal("Impossible to read updated at"))?; - let updated_at = created_index - .main - .updated_at(&writer)? - .ok_or(ResponseError::internal("Impossible to read updated at"))?; - - if let Some(id) = body.primary_key.clone() { - if let Some(mut schema) = created_index.main.schema(&writer)? { - schema - .set_primary_key(&id) - .map_err(ResponseError::bad_request)?; - created_index.main.put_schema(&mut writer, &schema)?; + if let Some(id) = body.primary_key.clone() { + if let Some(mut schema) = created_index.main.schema(&writer)? { + schema + .set_primary_key(&id) + .map_err(Error::bad_request)?; + created_index.main.put_schema(&mut writer, &schema)?; + } } - } + let index_response = IndexResponse { + name: name.to_string(), + uid, + created_at, + updated_at, + primary_key: body.primary_key.clone(), + }; + Ok(index_response) + })?; - writer.commit()?; - - Ok(HttpResponse::Created().json(IndexResponse { - name: name.to_string(), - uid, - created_at, - updated_at, - primary_key: body.primary_key.clone(), - })) + Ok(HttpResponse::Created().json(index_response)) } #[derive(Debug, Deserialize)] @@ -242,49 +241,47 @@ async fn update_index( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.main_write_txn()?; + data.db.main_write::<_, _, ResponseError>(|writer| { + if let Some(name) = &body.name { + index.main.put_name(writer, name)?; + } - if let Some(name) = &body.name { - index.main.put_name(&mut writer, name)?; - } - - if let Some(id) = body.primary_key.clone() { - if let Some(mut schema) = index.main.schema(&writer)? 
{ - match schema.primary_key() { - Some(_) => { - return Err(ResponseError::bad_request( - "The primary key cannot be updated", - )); - } - None => { - schema.set_primary_key(&id)?; - index.main.put_schema(&mut writer, &schema)?; + if let Some(id) = body.primary_key.clone() { + if let Some(mut schema) = index.main.schema(writer)? { + match schema.primary_key() { + Some(_) => { + return Err(Error::bad_request( + "The primary key cannot be updated", + ).into()); + } + None => { + schema.set_primary_key(&id)?; + index.main.put_schema(writer, &schema)?; + } } } } - } - - index.main.put_updated_at(&mut writer)?; - writer.commit()?; + index.main.put_updated_at(writer)?; + Ok(()) + })?; let reader = data.db.main_read_txn()?; - - let name = index.main.name(&reader)?.ok_or(ResponseError::internal( - "Impossible to get the name of an index", + let name = index.main.name(&reader)?.ok_or(Error::internal( + "Impossible to get the name of an index", ))?; let created_at = index .main .created_at(&reader)? - .ok_or(ResponseError::internal( - "Impossible to get the create date of an index", + .ok_or(Error::internal( + "Impossible to get the create date of an index", ))?; let updated_at = index .main .updated_at(&reader)? 
- .ok_or(ResponseError::internal( - "Impossible to get the last update date of an index", + .ok_or(Error::internal( + "Impossible to get the last update date of an index", ))?; let primary_key = match index.main.schema(&reader) { @@ -295,13 +292,15 @@ async fn update_index( _ => None, }; - Ok(HttpResponse::Ok().json(IndexResponse { + let index_response = IndexResponse { name, uid: path.index_uid.clone(), created_at, updated_at, primary_key, - })) + }; + + Ok(HttpResponse::Ok().json(index_response)) } #[delete("/indexes/{index_uid}", wrap = "Authentication::Private")] @@ -331,7 +330,7 @@ async fn get_update_status( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.update_read_txn()?; @@ -339,10 +338,10 @@ async fn get_update_status( match status { Some(status) => Ok(HttpResponse::Ok().json(status)), - None => Err(ResponseError::NotFound(format!( + None => Err(Error::NotFound(format!( "Update {} not found", path.update_id - ))), + )).into()), } } @@ -354,7 +353,7 @@ async fn get_all_updates_status( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.update_read_txn()?; diff --git a/meilisearch-http/src/routes/search.rs b/meilisearch-http/src/routes/search.rs index 2bc7b45d0..1a2521386 100644 --- a/meilisearch-http/src/routes/search.rs +++ b/meilisearch-http/src/routes/search.rs @@ -7,7 +7,7 @@ use actix_web_macros::get; use serde::Deserialize; use serde_json::Value; -use crate::error::{ResponseError, FacetCountError}; +use crate::error::{Error, FacetCountError, ResponseError}; use crate::helpers::meilisearch::IndexSearchExt; use crate::helpers::Authentication; use crate::routes::IndexParam; @@ -45,14 +45,13 @@ async fn search_with_url_query( let index = data .db .open_index(&path.index_uid) - 
.ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let schema = index .main .schema(&reader)? - .ok_or(ResponseError::internal("Impossible to retrieve the schema"))?; + .ok_or(Error::internal("Impossible to retrieve the schema"))?; let mut search_builder = index.new_search(params.q.clone()); @@ -88,9 +87,9 @@ async fn search_with_url_query( } if let Some(ref facet_filters) = params.facet_filters { - match index.main.attributes_for_faceting(&reader)? { - Some(ref attrs) => { search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, attrs)?); }, - None => return Err(ResponseError::FacetExpression("can't filter on facets, as no facet is set".to_string())) + let attrs = index.main.attributes_for_faceting(&reader)?; + if let Some(attrs) = attrs { + search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?); } } @@ -100,7 +99,7 @@ async fn search_with_url_query( let field_ids = prepare_facet_list(&facets, &schema, attrs)?; search_builder.add_facets(field_ids); }, - None => return Err(FacetCountError::NoFacetSet.into()) + None => return Err(FacetCountError::NoFacetSet.into()), } } @@ -160,8 +159,9 @@ async fn search_with_url_query( search_builder.get_matches(); } } + let search_result = search_builder.search(&reader)?; - Ok(HttpResponse::Ok().json(search_builder.search(&reader)?)) + Ok(HttpResponse::Ok().json(search_result)) } /// Parses the incoming string into an array of attributes for which to return a count. 
It returns diff --git a/meilisearch-http/src/routes/setting.rs b/meilisearch-http/src/routes/setting.rs index 04ed8ceed..c7abd6ce4 100644 --- a/meilisearch-http/src/routes/setting.rs +++ b/meilisearch-http/src/routes/setting.rs @@ -3,7 +3,7 @@ use actix_web_macros::{delete, get, post}; use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES}; use std::collections::{BTreeMap, BTreeSet, HashSet}; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -37,15 +37,16 @@ async fn update_all( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; - let settings = body - .into_inner() - .into_update() - .map_err(ResponseError::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|writer| { + let settings = body + .into_inner() + .into_update() + .map_err(Error::bad_request)?; + let update_id = index.settings_update(writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -58,22 +59,22 @@ async fn get_all( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let stop_words_fst = index.main.stop_words_fst(&reader)?; - let stop_words = stop_words_fst.stream().into_strs()?; - let stop_words: BTreeSet = stop_words.into_iter().collect(); + let stop_words: BTreeSet = index + .main + .stop_words(&reader)? 
+ .into_iter() + .collect(); - let synonyms_fst = index.main.synonyms_fst(&reader)?; - let synonyms_list = synonyms_fst.stream().into_strs()?; + let synonyms_list = index.main.synonyms(&reader)?; let mut synonyms = BTreeMap::new(); let index_synonyms = &index.synonyms; for synonym in synonyms_list { - let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; - let list = alternative_list.stream().into_strs()?; + let list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; synonyms.insert(synonym, list); } @@ -138,8 +139,7 @@ async fn delete_all( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { ranking_rules: UpdateState::Clear, @@ -153,8 +153,7 @@ async fn delete_all( attributes_for_faceting: UpdateState::Clear, }; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -170,7 +169,7 @@ async fn get_rules( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; let ranking_rules = index @@ -196,17 +195,15 @@ async fn update_rules( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = Settings { ranking_rules: Some(body.into_inner()), ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; - let settings = settings.into_update().map_err(ResponseError::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let settings = 
settings.into_update().map_err(Error::bad_request)?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -222,17 +219,14 @@ async fn delete_rules( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { ranking_rules: UpdateState::Clear, ..SettingsUpdate::default() }; - let update_id = index.settings_update(&mut writer, settings)?; - - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -248,7 +242,7 @@ async fn get_distinct( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; let distinct_attribute = index.main.distinct_attribute(&reader)?; @@ -267,17 +261,15 @@ async fn update_distinct( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = Settings { distinct_attribute: Some(body.into_inner()), ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; - let settings = settings.into_update().map_err(ResponseError::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let settings = settings.into_update().map_err(Error::bad_request)?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -293,17 +285,14 @@ async fn delete_distinct( let index = data .db .open_index(&path.index_uid) - 
.ok_or(ResponseError::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { distinct_attribute: UpdateState::Clear, ..SettingsUpdate::default() }; - let update_id = index.settings_update(&mut writer, settings)?; - - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -319,7 +308,7 @@ async fn get_searchable( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; let schema = index.main.schema(&reader)?; let searchable_attributes: Option> = @@ -340,17 +329,16 @@ async fn update_searchable( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = Settings { searchable_attributes: Some(body.into_inner()), ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; - let settings = settings.into_update().map_err(ResponseError::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let settings = settings.into_update().map_err(Error::bad_request)?; + + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -366,16 +354,14 @@ async fn delete_searchable( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { searchable_attributes: UpdateState::Clear, ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut 
writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -391,7 +377,7 @@ async fn get_displayed( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; let schema = index.main.schema(&reader)?; @@ -414,17 +400,15 @@ async fn update_displayed( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = Settings { displayed_attributes: Some(body.into_inner()), ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; - let settings = settings.into_update().map_err(ResponseError::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let settings = settings.into_update().map_err(Error::bad_request)?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -440,16 +424,14 @@ async fn delete_displayed( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { displayed_attributes: UpdateState::Clear, ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -465,7 +447,7 @@ async fn get_accept_new_fields( let index = data .db .open_index(&path.index_uid) - 
.ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; let schema = index.main.schema(&reader)?; @@ -487,17 +469,15 @@ async fn update_accept_new_fields( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = Settings { accept_new_fields: Some(body.into_inner()), ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; - let settings = settings.into_update().map_err(ResponseError::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let settings = settings.into_update().map_err(Error::bad_request)?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/src/routes/stats.rs b/meilisearch-http/src/routes/stats.rs index 6c31283cd..5f3e09dd5 100644 --- a/meilisearch-http/src/routes/stats.rs +++ b/meilisearch-http/src/routes/stats.rs @@ -10,7 +10,7 @@ use serde::Serialize; use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt}; use walkdir::WalkDir; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::IndexParam; use crate::Data; @@ -39,7 +39,7 @@ async fn index_stats( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; @@ -50,8 +50,8 @@ async fn index_stats( let update_reader = data.db.update_read_txn()?; let is_indexing = - data.is_indexing(&update_reader, &path.index_uid)? - .ok_or(ResponseError::internal( + data.db.is_indexing(&update_reader, &path.index_uid)? 
+ .ok_or(Error::internal( "Impossible to know if the database is indexing", ))?; @@ -86,8 +86,8 @@ async fn get_stats(data: web::Data) -> Result let fields_distribution = index.main.fields_distribution(&reader)?.unwrap_or_default(); - let is_indexing = data.is_indexing(&update_reader, &index_uid)?.ok_or( - ResponseError::internal("Impossible to know if the database is indexing"), + let is_indexing = data.db.is_indexing(&update_reader, &index_uid)?.ok_or( + Error::internal("Impossible to know if the database is indexing"), )?; let response = IndexStatsResponse { @@ -111,7 +111,7 @@ async fn get_stats(data: web::Data) -> Result .filter(|metadata| metadata.is_file()) .fold(0, |acc, m| acc + m.len()); - let last_update = data.last_update(&reader)?; + let last_update = data.db.last_update(&reader)?; Ok(HttpResponse::Ok().json(StatsResult { database_size, diff --git a/meilisearch-http/src/routes/stop_words.rs b/meilisearch-http/src/routes/stop_words.rs index 90814e423..eaea22827 100644 --- a/meilisearch-http/src/routes/stop_words.rs +++ b/meilisearch-http/src/routes/stop_words.rs @@ -3,7 +3,7 @@ use actix_web_macros::{delete, get, post}; use meilisearch_core::settings::{SettingsUpdate, UpdateState}; use std::collections::BTreeSet; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -23,10 +23,9 @@ async fn get( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let stop_words_fst = index.main.stop_words_fst(&reader)?; - let stop_words = stop_words_fst.stream().into_strs()?; + let stop_words = index.main.stop_words(&reader)?; Ok(HttpResponse::Ok().json(stop_words)) } @@ -43,16 +42,14 @@ async fn update( let index = data .db .open_index(&path.index_uid) - 
.ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { stop_words: UpdateState::Update(body.into_inner()), ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -68,16 +65,14 @@ async fn delete( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { stop_words: UpdateState::Clear, ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/src/routes/synonym.rs b/meilisearch-http/src/routes/synonym.rs index 6c9e77f6d..f047e9dd3 100644 --- a/meilisearch-http/src/routes/synonym.rs +++ b/meilisearch-http/src/routes/synonym.rs @@ -5,7 +5,7 @@ use actix_web_macros::{delete, get, post}; use indexmap::IndexMap; use meilisearch_core::settings::{SettingsUpdate, UpdateState}; -use crate::error::ResponseError; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -25,18 +25,16 @@ async fn get( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let synonyms_fst = index.main.synonyms_fst(&reader)?; - let synonyms_list = synonyms_fst.stream().into_strs()?; + 
let synonyms_list = index.main.synonyms(&reader)?; let mut synonyms = IndexMap::new(); let index_synonyms = &index.synonyms; for synonym in synonyms_list { - let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; - let list = alternative_list.stream().into_strs()?; + let list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; synonyms.insert(synonym, list); } @@ -55,16 +53,14 @@ async fn update( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { synonyms: UpdateState::Update(body.into_inner()), ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -80,17 +76,14 @@ async fn delete( let index = data .db .open_index(&path.index_uid) - .ok_or(ResponseError::index_not_found(&path.index_uid))?; + .ok_or(Error::index_not_found(&path.index_uid))?; let settings = SettingsUpdate { synonyms: UpdateState::Clear, ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - - writer.commit()?; + let update_id = data.db.update_write(|w| index.settings_update(w, settings))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/tests/documents_delete.rs b/meilisearch-http/tests/documents_delete.rs index 7edc9ac63..cd0bfc142 100644 --- a/meilisearch-http/tests/documents_delete.rs +++ b/meilisearch-http/tests/documents_delete.rs @@ -14,7 +14,7 @@ async fn delete() { assert_eq!(status_code, 404); } -// Resolve teh issue https://github.com/meilisearch/MeiliSearch/issues/493 +// Resolve the issue 
https://github.com/meilisearch/MeiliSearch/issues/493 #[actix_rt::test] async fn delete_batch() { let mut server = common::Server::with_uid("movies"); diff --git a/meilisearch-http/tests/index.rs b/meilisearch-http/tests/index.rs index 6ea81f606..ed67f2d1d 100644 --- a/meilisearch-http/tests/index.rs +++ b/meilisearch-http/tests/index.rs @@ -382,7 +382,7 @@ async fn create_index_failed() { assert_eq!(status_code, 400); let message = res_value["message"].as_str().unwrap(); - assert_eq!(res_value.as_object().unwrap().len(), 1); + assert_eq!(res_value.as_object().unwrap().len(), 4); assert_eq!(message, "Index creation must have an uid"); // 3 - Create a index with extra data @@ -462,7 +462,7 @@ async fn create_index_with_invalid_uid() { assert_eq!(status_code, 400); let message = response["message"].as_str().unwrap(); - assert_eq!(response.as_object().unwrap().len(), 1); + assert_eq!(response.as_object().unwrap().len(), 4); assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."); // 2 - Create the index with invalid uid @@ -475,7 +475,7 @@ async fn create_index_with_invalid_uid() { assert_eq!(status_code, 400); let message = response["message"].as_str().unwrap(); - assert_eq!(response.as_object().unwrap().len(), 1); + assert_eq!(response.as_object().unwrap().len(), 4); assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."); // 3 - Create the index with invalid uid @@ -488,7 +488,7 @@ async fn create_index_with_invalid_uid() { assert_eq!(status_code, 400); let message = response["message"].as_str().unwrap(); - assert_eq!(response.as_object().unwrap().len(), 1); + assert_eq!(response.as_object().unwrap().len(), 4); assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric 
characters, hyphens (-) and underscores (_)."); // 4 - Create the index with invalid uid @@ -501,7 +501,7 @@ async fn create_index_with_invalid_uid() { assert_eq!(status_code, 400); let message = response["message"].as_str().unwrap(); - assert_eq!(response.as_object().unwrap().len(), 1); + assert_eq!(response.as_object().unwrap().len(), 4); assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."); } @@ -645,12 +645,10 @@ async fn check_add_documents_without_primary_key() { let (response, status_code) = server.add_or_replace_multiple_documents_sync(body).await; - let expected = json!({ - "message": "Could not infer a primary key" - }); - + let message = response["message"].as_str().unwrap(); + assert_eq!(response.as_object().unwrap().len(), 4); + assert_eq!(message, "Could not infer a primary key"); assert_eq!(status_code, 400); - assert_json_eq!(response, expected, ordered: false); } #[actix_rt::test] diff --git a/meilisearch-schema/Cargo.toml b/meilisearch-schema/Cargo.toml index 712fdb008..a795fa1db 100644 --- a/meilisearch-schema/Cargo.toml +++ b/meilisearch-schema/Cargo.toml @@ -8,6 +8,7 @@ edition = "2018" [dependencies] bincode = "1.2.1" indexmap = { version = "1.3.2", features = ["serde-1"] } +meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" } serde = { version = "1.0.105", features = ["derive"] } serde_json = { version = "1.0.50", features = ["preserve_order"] } toml = { version = "0.5.6", features = ["preserve_order"] } diff --git a/meilisearch-schema/src/error.rs b/meilisearch-schema/src/error.rs index c31596df2..8dcc0a7a4 100644 --- a/meilisearch-schema/src/error.rs +++ b/meilisearch-schema/src/error.rs @@ -1,6 +1,7 @@ - use std::{error, fmt}; +use meilisearch_error::{ErrorCode, Code}; + pub type SResult<T> = Result<T, Error>; #[derive(Debug)] @@ -22,3 +23,15 @@ impl fmt::Display for Error { } impl error::Error for Error {} + +impl
ErrorCode for Error { + fn error_code(&self) -> Code { + use Error::*; + + match self { + FieldNameNotFound(_) => Code::Internal, + MaxFieldsLimitExceeded => Code::MaxFieldsLimitExceeded, + PrimaryKeyAlreadyPresent => Code::PrimaryKeyAlreadyPresent, + } + } +}