From d69180ec67e786fd63cfcd2b21f8fabbfa69c3a1 Mon Sep 17 00:00:00 2001 From: mpostma Date: Fri, 22 May 2020 12:03:57 +0200 Subject: [PATCH] refactor errors / isolate core/http errors --- Cargo.lock | 10 ++ meilisearch-core/Cargo.toml | 1 + meilisearch-core/src/database.rs | 107 ++++++++++-- meilisearch-core/src/error.rs | 17 +- meilisearch-core/src/facets.rs | 19 +-- meilisearch-core/src/query_builder.rs | 2 +- meilisearch-core/src/query_tree.rs | 2 +- .../src/store/documents_fields_counts.rs | 7 +- meilisearch-core/src/store/main.rs | 160 ++++++++++-------- meilisearch-core/src/store/mod.rs | 9 +- meilisearch-core/src/store/synonyms.rs | 13 +- meilisearch-core/src/update/customs_update.rs | 7 +- meilisearch-http/Cargo.toml | 1 + meilisearch-http/src/data.rs | 28 +-- meilisearch-http/src/error.rs | 125 ++++++-------- .../src/helpers/authentication.rs | 8 +- meilisearch-http/src/helpers/meilisearch.rs | 6 +- meilisearch-http/src/lib.rs | 40 +++-- meilisearch-http/src/routes/document.rs | 70 +++----- meilisearch-http/src/routes/health.rs | 41 ++--- meilisearch-http/src/routes/index.rs | 157 +++++++++-------- meilisearch-http/src/routes/search.rs | 16 +- meilisearch-http/src/routes/setting.rs | 144 +++++++++------- meilisearch-http/src/routes/stats.rs | 8 +- meilisearch-http/src/routes/stop_words.rs | 25 +-- meilisearch-http/src/routes/synonym.rs | 31 ++-- meilisearch-http/tests/documents_delete.rs | 1 + meilisearch-schema/Cargo.toml | 1 + meilisearch-schema/src/error.rs | 9 +- 29 files changed, 585 insertions(+), 480 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a2b38b19a..dc7454ea6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1653,6 +1653,7 @@ dependencies = [ "jemallocator", "levenshtein_automata", "log", + "meilisearch-error", "meilisearch-schema", "meilisearch-tokenizer", "meilisearch-types", @@ -1673,6 +1674,13 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "meilisearch-error" +version = "0.10.1" +dependencies = [ + "actix-http", +] + 
[[package]] name = "meilisearch-http" version = "0.10.1" @@ -1699,6 +1707,7 @@ dependencies = [ "log", "main_error", "meilisearch-core", + "meilisearch-error", "meilisearch-schema", "meilisearch-tokenizer", "mime", @@ -1729,6 +1738,7 @@ version = "0.10.1" dependencies = [ "bincode", "indexmap", + "meilisearch-error", "serde", "serde_json", "toml", diff --git a/meilisearch-core/Cargo.toml b/meilisearch-core/Cargo.toml index 912210007..4d092d8af 100644 --- a/meilisearch-core/Cargo.toml +++ b/meilisearch-core/Cargo.toml @@ -24,6 +24,7 @@ intervaltree = "0.2.5" itertools = "0.9.0" levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] } log = "0.4.8" +meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" } meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" } meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.10.1" } meilisearch-types = { path = "../meilisearch-types", version = "0.10.1" } diff --git a/meilisearch-core/src/database.rs b/meilisearch-core/src/database.rs index a26324930..09fc2b326 100644 --- a/meilisearch-core/src/database.rs +++ b/meilisearch-core/src/database.rs @@ -4,17 +4,23 @@ use std::path::Path; use std::sync::{Arc, RwLock}; use std::{fs, thread}; +use chrono::{DateTime, Utc}; use crossbeam_channel::{Receiver, Sender}; -use heed::types::{Str, Unit}; +use heed::types::{Str, Unit, SerdeBincode}; use heed::{CompactionOption, Result as ZResult}; use log::debug; use meilisearch_schema::Schema; -use crate::{store, update, Index, MResult}; +use crate::{store, update, Index, MResult, Error}; pub type BoxUpdateFn = Box; type ArcSwapFn = arc_swap::ArcSwapOption; +type SerdeDatetime = SerdeBincode>; + +const UNHEALTHY_KEY: &str = "_is_unhealthy"; +const LAST_UPDATE_KEY: &str = "last-update"; + pub struct MainT; pub struct UpdateT; @@ -319,20 +325,66 @@ impl Database { self.update_fn.swap(None); } - pub fn main_read_txn(&self) -> heed::Result> { - self.env.typed_read_txn::() + 
pub fn main_read_txn(&self) -> MResult> { + Ok(self.env.typed_read_txn::()?) } - pub fn main_write_txn(&self) -> heed::Result> { - self.env.typed_write_txn::() + pub(crate) fn main_write_txn(&self) -> MResult> { + Ok(self.env.typed_write_txn::()?) } - pub fn update_read_txn(&self) -> heed::Result> { - self.update_env.typed_read_txn::() + /// Calls f providing it with a writer to the main database. After f is called, makes sure the + transaction is committed. Returns whatever result f returns. + pub fn main_write(&self, f: F) -> Result + where + F: FnOnce(&mut heed::RwTxn) -> Result, + E: From, + { + let mut writer = self.main_write_txn()?; + let result = f(&mut writer)?; + writer.commit().map_err(Error::Heed)?; + Ok(result) } - pub fn update_write_txn(&self) -> heed::Result> { - self.update_env.typed_write_txn::() + /// provides a context with a reader to the main database. experimental. + pub fn main_read(&self, f: F) -> Result + where + F: Fn(&heed::RoTxn) -> Result, + E: From, + { + let reader = self.main_read_txn()?; + f(&reader) + } + + pub fn update_read_txn(&self) -> MResult> { + Ok(self.update_env.typed_read_txn::()?) + } + + /// Calls f providing it with a writer to the update database. After f is called, makes sure the + transaction is committed. Returns whatever result f returns. + pub fn update_write(&self, f: F) -> Result + where + F: FnOnce(&mut heed::RwTxn) -> Result, + E: From, + { + let mut writer = self.update_write_txn()?; + let result = f(&mut writer)?; + writer.commit().map_err(Error::Heed)?; + Ok(result) + } + + /// provides a context with a reader to the update database. experimental. + pub fn update_read(&self, f: F) -> Result + where + F: Fn(&heed::RoTxn) -> Result, + E: From, + { + let reader = self.update_read_txn()?; + f(&reader) + } + + pub fn update_write_txn(&self) -> MResult> { + Ok(self.update_env.typed_write_txn::()?) 
} pub fn copy_and_compact_to_path>(&self, path: P) -> ZResult<(File, File)> { @@ -362,9 +414,41 @@ impl Database { indexes.keys().cloned().collect() } - pub fn common_store(&self) -> heed::PolyDatabase { + pub(crate) fn common_store(&self) -> heed::PolyDatabase { self.common_store } + + pub fn last_update(&self, reader: &heed::RoTxn) -> MResult>> { + match self.common_store() + .get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)? + { + Some(datetime) => Ok(Some(datetime)), + None => Ok(None), + } + } + + pub fn set_last_update(&self, writer: &mut heed::RwTxn, time: &DateTime) -> MResult<()> { + self.common_store() + .put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, time)?; + Ok(()) + } + + pub fn set_healthy(&self, writer: &mut heed::RwTxn) -> MResult<()> { + let common_store = self.common_store(); + common_store.delete::<_, Str>(writer, UNHEALTHY_KEY)?; + Ok(()) + } + + pub fn set_unhealthy(&self, writer: &mut heed::RwTxn) -> MResult<()> { + let common_store = self.common_store(); + common_store.put::<_, Str, Unit>(writer, UNHEALTHY_KEY, &())?; + Ok(()) + } + + pub fn get_health(&self, reader: &heed::RoTxn) -> MResult> { + let common_store = self.common_store(); + Ok(common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY)?) 
+ } } #[cfg(test)] @@ -1094,3 +1178,4 @@ mod tests { assert_matches!(iter.next(), None); } } + diff --git a/meilisearch-core/src/error.rs b/meilisearch-core/src/error.rs index 7990f691f..e01816423 100644 --- a/meilisearch-core/src/error.rs +++ b/meilisearch-core/src/error.rs @@ -9,6 +9,8 @@ pub use fst::Error as FstError; pub use heed::Error as HeedError; pub use pest::error as pest_error; +use meilisearch_error::{ErrorCode, Code}; + pub type MResult = Result; #[derive(Debug)] @@ -21,7 +23,7 @@ pub enum Error { MissingDocumentId, MaxFieldsLimitExceeded, Schema(meilisearch_schema::Error), - Zlmdb(heed::Error), + Heed(heed::Error), Fst(fst::Error), SerdeJson(SerdeJsonError), Bincode(bincode::Error), @@ -32,6 +34,13 @@ pub enum Error { FacetError(FacetError), } +impl ErrorCode for Error { + fn error_code(&self) -> Code { + //TODO populate codes + Code::Other + } +} + impl From for Error { fn from(error: io::Error) -> Error { Error::Io(error) @@ -74,7 +83,7 @@ impl From for Error { impl From for Error { fn from(error: HeedError) -> Error { - Error::Zlmdb(error) + Error::Heed(error) } } @@ -126,7 +135,7 @@ impl fmt::Display for Error { MissingDocumentId => write!(f, "document id is missing"), MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"), Schema(e) => write!(f, "schema error; {}", e), - Zlmdb(e) => write!(f, "heed error; {}", e), + Heed(e) => write!(f, "heed error; {}", e), Fst(e) => write!(f, "fst error; {}", e), SerdeJson(e) => write!(f, "serde json error; {}", e), Bincode(e) => write!(f, "bincode error; {}", e), @@ -174,6 +183,7 @@ pub enum FacetError { AttributeNotFound(String), AttributeNotSet { expected: Vec, found: String }, InvalidDocumentAttribute(String), + NoFacetAttributes, } impl FacetError { @@ -198,6 +208,7 @@ impl fmt::Display for FacetError { AttributeNotFound(attr) => write!(f, "unknown {:?} attribute", attr), AttributeNotSet { found, expected } => write!(f, "`{}` is not set as a faceted attribute. 
available facet attributes: {}", found, expected.join(", ")), InvalidDocumentAttribute(attr) => write!(f, "invalid document attribute {}, accepted types: String and [String]", attr), + NoFacetAttributes => write!(f, "No attributes are set for faceting"), } } } diff --git a/meilisearch-core/src/facets.rs b/meilisearch-core/src/facets.rs index dc8654915..cc1737eaf 100644 --- a/meilisearch-core/src/facets.rs +++ b/meilisearch-core/src/facets.rs @@ -13,7 +13,7 @@ use meilisearch_schema::{FieldId, Schema}; use meilisearch_types::DocumentId; use crate::database::MainT; -use crate::error::{FacetError, Error}; +use crate::error::{FacetError, MResult}; use crate::store::BEU16; /// Data structure used to represent a boolean expression in the form of nested arrays. @@ -34,14 +34,13 @@ impl FacetFilter { s: &str, schema: &Schema, attributes_for_faceting: &[FieldId], - ) -> Result { - + ) -> MResult { let parsed = serde_json::from_str::(s).map_err(|e| FacetError::ParsingError(e.to_string()))?; let mut filter = Vec::new(); match parsed { Value::Array(and_exprs) => { if and_exprs.is_empty() { - return Err(FacetError::EmptyArray); + return Err(FacetError::EmptyArray.into()); } for expr in and_exprs { match expr { @@ -51,7 +50,7 @@ impl FacetFilter { } Value::Array(or_exprs) => { if or_exprs.is_empty() { - return Err(FacetError::EmptyArray); + return Err(FacetError::EmptyArray.into()); } let mut inner = Vec::new(); for expr in or_exprs { @@ -60,17 +59,17 @@ impl FacetFilter { let key = FacetKey::from_str( &s, schema, attributes_for_faceting)?; inner.push(key); } - bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value)), + bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value).into()), } } filter.push(Either::Left(inner)); } - bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value)), + bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value).into()), } } return Ok(Self(filter)); } 
- bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value)), + bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value).into()), } } } @@ -183,7 +182,7 @@ pub fn facet_map_from_docids( index: &crate::Index, document_ids: &[DocumentId], attributes_for_facetting: &[FieldId], -) -> Result>, Error> { +) -> MResult>> { let mut facet_map = HashMap::new(); for document_id in document_ids { for result in index @@ -210,7 +209,7 @@ pub fn facet_map_from_docs( schema: &Schema, documents: &HashMap>, attributes_for_facetting: &[FieldId], -) -> Result>, Error> { +) -> MResult>> { let mut facet_map = HashMap::new(); let attributes_for_facetting = attributes_for_facetting .iter() diff --git a/meilisearch-core/src/query_builder.rs b/meilisearch-core/src/query_builder.rs index c9ed1933e..72accfcde 100644 --- a/meilisearch-core/src/query_builder.rs +++ b/meilisearch-core/src/query_builder.rs @@ -268,7 +268,7 @@ mod tests { let alternatives = self .index .synonyms - .synonyms(&writer, word.as_bytes()) + .synonyms_fst(&writer, word.as_bytes()) .unwrap(); let new = sdset_into_fstset(&new); diff --git a/meilisearch-core/src/query_tree.rs b/meilisearch-core/src/query_tree.rs index 2687028a0..4a3a622b2 100644 --- a/meilisearch-core/src/query_tree.rs +++ b/meilisearch-core/src/query_tree.rs @@ -147,7 +147,7 @@ fn split_best_frequency<'a>(reader: &heed::RoTxn, ctx: &Context, word: &' fn fetch_synonyms(reader: &heed::RoTxn, ctx: &Context, words: &[&str]) -> MResult>> { let words = normalize_str(&words.join(" ")); - let set = ctx.synonyms.synonyms(reader, words.as_bytes())?; + let set = ctx.synonyms.synonyms_fst(reader, words.as_bytes())?; let mut strings = Vec::new(); let mut stream = set.stream(); diff --git a/meilisearch-core/src/store/documents_fields_counts.rs b/meilisearch-core/src/store/documents_fields_counts.rs index 69bacd5f7..f0d23c99b 100644 --- a/meilisearch-core/src/store/documents_fields_counts.rs +++ 
b/meilisearch-core/src/store/documents_fields_counts.rs @@ -4,6 +4,7 @@ use crate::DocumentId; use heed::types::OwnedType; use heed::Result as ZResult; use meilisearch_schema::IndexedPos; +use crate::MResult; #[derive(Copy, Clone)] pub struct DocumentsFieldsCounts { @@ -60,7 +61,7 @@ impl DocumentsFieldsCounts { Ok(DocumentFieldsCountsIter { iter }) } - pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult> { + pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn) -> MResult> { let iter = self.documents_fields_counts.iter(reader)?; Ok(DocumentsIdsIter { last_seen_id: None, @@ -102,7 +103,7 @@ pub struct DocumentsIdsIter<'txn> { } impl Iterator for DocumentsIdsIter<'_> { - type Item = ZResult; + type Item = MResult; fn next(&mut self) -> Option { for result in &mut self.iter { @@ -114,7 +115,7 @@ impl Iterator for DocumentsIdsIter<'_> { return Some(Ok(document_id)); } } - Err(e) => return Some(Err(e)), + Err(e) => return Some(Err(e.into())), } } None diff --git a/meilisearch-core/src/store/main.rs b/meilisearch-core/src/store/main.rs index 864970320..7873b3dba 100644 --- a/meilisearch-core/src/store/main.rs +++ b/meilisearch-core/src/store/main.rs @@ -2,14 +2,13 @@ use std::borrow::Cow; use std::collections::HashMap; use chrono::{DateTime, Utc}; -use heed::Result as ZResult; use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str}; use meilisearch_schema::{FieldId, Schema}; use meilisearch_types::DocumentId; use sdset::Set; use crate::database::MainT; -use crate::RankedMap; +use crate::{RankedMap, MResult}; use crate::settings::RankingRule; use crate::{FstSetCow, FstMapCow}; use super::{CowSet, DocumentsIds}; @@ -41,39 +40,38 @@ pub struct Main { } impl Main { - pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> { - self.main.clear(writer) + pub fn clear(self, writer: &mut heed::RwTxn) -> MResult<()> { + Ok(self.main.clear(writer)?) 
} - pub fn put_name(self, writer: &mut heed::RwTxn, name: &str) -> ZResult<()> { - self.main.put::<_, Str, Str>(writer, NAME_KEY, name) + pub fn put_name(self, writer: &mut heed::RwTxn, name: &str) -> MResult<()> { + Ok(self.main.put::<_, Str, Str>(writer, NAME_KEY, name)?) } - pub fn name(self, reader: &heed::RoTxn) -> ZResult> { + pub fn name(self, reader: &heed::RoTxn) -> MResult> { Ok(self .main .get::<_, Str, Str>(reader, NAME_KEY)? .map(|name| name.to_owned())) } - pub fn put_created_at(self, writer: &mut heed::RwTxn) -> ZResult<()> { - self.main - .put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now()) + pub fn put_created_at(self, writer: &mut heed::RwTxn) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now())?) } - pub fn created_at(self, reader: &heed::RoTxn) -> ZResult>> { - self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY) + pub fn created_at(self, reader: &heed::RoTxn) -> MResult>> { + Ok(self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY)?) } - pub fn put_updated_at(self, writer: &mut heed::RwTxn) -> ZResult<()> { - self.main - .put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now()) + pub fn put_updated_at(self, writer: &mut heed::RwTxn) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now())?) } - pub fn updated_at(self, reader: &heed::RoTxn) -> ZResult>> { - self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY) + pub fn updated_at(self, reader: &heed::RoTxn) -> MResult>> { + Ok(self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY)?) 
} +<<<<<<< HEAD pub fn put_internal_docids(self, writer: &mut heed::RwTxn, ids: &sdset::Set) -> ZResult<()> { self.main.put::<_, Str, DocumentsIds>(writer, INTERNAL_DOCIDS_KEY, ids) } @@ -120,6 +118,21 @@ impl Main { let mut build = fst::MapBuilder::memory(); while let Some((docid, values)) = op.next() { build.insert(docid, values[0].value).unwrap(); +======= + pub fn put_words_fst(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> MResult<()> { + let bytes = fst.as_fst().as_bytes(); + Ok(self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, bytes)?) + } + + pub unsafe fn static_words_fst(self, reader: &heed::RoTxn) -> MResult> { + match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? { + Some(bytes) => { + let bytes: &'static [u8] = std::mem::transmute(bytes); + let set = fst::Set::from_static_slice(bytes).unwrap(); + Ok(Some(set)) + } + None => Ok(None), +>>>>>>> 5c760d3... refactor errors / isolate core/http errors } drop(op); @@ -161,58 +174,76 @@ impl Main { self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes()) } - pub fn words_fst(self, reader: &heed::RoTxn) -> ZResult { + pub fn words_fst(self, reader: &heed::RoTxn) -> MResult> { match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } - pub fn put_schema(self, writer: &mut heed::RwTxn, schema: &Schema) -> ZResult<()> { - self.main.put::<_, Str, SerdeBincode>(writer, SCHEMA_KEY, schema) + pub fn put_schema(self, writer: &mut heed::RwTxn, schema: &Schema) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeBincode>(writer, SCHEMA_KEY, schema)?) } - pub fn schema(self, reader: &heed::RoTxn) -> ZResult> { - self.main.get::<_, Str, SerdeBincode>(reader, SCHEMA_KEY) + pub fn schema(self, reader: &heed::RoTxn) -> MResult> { + Ok(self.main.get::<_, Str, SerdeBincode>(reader, SCHEMA_KEY)?) 
} - pub fn delete_schema(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, SCHEMA_KEY) + pub fn delete_schema(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, SCHEMA_KEY)?) } - pub fn put_ranked_map(self, writer: &mut heed::RwTxn, ranked_map: &RankedMap) -> ZResult<()> { - self.main.put::<_, Str, SerdeBincode>(writer, RANKED_MAP_KEY, &ranked_map) + pub fn put_ranked_map(self, writer: &mut heed::RwTxn, ranked_map: &RankedMap) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeBincode>(writer, RANKED_MAP_KEY, &ranked_map)?) } - pub fn ranked_map(self, reader: &heed::RoTxn) -> ZResult> { - self.main.get::<_, Str, SerdeBincode>(reader, RANKED_MAP_KEY) + pub fn ranked_map(self, reader: &heed::RoTxn) -> MResult> { + Ok(self.main.get::<_, Str, SerdeBincode>(reader, RANKED_MAP_KEY)?) } - pub fn put_synonyms_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> { + pub fn put_synonyms_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> MResult<()> { let bytes = fst.as_fst().as_bytes(); - self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes) + Ok(self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes)?) } - pub fn synonyms_fst(self, reader: &heed::RoTxn) -> ZResult { + pub(crate) fn synonyms_fst(self, reader: &heed::RoTxn) -> MResult> { match self.main.get::<_, Str, ByteSlice>(reader, SYNONYMS_KEY)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } - pub fn put_stop_words_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> { - let bytes = fst.as_fst().as_bytes(); - self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes) + pub fn synonyms_list(self, reader: &heed::RoTxn) -> MResult> { + let synonyms = self + .synonyms_fst(&reader)? 
+ .unwrap_or_default() + .stream() + .into_strs()?; + Ok(synonyms) } - pub fn stop_words_fst(self, reader: &heed::RoTxn) -> ZResult { + pub fn put_stop_words_fst>(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> MResult<()> { + let bytes = fst.as_fst().as_bytes(); + Ok(self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)?) + } + + pub(crate) fn stop_words_fst(self, reader: &heed::RoTxn) -> MResult { match self.main.get::<_, Str, ByteSlice>(reader, STOP_WORDS_KEY)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } - pub fn put_number_of_documents(self, writer: &mut heed::RwTxn, f: F) -> ZResult + pub fn stop_words_list(self, reader: &heed::RoTxn) -> MResult> { + let stop_word_list = self + .stop_words_fst(reader)? + .unwrap_or_default() + .stream() + .into_strs()?; + Ok(stop_word_list) + } + + pub fn put_number_of_documents(self, writer: &mut heed::RwTxn, f: F) -> MResult where F: Fn(u64) -> u64, { @@ -222,11 +253,10 @@ impl Main { Ok(new) } - pub fn number_of_documents(self, reader: &heed::RoTxn) -> ZResult { + pub fn number_of_documents(self, reader: &heed::RoTxn) -> MResult { match self .main - .get::<_, Str, OwnedType>(reader, NUMBER_OF_DOCUMENTS_KEY)? - { + .get::<_, Str, OwnedType>(reader, NUMBER_OF_DOCUMENTS_KEY)? { Some(value) => Ok(value), None => Ok(0), } @@ -235,13 +265,12 @@ impl Main { pub fn put_fields_distribution( self, writer: &mut heed::RwTxn, - fields_distribution: &FreqsMap, - ) -> ZResult<()> { - self.main - .put::<_, Str, SerdeFreqsMap>(writer, FIELDS_DISTRIBUTION_KEY, fields_distribution) + fields_distribution: &FreqsMap, + ) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeFreqsMap>(writer, FIELDS_DISTRIBUTION_KEY, fields_distribution)?) 
} - pub fn fields_distribution(&self, reader: &heed::RoTxn) -> ZResult> { + pub fn fields_distribution(&self, reader: &heed::RoTxn) -> MResult> { match self .main .get::<_, Str, SerdeFreqsMap>(reader, FIELDS_DISTRIBUTION_KEY)? @@ -251,51 +280,50 @@ impl Main { } } - pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn) -> ZResult>>> { - self.main.get::<_, Str, CowSet>(reader, ATTRIBUTES_FOR_FACETING_KEY) + pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn) -> MResult>>> { + Ok(self.main.get::<_, Str, CowSet>(reader, ATTRIBUTES_FOR_FACETING_KEY)?) } - pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn, attributes: &Set) -> ZResult<()> { - self.main.put::<_, Str, CowSet>(writer, ATTRIBUTES_FOR_FACETING_KEY, attributes) + pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn, attributes: &Set) -> MResult<()> { + Ok(self.main.put::<_, Str, CowSet>(writer, ATTRIBUTES_FOR_FACETING_KEY, attributes)?) } - pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING_KEY) + pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING_KEY)?) } - pub fn ranking_rules(&self, reader: &heed::RoTxn) -> ZResult>> { - self.main.get::<_, Str, SerdeBincode>>(reader, RANKING_RULES_KEY) + pub fn ranking_rules(&self, reader: &heed::RoTxn) -> MResult>> { + Ok(self.main.get::<_, Str, SerdeBincode>>(reader, RANKING_RULES_KEY)?) } - pub fn put_ranking_rules(self, writer: &mut heed::RwTxn, value: &[RankingRule]) -> ZResult<()> { - self.main.put::<_, Str, SerdeBincode>>(writer, RANKING_RULES_KEY, &value.to_vec()) + pub fn put_ranking_rules(self, writer: &mut heed::RwTxn, value: &[RankingRule]) -> MResult<()> { + Ok(self.main.put::<_, Str, SerdeBincode>>(writer, RANKING_RULES_KEY, &value.to_vec())?) 
} - pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, RANKING_RULES_KEY) + pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, RANKING_RULES_KEY)?) } - pub fn distinct_attribute(&self, reader: &heed::RoTxn) -> ZResult> { + pub fn distinct_attribute(&self, reader: &heed::RoTxn) -> MResult> { if let Some(value) = self.main.get::<_, Str, Str>(reader, DISTINCT_ATTRIBUTE_KEY)? { return Ok(Some(value.to_owned())) } return Ok(None) } - pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn, value: &str) -> ZResult<()> { - self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value) + pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn, value: &str) -> MResult<()> { + Ok(self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value)?) } - pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn) -> ZResult { - self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY) + pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn) -> MResult { + Ok(self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY)?) } - pub fn put_customs(self, writer: &mut heed::RwTxn, customs: &[u8]) -> ZResult<()> { - self.main - .put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs) + pub fn put_customs(self, writer: &mut heed::RwTxn, customs: &[u8]) -> MResult<()> { + Ok(self.main.put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs)?) } - pub fn customs<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult> { - self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY) + pub fn customs<'txn>(self, reader: &'txn heed::RoTxn) -> MResult> { + Ok(self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY)?) 
} } diff --git a/meilisearch-core/src/store/mod.rs b/meilisearch-core/src/store/mod.rs index d19d3f7d3..fa5baa831 100644 --- a/meilisearch-core/src/store/mod.rs +++ b/meilisearch-core/src/store/mod.rs @@ -31,7 +31,6 @@ use std::collections::HashSet; use std::convert::TryInto; use std::{mem, ptr}; -use heed::Result as ZResult; use heed::{BytesEncode, BytesDecode}; use meilisearch_schema::{IndexedPos, FieldId}; use sdset::{Set, SetBuf}; @@ -279,14 +278,14 @@ impl Index { } } - pub fn customs_update(&self, writer: &mut heed::RwTxn, customs: Vec) -> ZResult { + pub fn customs_update(&self, writer: &mut heed::RwTxn, customs: Vec) -> MResult { let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); - update::push_customs_update(writer, self.updates, self.updates_results, customs) + Ok(update::push_customs_update(writer, self.updates, self.updates_results, customs)?) } - pub fn settings_update(&self, writer: &mut heed::RwTxn, update: SettingsUpdate) -> ZResult { + pub fn settings_update(&self, writer: &mut heed::RwTxn, update: SettingsUpdate) -> MResult { let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); - update::push_settings_update(writer, self.updates, self.updates_results, update) + Ok(update::push_settings_update(writer, self.updates, self.updates_results, update)?) 
} pub fn documents_addition(&self) -> update::DocumentsAddition { diff --git a/meilisearch-core/src/store/synonyms.rs b/meilisearch-core/src/store/synonyms.rs index 1ec8d313c..cf7cf5811 100644 --- a/meilisearch-core/src/store/synonyms.rs +++ b/meilisearch-core/src/store/synonyms.rs @@ -4,7 +4,7 @@ use heed::Result as ZResult; use heed::types::ByteSlice; use crate::database::MainT; -use crate::FstSetCow; +use crate::{FstSetCow, MResult}; #[derive(Copy, Clone)] pub struct Synonyms { @@ -27,10 +27,19 @@ impl Synonyms { self.synonyms.clear(writer) } - pub fn synonyms<'txn>(self, reader: &'txn heed::RoTxn, word: &[u8]) -> ZResult> { + pub fn synonyms_fst<'txn>(self, reader: &'txn heed::RoTxn, word: &[u8]) -> ZResult> { match self.synonyms.get(reader, word)? { Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), } } + + pub fn synonyms(self, reader: &heed::RoTxn, word: &[u8]) -> MResult>> { + let synonyms = self + .synonyms_fst(&reader, word)? 
+ .map(|list| list.stream().into_strs()) + .transpose()?; + Ok(synonyms) + } } + diff --git a/meilisearch-core/src/update/customs_update.rs b/meilisearch-core/src/update/customs_update.rs index a413d13a6..a3a66e61d 100644 --- a/meilisearch-core/src/update/customs_update.rs +++ b/meilisearch-core/src/update/customs_update.rs @@ -1,14 +1,13 @@ -use heed::Result as ZResult; use crate::database::{MainT, UpdateT}; -use crate::store; +use crate::{store, MResult}; use crate::update::{next_update_id, Update}; pub fn apply_customs_update( writer: &mut heed::RwTxn, main_store: store::Main, customs: &[u8], -) -> ZResult<()> { +) -> MResult<()> { main_store.put_customs(writer, customs) } @@ -17,7 +16,7 @@ pub fn push_customs_update( updates_store: store::Updates, updates_results_store: store::UpdatesResults, customs: Vec, -) -> ZResult { +) -> MResult { let last_update_id = next_update_id(writer, updates_store, updates_results_store)?; let update = Update::customs(customs); diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 368c8f649..20f4d4f4e 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -35,6 +35,7 @@ indexmap = { version = "1.3.2", features = ["serde-1"] } log = "0.4.8" main_error = "0.1.0" meilisearch-core = { path = "../meilisearch-core", version = "0.10.1" } +meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" } meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" } meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"} mime = "0.3.16" diff --git a/meilisearch-http/src/data.rs b/meilisearch-http/src/data.rs index 39b7f9b1e..e278f0ffc 100644 --- a/meilisearch-http/src/data.rs +++ b/meilisearch-http/src/data.rs @@ -2,20 +2,14 @@ use std::collections::HashMap; use std::ops::Deref; use std::sync::Arc; -use chrono::{DateTime, Utc}; -use heed::types::{SerdeBincode, Str}; use log::error; -use meilisearch_core::{Database, DatabaseOptions, Error as MError, 
MResult, MainT, UpdateT}; +use meilisearch_core::{Database, DatabaseOptions, MResult, MainT, UpdateT}; use sha2::Digest; use sysinfo::Pid; use crate::index_update_callback; use crate::option::Opt; -const LAST_UPDATE_KEY: &str = "last-update"; - -type SerdeDatetime = SerdeBincode>; - #[derive(Clone)] pub struct Data { inner: Arc, @@ -70,24 +64,6 @@ impl DataInner { } } - pub fn last_update(&self, reader: &heed::RoTxn) -> MResult>> { - match self - .db - .common_store() - .get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)? - { - Some(datetime) => Ok(Some(datetime)), - None => Ok(None), - } - } - - pub fn set_last_update(&self, writer: &mut heed::RwTxn) -> MResult<()> { - self.db - .common_store() - .put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now()) - .map_err(Into::into) - } - pub fn compute_stats(&self, writer: &mut heed::RwTxn, index_uid: &str) -> MResult<()> { let index = match self.db.open_index(&index_uid) { Some(index) => index, @@ -124,7 +100,6 @@ impl DataInner { index .main .put_fields_distribution(writer, &distribution) - .map_err(MError::Zlmdb) } } @@ -170,3 +145,4 @@ impl Data { data } } + diff --git a/meilisearch-http/src/error.rs b/meilisearch-http/src/error.rs index e59848d3d..b5dde6f6e 100644 --- a/meilisearch-http/src/error.rs +++ b/meilisearch-http/src/error.rs @@ -1,4 +1,5 @@ use std::fmt; +use std::error; use actix_http::ResponseBuilder; use actix_web as aweb; @@ -6,6 +7,25 @@ use actix_web::http::StatusCode; use serde_json::json; use actix_web::error::JsonPayloadError; +use meilisearch_error::{ErrorCode, Code}; + +#[derive(Debug)] +pub struct ResponseError { + inner: Box, +} + +impl fmt::Display for ResponseError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt(f) + } +} + +impl From for ResponseError { + fn from(error: Error) -> ResponseError { + ResponseError { inner: Box::new(error) } + } +} + #[derive(Debug)] pub enum Error { BadParameter(String, String), @@ -26,10 +46,18 @@ pub enum Error { 
SearchDocuments(String), PayloadTooLarge, UnsupportedMediaType, - FacetExpression(String), - FacetCount(String), } +impl error::Error for Error {} + +impl ErrorCode for Error { + fn error_code(&self) -> Code { + //TODO populate with right error codes + Code::Other + } +} + +#[derive(Debug)] pub enum FacetCountError { AttributeNotSet(String), SyntaxError(String), @@ -37,6 +65,14 @@ pub enum FacetCountError { NoFacetSet, } +impl error::Error for FacetCountError {} + +impl ErrorCode for FacetCountError { + fn error_code(&self) -> Code { + unimplemented!() + } +} + impl FacetCountError { pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError { let found = found.to_string(); @@ -137,94 +173,42 @@ impl fmt::Display for Error { Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."), Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err), Self::Maintenance => f.write_str("Server is in maintenance, please try again later"), - Self::FilterParsing(err) => write!(f, "parsing error: {}", err), Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"), Self::MissingHeader(header) => write!(f, "Header {} is missing", header), Self::NotFound(err) => write!(f, "{} not found", err), Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err), Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err), Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err), - Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e), Self::PayloadTooLarge => f.write_str("Payload to large"), Self::UnsupportedMediaType => f.write_str("Unsupported media type"), - Self::FacetCount(e) => write!(f, "error with facet count: {}", e), } } } -impl aweb::error::ResponseError for 
Error { +impl aweb::error::ResponseError for ResponseError { fn error_response(&self) -> aweb::HttpResponse { + let error_code = self.inner.error_code().internal(); ResponseBuilder::new(self.status_code()).json(json!({ "message": self.to_string(), + "errorCode": error_code, + "errorLink": format!("docs.meilisearch.come/error/{}", error_code), })) } fn status_code(&self) -> StatusCode { - match *self { - Self::BadParameter(_, _) - | Self::BadRequest(_) - | Self::CreateIndex(_) - | Self::InvalidIndexUid - | Self::OpenIndex(_) - | Self::RetrieveDocument(_, _) - | Self::FacetExpression(_) - | Self::SearchDocuments(_) - | Self::FacetCount(_) - | Self::FilterParsing(_) => StatusCode::BAD_REQUEST, - Self::DocumentNotFound(_) - | Self::IndexNotFound(_) - | Self::NotFound(_) => StatusCode::NOT_FOUND, - Self::InvalidToken(_) - | Self::MissingHeader(_) => StatusCode::UNAUTHORIZED, - Self::MissingAuthorizationHeader => StatusCode::FORBIDDEN, - Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, - Self::Maintenance => StatusCode::SERVICE_UNAVAILABLE, - Self::PayloadTooLarge => StatusCode::PAYLOAD_TOO_LARGE, - Self::UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE, - } + self.inner.error_code().http() } } -impl From for Error { - fn from(err: meilisearch_core::HeedError) -> Error { - Error::Internal(err.to_string()) +impl From for ResponseError { + fn from(err: meilisearch_core::Error) -> ResponseError { + ResponseError { inner: Box::new(err) } } } -impl From for Error { - fn from(err: meilisearch_core::FstError) -> Error { - Error::Internal(err.to_string()) - } -} - -impl From for Error { - fn from(error: meilisearch_core::FacetError) -> Error { - Error::FacetExpression(error.to_string()) - } -} - -impl From for Error { - fn from(err: meilisearch_core::Error) -> Error { - use meilisearch_core::pest_error::LineColLocation::*; - match err { - meilisearch_core::Error::FilterParseError(e) => { - let (line, column) = match e.line_col { - Span((line, _), (column, _)) => 
(line, column), - Pos((line, column)) => (line, column), - }; - let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message()); - - Error::FilterParsing(message) - }, - meilisearch_core::Error::FacetError(e) => Error::FacetExpression(e.to_string()), - _ => Error::Internal(err.to_string()), - } - } -} - -impl From for Error { - fn from(err: meilisearch_schema::Error) -> Error { - Error::Internal(err.to_string()) +impl From for ResponseError { + fn from(err: meilisearch_schema::Error) -> ResponseError { + ResponseError { inner: Box::new(err) } } } @@ -234,9 +218,9 @@ impl From for Error { } } -impl From for Error { - fn from(other: FacetCountError) -> Error { - Error::FacetCount(other.to_string()) +impl From for ResponseError { + fn from(err: FacetCountError) -> ResponseError { + ResponseError { inner: Box::new(err) } } } @@ -251,6 +235,7 @@ impl From for Error { } } -pub fn json_error_handler(err: JsonPayloadError) -> Error { - err.into() +pub fn json_error_handler(err: JsonPayloadError) -> ResponseError { + let error = Error::from(err); + error.into() } diff --git a/meilisearch-http/src/helpers/authentication.rs b/meilisearch-http/src/helpers/authentication.rs index 4b921a783..927665ffe 100644 --- a/meilisearch-http/src/helpers/authentication.rs +++ b/meilisearch-http/src/helpers/authentication.rs @@ -7,7 +7,7 @@ use actix_service::{Service, Transform}; use actix_web::{dev::ServiceRequest, dev::ServiceResponse}; use futures::future::{err, ok, Future, Ready}; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::Data; #[derive(Clone)] @@ -71,10 +71,10 @@ where let auth_header = match req.headers().get("X-Meili-API-Key") { Some(auth) => match auth.to_str() { Ok(auth) => auth, - Err(_) => return Box::pin(err(Error::MissingAuthorizationHeader.into())), + Err(_) => return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into())), }, None => { - return 
Box::pin(err(Error::MissingAuthorizationHeader.into())); + return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into())); } }; @@ -95,7 +95,7 @@ where Box::pin(svc.call(req)) } else { Box::pin(err( - Error::InvalidToken(auth_header.to_string()).into() + ResponseError::from(Error::InvalidToken(auth_header.to_string())).into() )) } } diff --git a/meilisearch-http/src/helpers/meilisearch.rs b/meilisearch-http/src/helpers/meilisearch.rs index 297bb1f54..f760bb1ac 100644 --- a/meilisearch-http/src/helpers/meilisearch.rs +++ b/meilisearch-http/src/helpers/meilisearch.rs @@ -17,7 +17,7 @@ use serde_json::Value; use siphasher::sip::SipHasher; use slice_group_by::GroupBy; -use crate::error::Error; +use crate::error::{Error, ResponseError}; pub trait IndexSearchExt { fn new_search(&self, query: String) -> SearchBuilder; @@ -107,7 +107,7 @@ impl<'a> SearchBuilder<'a> { self } - pub fn search(self, reader: &heed::RoTxn) -> Result { + pub fn search(self, reader: &heed::RoTxn) -> Result { let schema = self .index .main @@ -260,7 +260,7 @@ impl<'a> SearchBuilder<'a> { reader: &heed::RoTxn, ranked_map: &'a RankedMap, schema: &Schema, - ) -> Result>, Error> { + ) -> Result>, ResponseError> { let ranking_rules = self.index.main.ranking_rules(reader)?; if let Some(ranking_rules) = ranking_rules { diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs index e2e24d4d2..5a44d5761 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -7,14 +7,17 @@ pub mod models; pub mod option; pub mod routes; -pub use self::data::Data; -use self::error::json_error_handler; use actix_http::Error; use actix_service::ServiceFactory; use actix_web::{dev, web, App}; +use chrono::Utc; use log::error; + use meilisearch_core::ProcessedUpdateResult; +pub use self::data::Data; +use self::error::{json_error_handler, ResponseError}; + pub fn create_app( data: &Data, ) -> App< @@ -55,28 +58,23 @@ pub fn index_update_callback(index_uid: &str, data: 
&Data, status: ProcessedUpda if let Some(index) = data.db.open_index(&index_uid) { let db = &data.db; - let mut writer = match db.main_write_txn() { - Ok(writer) => writer, - Err(e) => { - error!("Impossible to get write_txn; {}", e); - return; + let res = db.main_write::<_, _, ResponseError>(|mut writer| { + if let Err(e) = data.compute_stats(&mut writer, &index_uid) { + error!("Impossible to compute stats; {}", e) } - }; - if let Err(e) = data.compute_stats(&mut writer, &index_uid) { - error!("Impossible to compute stats; {}", e) - } + if let Err(e) = data.db.set_last_update(&mut writer, &Utc::now()) { + error!("Impossible to update last_update; {}", e) + } - if let Err(e) = data.set_last_update(&mut writer) { - error!("Impossible to update last_update; {}", e) - } - - if let Err(e) = index.main.put_updated_at(&mut writer) { - error!("Impossible to update updated_at; {}", e) - } - - if let Err(e) = writer.commit() { - error!("Impossible to get write_txn; {}", e); + if let Err(e) = index.main.put_updated_at(&mut writer) { + error!("Impossible to update updated_at; {}", e) + } + Ok(()) + }); + match res { + Ok(_) => (), + Err(e) => error!("{}", e), } } } diff --git a/meilisearch-http/src/routes/document.rs b/meilisearch-http/src/routes/document.rs index d2985f286..12e4b41f6 100644 --- a/meilisearch-http/src/routes/document.rs +++ b/meilisearch-http/src/routes/document.rs @@ -7,7 +7,7 @@ use meilisearch_core::update; use serde::Deserialize; use serde_json::Value; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -37,7 +37,7 @@ pub fn services(cfg: &mut web::ServiceConfig) { async fn get_document( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -46,13 +46,9 @@ async fn get_document( let reader = data.db.main_read_txn()?; let internal_id = index.main 
.external_to_internal_docid(&reader, &path.document_id)? - .ok_or(ResponseError::document_not_found(&path.document_id))?; - - let response: Document = index - .document(&reader, None, document_id)? .ok_or(Error::document_not_found(&path.document_id))?; - Ok(HttpResponse::Ok().json(response)) + Ok(HttpResponse::Ok().json(document)) } #[delete( @@ -62,7 +58,7 @@ async fn get_document( async fn delete_document( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -70,14 +66,10 @@ async fn delete_document( let document_id = meilisearch_core::serde::compute_document_id(&path.document_id); - let mut update_writer = data.db.update_write_txn()?; - let mut documents_deletion = index.documents_deletion(); documents_deletion.delete_document_by_external_docid(path.document_id.clone()); - let update_id = documents_deletion.finalize(&mut update_writer)?; - - update_writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|writer| Ok(documents_deletion.finalize(writer)?))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -95,7 +87,7 @@ async fn get_all_documents( data: web::Data, path: web::Path, params: web::Query, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -105,7 +97,6 @@ async fn get_all_documents( let limit = params.limit.unwrap_or(20); let reader = data.db.main_read_txn()?; - let documents_ids: Result, _> = index .documents_fields_counts .documents_ids(&reader)? @@ -113,23 +104,21 @@ async fn get_all_documents( .take(limit) .collect(); - let documents_ids = documents_ids?; - let attributes: Option> = params .attributes_to_retrieve .as_ref() .map(|a| a.split(',').collect()); - let mut response = Vec::new(); - for document_id in documents_ids { + let mut documents = Vec::new(); + for document_id in documents_ids? 
{ if let Ok(Some(document)) = index.document::(&reader, attributes.as_ref(), document_id) { - response.push(document); + documents.push(document); } } - Ok(HttpResponse::Ok().json(response)) + Ok(HttpResponse::Ok().json(documents)) } fn find_primary_key(document: &IndexMap) -> Option { @@ -153,7 +142,7 @@ async fn update_multiple_documents( params: web::Query, body: web::Json>, is_partial: bool, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -175,13 +164,14 @@ async fn update_multiple_documents( .ok_or(Error::bad_request("Could not infer a primary key"))?, }; - let mut writer = data.db.main_write_txn()?; - schema .set_primary_key(&id) .map_err(Error::bad_request)?; - index.main.put_schema(&mut writer, &schema)?; - writer.commit()?; + + data.db.main_write::<_, _, ResponseError>(|mut writer| { + index.main.put_schema(&mut writer, &schema)?; + Ok(()) + })?; } let mut document_addition = if is_partial { @@ -194,9 +184,12 @@ async fn update_multiple_documents( document_addition.update_document(document); } - let mut update_writer = data.db.update_write_txn()?; - let update_id = document_addition.finalize(&mut update_writer)?; - update_writer.commit()?; + let update_id = data + .db + .update_write::<_, _, ResponseError>(move |writer| { + let update_id = document_addition.finalize(writer)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -207,7 +200,7 @@ async fn add_documents( path: web::Path, params: web::Query, body: web::Json>, -) -> Result { +) -> Result { update_multiple_documents(data, path, params, body, false).await } @@ -217,7 +210,7 @@ async fn update_documents( path: web::Path, params: web::Query, body: web::Json>, -) -> Result { +) -> Result { update_multiple_documents(data, path, params, body, true).await } @@ -229,13 +222,12 @@ async fn delete_documents( data: web::Data, path: web::Path, body: web::Json>, -) -> Result { +) -> Result { let index = data .db 
.open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; let mut documents_deletion = index.documents_deletion(); @@ -244,9 +236,7 @@ async fn delete_documents( documents_deletion.delete_document_by_external_docid(document_id); } - let update_id = documents_deletion.finalize(&mut writer)?; - - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|writer| Ok(documents_deletion.finalize(writer)?))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -255,17 +245,13 @@ async fn delete_documents( async fn clear_all_documents( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; - - let update_id = index.clear_all(&mut writer)?; - - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|writer| Ok(index.clear_all(writer)?))?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/src/routes/health.rs b/meilisearch-http/src/routes/health.rs index 316b6dd3a..eb7dfbf74 100644 --- a/meilisearch-http/src/routes/health.rs +++ b/meilisearch-http/src/routes/health.rs @@ -1,46 +1,37 @@ use actix_web::{web, HttpResponse}; use actix_web_macros::{get, put}; -use heed::types::{Str, Unit}; use serde::Deserialize; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::Data; -const UNHEALTHY_KEY: &str = "_is_unhealthy"; - pub fn services(cfg: &mut web::ServiceConfig) { cfg.service(get_health).service(change_healthyness); } #[get("/health", wrap = "Authentication::Private")] -async fn get_health(data: web::Data) -> Result { +async fn get_health(data: web::Data) -> Result { let reader = data.db.main_read_txn()?; - - let common_store = data.db.common_store(); - - if let 
Ok(Some(_)) = common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY) { - return Err(Error::Maintenance); + if let Ok(Some(_)) = data.db.get_health(&reader) { + return Err(Error::Maintenance.into()); } - Ok(HttpResponse::Ok().finish()) } -async fn set_healthy(data: web::Data) -> Result { - let mut writer = data.db.main_write_txn()?; - let common_store = data.db.common_store(); - common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY)?; - writer.commit()?; - +async fn set_healthy(data: web::Data) -> Result { + data.db.main_write::<_, _, ResponseError>(|writer| { + data.db.set_healthy(writer)?; + Ok(()) + })?; Ok(HttpResponse::Ok().finish()) } -async fn set_unhealthy(data: web::Data) -> Result { - let mut writer = data.db.main_write_txn()?; - let common_store = data.db.common_store(); - common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &())?; - writer.commit()?; - +async fn set_unhealthy(data: web::Data) -> Result { + data.db.main_write::<_, _, ResponseError>(|writer| { + data.db.set_unhealthy(writer)?; + Ok(()) + })?; Ok(HttpResponse::Ok().finish()) } @@ -52,8 +43,8 @@ struct HealthBody { #[put("/health", wrap = "Authentication::Private")] async fn change_healthyness( data: web::Data, - body: web::Json, -) -> Result { + body: web::Json, +) -> Result { if body.health { set_healthy(data).await } else { diff --git a/meilisearch-http/src/routes/index.rs b/meilisearch-http/src/routes/index.rs index d669f2d84..6d8d654a0 100644 --- a/meilisearch-http/src/routes/index.rs +++ b/meilisearch-http/src/routes/index.rs @@ -5,7 +5,7 @@ use log::error; use rand::seq::SliceRandom; use serde::{Deserialize, Serialize}; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::IndexParam; use crate::Data; @@ -40,10 +40,9 @@ struct IndexResponse { } #[get("/indexes", wrap = "Authentication::Private")] -async fn list_indexes(data: web::Data) -> Result { +async fn list_indexes(data: web::Data) -> Result { let 
reader = data.db.main_read_txn()?; - - let mut response = Vec::new(); + let mut indexes = Vec::new(); for index_uid in data.db.indexes_uids() { let index = data.db.open_index(&index_uid); @@ -51,19 +50,19 @@ async fn list_indexes(data: web::Data) -> Result { match index { Some(index) => { let name = index.main.name(&reader)?.ok_or(Error::internal( - "Impossible to get the name of an index", + "Impossible to get the name of an index", ))?; let created_at = index .main .created_at(&reader)? .ok_or(Error::internal( - "Impossible to get the create date of an index", + "Impossible to get the create date of an index", ))?; let updated_at = index .main .updated_at(&reader)? .ok_or(Error::internal( - "Impossible to get the last update date of an index", + "Impossible to get the last update date of an index", ))?; let primary_key = match index.main.schema(&reader) { @@ -81,7 +80,7 @@ async fn list_indexes(data: web::Data) -> Result { updated_at, primary_key, }; - response.push(index_response); + indexes.push(index_response); } None => error!( "Index {} is referenced in the indexes list but cannot be found", @@ -90,35 +89,34 @@ async fn list_indexes(data: web::Data) -> Result { } } - Ok(HttpResponse::Ok().json(response)) + Ok(HttpResponse::Ok().json(indexes)) } #[get("/indexes/{index_uid}", wrap = "Authentication::Private")] async fn get_index( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let name = index.main.name(&reader)?.ok_or(Error::internal( - "Impossible to get the name of an index", + "Impossible to get the name of an index", ))?; let created_at = index .main .created_at(&reader)? .ok_or(Error::internal( - "Impossible to get the create date of an index", + "Impossible to get the create date of an index", ))?; let updated_at = index .main .updated_at(&reader)? 
.ok_or(Error::internal( - "Impossible to get the last update date of an index", + "Impossible to get the last update date of an index", ))?; let primary_key = match index.main.schema(&reader) { @@ -128,14 +126,15 @@ async fn get_index( }, _ => None, }; - - Ok(HttpResponse::Ok().json(IndexResponse { + let index_response = IndexResponse { name, uid: path.index_uid.clone(), created_at, updated_at, primary_key, - })) + }; + + Ok(HttpResponse::Ok().json(index_response)) } #[derive(Debug, Deserialize)] @@ -150,11 +149,11 @@ struct IndexCreateRequest { async fn create_index( data: web::Data, body: web::Json, -) -> Result { +) -> Result { if let (None, None) = (body.name.clone(), body.uid.clone()) { return Err(Error::bad_request( "Index creation must have an uid", - )); + ).into()); } let uid = match &body.uid { @@ -165,7 +164,7 @@ async fn create_index( { uid.to_owned() } else { - return Err(Error::InvalidIndexUid); + return Err(Error::InvalidIndexUid.into()); } } None => loop { @@ -181,39 +180,39 @@ async fn create_index( .create_index(&uid) .map_err(Error::create_index)?; - let mut writer = data.db.main_write_txn()?; + let index_response = data.db.main_write::<_, _, ResponseError>(|mut writer| { + let name = body.name.as_ref().unwrap_or(&uid); + created_index.main.put_name(&mut writer, name)?; - let name = body.name.as_ref().unwrap_or(&uid); - created_index.main.put_name(&mut writer, name)?; + let created_at = created_index + .main + .created_at(&writer)? + .ok_or(Error::internal("Impossible to read created at"))?; - let created_at = created_index - .main - .created_at(&writer)? - .ok_or(Error::internal("Impossible to read created at"))?; + let updated_at = created_index + .main + .updated_at(&writer)? + .ok_or(Error::internal("Impossible to read updated at"))?; - let updated_at = created_index - .main - .updated_at(&writer)? 
- .ok_or(Error::internal("Impossible to read updated at"))?; - - if let Some(id) = body.primary_key.clone() { - if let Some(mut schema) = created_index.main.schema(&writer)? { - schema - .set_primary_key(&id) - .map_err(Error::bad_request)?; - created_index.main.put_schema(&mut writer, &schema)?; + if let Some(id) = body.primary_key.clone() { + if let Some(mut schema) = created_index.main.schema(&writer)? { + schema + .set_primary_key(&id) + .map_err(Error::bad_request)?; + created_index.main.put_schema(&mut writer, &schema)?; + } } - } + let index_response = IndexResponse { + name: name.to_string(), + uid, + created_at, + updated_at, + primary_key: body.primary_key.clone(), + }; + Ok(index_response) + })?; - writer.commit()?; - - Ok(HttpResponse::Created().json(IndexResponse { - name: name.to_string(), - uid, - created_at, - updated_at, - primary_key: body.primary_key.clone(), - })) + Ok(HttpResponse::Created().json(index_response)) } #[derive(Debug, Deserialize)] @@ -238,53 +237,51 @@ async fn update_index( data: web::Data, path: web::Path, body: web::Json, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.main_write_txn()?; + data.db.main_write::<_, _, ResponseError>(|mut writer| { + if let Some(name) = &body.name { + index.main.put_name(&mut writer, name)?; + } - if let Some(name) = &body.name { - index.main.put_name(&mut writer, name)?; - } - - if let Some(id) = body.primary_key.clone() { - if let Some(mut schema) = index.main.schema(&writer)? { - match schema.primary_key() { - Some(_) => { - return Err(Error::bad_request( - "The primary key cannot be updated", - )); - } - None => { - schema.set_primary_key(&id)?; - index.main.put_schema(&mut writer, &schema)?; + if let Some(id) = body.primary_key.clone() { + if let Some(mut schema) = index.main.schema(&writer)? 
{ + match schema.primary_key() { + Some(_) => { + return Err(Error::bad_request( + "The primary key cannot be updated", + ).into()); + } + None => { + schema.set_primary_key(&id)?; + index.main.put_schema(&mut writer, &schema)?; + } } } } - } - - index.main.put_updated_at(&mut writer)?; - writer.commit()?; + index.main.put_updated_at(&mut writer)?; + Ok(()) + })?; let reader = data.db.main_read_txn()?; - let name = index.main.name(&reader)?.ok_or(Error::internal( - "Impossible to get the name of an index", + "Impossible to get the name of an index", ))?; let created_at = index .main .created_at(&reader)? .ok_or(Error::internal( - "Impossible to get the create date of an index", + "Impossible to get the create date of an index", ))?; let updated_at = index .main .updated_at(&reader)? .ok_or(Error::internal( - "Impossible to get the last update date of an index", + "Impossible to get the last update date of an index", ))?; let primary_key = match index.main.schema(&reader) { @@ -295,20 +292,22 @@ async fn update_index( _ => None, }; - Ok(HttpResponse::Ok().json(IndexResponse { + let index_response = IndexResponse { name, uid: path.index_uid.clone(), created_at, updated_at, primary_key, - })) + }; + + Ok(HttpResponse::Ok().json(index_response)) } #[delete("/indexes/{index_uid}", wrap = "Authentication::Private")] async fn delete_index( data: web::Data, path: web::Path, -) -> Result { +) -> Result { data.db.delete_index(&path.index_uid)?; Ok(HttpResponse::NoContent().finish()) @@ -327,7 +326,7 @@ struct UpdateParam { async fn get_update_status( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -342,7 +341,7 @@ async fn get_update_status( None => Err(Error::NotFound(format!( "Update {} not found", path.update_id - ))), + )).into()), } } @@ -350,7 +349,7 @@ async fn get_update_status( async fn get_all_updates_status( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db 
.open_index(&path.index_uid) diff --git a/meilisearch-http/src/routes/search.rs b/meilisearch-http/src/routes/search.rs index 4224edf79..b0ee0e6bf 100644 --- a/meilisearch-http/src/routes/search.rs +++ b/meilisearch-http/src/routes/search.rs @@ -7,7 +7,7 @@ use actix_web_macros::get; use serde::Deserialize; use serde_json::Value; -use crate::error::{Error, FacetCountError}; +use crate::error::{Error, FacetCountError, ResponseError}; use crate::helpers::meilisearch::IndexSearchExt; use crate::helpers::Authentication; use crate::routes::IndexParam; @@ -41,14 +41,13 @@ async fn search_with_url_query( data: web::Data, path: web::Path, params: web::Query, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let schema = index .main .schema(&reader)? @@ -88,9 +87,9 @@ async fn search_with_url_query( } if let Some(ref facet_filters) = params.facet_filters { - match index.main.attributes_for_faceting(&reader)? 
{ - Some(ref attrs) => { search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, attrs)?); }, - None => return Err(Error::FacetExpression("can't filter on facets, as no facet is set".to_string())) + let attrs = index.main.attributes_for_faceting(&reader)?; + if let Some(attrs) = attrs { + search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?); } } @@ -100,7 +99,7 @@ async fn search_with_url_query( let field_ids = prepare_facet_list(&facets, &schema, attrs)?; search_builder.add_facets(field_ids); }, - None => return Err(FacetCountError::NoFacetSet.into()) + None => todo!() /* return Err(FacetCountError::NoFacetSet.into()) */ } } @@ -160,8 +159,9 @@ async fn search_with_url_query( search_builder.get_matches(); } } + let search_result = search_builder.search(&reader)?; - Ok(HttpResponse::Ok().json(search_builder.search(&reader)?)) + Ok(HttpResponse::Ok().json(search_result)) } /// Parses the incoming string into an array of attributes for which to return a count. 
It returns diff --git a/meilisearch-http/src/routes/setting.rs b/meilisearch-http/src/routes/setting.rs index 8d5c48530..99ea052a9 100644 --- a/meilisearch-http/src/routes/setting.rs +++ b/meilisearch-http/src/routes/setting.rs @@ -3,7 +3,7 @@ use actix_web_macros::{delete, get, post}; use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES}; use std::collections::{BTreeMap, BTreeSet, HashSet}; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -33,19 +33,20 @@ async fn update_all( data: web::Data, path: web::Path, body: web::Json, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; - let settings = body - .into_inner() - .into_update() - .map_err(Error::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let settings = body + .into_inner() + .into_update() + .map_err(Error::bad_request)?; + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -54,7 +55,7 @@ async fn update_all( async fn get_all( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -62,19 +63,21 @@ async fn get_all( let reader = data.db.main_read_txn()?; - let stop_words_fst = index.main.stop_words_fst(&reader)?; - let stop_words = stop_words_fst.stream().into_strs()?; - let stop_words: BTreeSet = stop_words.into_iter().collect(); + let stop_words: BTreeSet = index + .main + .stop_words_list(&reader)? 
+ .into_iter() + .collect(); - let synonyms_fst = index.main.synonyms_fst(&reader)?; - let synonyms_list = synonyms_fst.stream().into_strs()?; + let synonyms_list = index.main.synonyms_list(&reader)?; let mut synonyms = BTreeMap::new(); let index_synonyms = &index.synonyms; for synonym in synonyms_list { let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; - let list = alternative_list.stream().into_strs()?; - synonyms.insert(synonym, list); + if let Some(list) = alternative_list { + synonyms.insert(synonym, list); + } } let ranking_rules = index @@ -134,12 +137,11 @@ async fn get_all( async fn delete_all( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; let settings = SettingsUpdate { ranking_rules: UpdateState::Clear, @@ -153,8 +155,10 @@ async fn delete_all( attributes_for_faceting: UpdateState::Clear, }; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|writer| { + let update_id = index.settings_update(writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -166,7 +170,7 @@ async fn delete_all( async fn get_rules( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -192,7 +196,7 @@ async fn update_rules( data: web::Data, path: web::Path, body: web::Json>>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -203,10 +207,12 @@ async fn update_rules( ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; let settings = settings.into_update().map_err(Error::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + + let update_id = data.db.update_write::<_, _, 
ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -218,21 +224,21 @@ async fn update_rules( async fn delete_rules( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; let settings = SettingsUpdate { ranking_rules: UpdateState::Clear, ..SettingsUpdate::default() }; - let update_id = index.settings_update(&mut writer, settings)?; - - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -244,7 +250,7 @@ async fn delete_rules( async fn get_distinct( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -263,7 +269,7 @@ async fn update_distinct( data: web::Data, path: web::Path, body: web::Json>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -274,10 +280,12 @@ async fn update_distinct( ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; let settings = settings.into_update().map_err(Error::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -289,21 +297,21 @@ async fn update_distinct( async fn delete_distinct( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) 
.ok_or(Error::index_not_found(&path.index_uid))?; - let mut writer = data.db.update_write_txn()?; let settings = SettingsUpdate { distinct_attribute: UpdateState::Clear, ..SettingsUpdate::default() }; - let update_id = index.settings_update(&mut writer, settings)?; - - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -315,7 +323,7 @@ async fn delete_distinct( async fn get_searchable( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -336,7 +344,7 @@ async fn update_searchable( data: web::Data, path: web::Path, body: web::Json>>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -347,10 +355,12 @@ async fn update_searchable( ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; let settings = settings.into_update().map_err(Error::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -362,7 +372,7 @@ async fn update_searchable( async fn delete_searchable( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -373,9 +383,10 @@ async fn delete_searchable( ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; 
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -387,7 +398,7 @@ async fn delete_searchable( async fn get_displayed( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -410,7 +421,7 @@ async fn update_displayed( data: web::Data, path: web::Path, body: web::Json>>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -421,10 +432,12 @@ async fn update_displayed( ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; let settings = settings.into_update().map_err(Error::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -436,7 +449,7 @@ async fn update_displayed( async fn delete_displayed( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -447,9 +460,10 @@ async fn delete_displayed( ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -461,7 +475,7 @@ async fn delete_displayed( async fn get_accept_new_fields( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -483,7 +497,7 @@ async fn update_accept_new_fields( data: web::Data, path: web::Path, body: web::Json>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -494,10 
+508,12 @@ async fn update_accept_new_fields( ..Settings::default() }; - let mut writer = data.db.update_write_txn()?; let settings = settings.into_update().map_err(Error::bad_request)?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/src/routes/stats.rs b/meilisearch-http/src/routes/stats.rs index e7de46b42..5d5988518 100644 --- a/meilisearch-http/src/routes/stats.rs +++ b/meilisearch-http/src/routes/stats.rs @@ -10,7 +10,7 @@ use serde::Serialize; use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt}; use walkdir::WalkDir; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::IndexParam; use crate::Data; @@ -35,7 +35,7 @@ struct IndexStatsResponse { async fn index_stats( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -71,7 +71,7 @@ struct StatsResult { } #[get("/stats", wrap = "Authentication::Private")] -async fn get_stats(data: web::Data) -> Result { +async fn get_stats(data: web::Data) -> Result { let mut index_list = HashMap::new(); let reader = data.db.main_read_txn()?; @@ -111,7 +111,7 @@ async fn get_stats(data: web::Data) -> Result { .filter(|metadata| metadata.is_file()) .fold(0, |acc, m| acc + m.len()); - let last_update = data.last_update(&reader)?; + let last_update = data.db.last_update(&reader)?; Ok(HttpResponse::Ok().json(StatsResult { database_size, diff --git a/meilisearch-http/src/routes/stop_words.rs b/meilisearch-http/src/routes/stop_words.rs index f09dd2442..b420c149b 100644 --- a/meilisearch-http/src/routes/stop_words.rs +++ b/meilisearch-http/src/routes/stop_words.rs @@ -3,7 +3,7 
@@ use actix_web_macros::{delete, get, post}; use meilisearch_core::settings::{SettingsUpdate, UpdateState}; use std::collections::BTreeSet; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -19,14 +19,13 @@ pub fn services(cfg: &mut web::ServiceConfig) { async fn get( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) .ok_or(Error::index_not_found(&path.index_uid))?; let reader = data.db.main_read_txn()?; - let stop_words_fst = index.main.stop_words_fst(&reader)?; - let stop_words = stop_words_fst.stream().into_strs()?; + let stop_words = index.main.stop_words_list(&reader)?; Ok(HttpResponse::Ok().json(stop_words)) } @@ -39,7 +38,7 @@ async fn update( data: web::Data, path: web::Path, body: web::Json>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -50,9 +49,10 @@ async fn update( ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -64,7 +64,7 @@ async fn update( async fn delete( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -75,9 +75,10 @@ async fn delete( ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; 
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/src/routes/synonym.rs b/meilisearch-http/src/routes/synonym.rs index fd3888e29..c334297af 100644 --- a/meilisearch-http/src/routes/synonym.rs +++ b/meilisearch-http/src/routes/synonym.rs @@ -5,7 +5,7 @@ use actix_web_macros::{delete, get, post}; use indexmap::IndexMap; use meilisearch_core::settings::{SettingsUpdate, UpdateState}; -use crate::error::Error; +use crate::error::{Error, ResponseError}; use crate::helpers::Authentication; use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::Data; @@ -21,7 +21,7 @@ pub fn services(cfg: &mut web::ServiceConfig) { async fn get( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -29,15 +29,15 @@ async fn get( let reader = data.db.main_read_txn()?; - let synonyms_fst = index.main.synonyms_fst(&reader)?; - let synonyms_list = synonyms_fst.stream().into_strs()?; + let synonyms_list = index.main.synonyms_list(&reader)?; let mut synonyms = IndexMap::new(); let index_synonyms = &index.synonyms; for synonym in synonyms_list { let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; - let list = alternative_list.stream().into_strs()?; - synonyms.insert(synonym, list); + if let Some(list) = alternative_list { + synonyms.insert(synonym, list); + } } Ok(HttpResponse::Ok().json(synonyms)) @@ -51,7 +51,7 @@ async fn update( data: web::Data, path: web::Path, body: web::Json>>, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -62,9 +62,10 @@ async fn update( ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; 
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } @@ -76,7 +77,7 @@ async fn update( async fn delete( data: web::Data, path: web::Path, -) -> Result { +) -> Result { let index = data .db .open_index(&path.index_uid) @@ -87,10 +88,10 @@ async fn delete( ..SettingsUpdate::default() }; - let mut writer = data.db.update_write_txn()?; - let update_id = index.settings_update(&mut writer, settings)?; - - writer.commit()?; + let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| { + let update_id = index.settings_update(&mut writer, settings)?; + Ok(update_id) + })?; Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) } diff --git a/meilisearch-http/tests/documents_delete.rs b/meilisearch-http/tests/documents_delete.rs index 7edc9ac63..48b8f70ee 100644 --- a/meilisearch-http/tests/documents_delete.rs +++ b/meilisearch-http/tests/documents_delete.rs @@ -21,6 +21,7 @@ async fn delete_batch() { server.populate_movies().await; let (_response, status_code) = server.get_document(419704).await; + println!("{:?}", _response); assert_eq!(status_code, 200); let body = serde_json::json!([419704, 512200, 181812]); diff --git a/meilisearch-schema/Cargo.toml b/meilisearch-schema/Cargo.toml index 712fdb008..a795fa1db 100644 --- a/meilisearch-schema/Cargo.toml +++ b/meilisearch-schema/Cargo.toml @@ -8,6 +8,7 @@ edition = "2018" [dependencies] bincode = "1.2.1" indexmap = { version = "1.3.2", features = ["serde-1"] } +meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" } serde = { version = "1.0.105", features = ["derive"] } serde_json = { version = "1.0.50", features = ["preserve_order"] } toml = { version = "0.5.6", features = ["preserve_order"] } diff --git a/meilisearch-schema/src/error.rs b/meilisearch-schema/src/error.rs index c31596df2..c1b0ffb3f 100644 --- a/meilisearch-schema/src/error.rs +++ b/meilisearch-schema/src/error.rs @@ -1,6 +1,7 @@ - use std::{error, fmt}; +use 
meilisearch_error::{ErrorCode, Code}; + pub type SResult = Result; #[derive(Debug)] @@ -22,3 +23,9 @@ impl fmt::Display for Error { } impl error::Error for Error {} + +impl ErrorCode for Error { + fn error_code(&self) -> Code { + unimplemented!() + } +}