refactor errors / isolate core/http errors

This commit is contained in:
mpostma 2020-05-22 12:03:57 +02:00
parent e2db197b3f
commit d69180ec67
29 changed files with 585 additions and 480 deletions

10
Cargo.lock generated
View File

@ -1653,6 +1653,7 @@ dependencies = [
"jemallocator", "jemallocator",
"levenshtein_automata", "levenshtein_automata",
"log", "log",
"meilisearch-error",
"meilisearch-schema", "meilisearch-schema",
"meilisearch-tokenizer", "meilisearch-tokenizer",
"meilisearch-types", "meilisearch-types",
@ -1673,6 +1674,13 @@ dependencies = [
"zerocopy", "zerocopy",
] ]
[[package]]
name = "meilisearch-error"
version = "0.10.1"
dependencies = [
"actix-http",
]
[[package]] [[package]]
name = "meilisearch-http" name = "meilisearch-http"
version = "0.10.1" version = "0.10.1"
@ -1699,6 +1707,7 @@ dependencies = [
"log", "log",
"main_error", "main_error",
"meilisearch-core", "meilisearch-core",
"meilisearch-error",
"meilisearch-schema", "meilisearch-schema",
"meilisearch-tokenizer", "meilisearch-tokenizer",
"mime", "mime",
@ -1729,6 +1738,7 @@ version = "0.10.1"
dependencies = [ dependencies = [
"bincode", "bincode",
"indexmap", "indexmap",
"meilisearch-error",
"serde", "serde",
"serde_json", "serde_json",
"toml", "toml",

View File

@ -24,6 +24,7 @@ intervaltree = "0.2.5"
itertools = "0.9.0" itertools = "0.9.0"
levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] } levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] }
log = "0.4.8" log = "0.4.8"
meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" }
meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" } meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" }
meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.10.1" } meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.10.1" }
meilisearch-types = { path = "../meilisearch-types", version = "0.10.1" } meilisearch-types = { path = "../meilisearch-types", version = "0.10.1" }

View File

@ -4,17 +4,23 @@ use std::path::Path;
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use std::{fs, thread}; use std::{fs, thread};
use chrono::{DateTime, Utc};
use crossbeam_channel::{Receiver, Sender}; use crossbeam_channel::{Receiver, Sender};
use heed::types::{Str, Unit}; use heed::types::{Str, Unit, SerdeBincode};
use heed::{CompactionOption, Result as ZResult}; use heed::{CompactionOption, Result as ZResult};
use log::debug; use log::debug;
use meilisearch_schema::Schema; use meilisearch_schema::Schema;
use crate::{store, update, Index, MResult}; use crate::{store, update, Index, MResult, Error};
pub type BoxUpdateFn = Box<dyn Fn(&str, update::ProcessedUpdateResult) + Send + Sync + 'static>; pub type BoxUpdateFn = Box<dyn Fn(&str, update::ProcessedUpdateResult) + Send + Sync + 'static>;
type ArcSwapFn = arc_swap::ArcSwapOption<BoxUpdateFn>; type ArcSwapFn = arc_swap::ArcSwapOption<BoxUpdateFn>;
type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
const UNHEALTHY_KEY: &str = "_is_unhealthy";
const LAST_UPDATE_KEY: &str = "last-update";
pub struct MainT; pub struct MainT;
pub struct UpdateT; pub struct UpdateT;
@ -319,20 +325,66 @@ impl Database {
self.update_fn.swap(None); self.update_fn.swap(None);
} }
pub fn main_read_txn(&self) -> heed::Result<heed::RoTxn<MainT>> { pub fn main_read_txn(&self) -> MResult<heed::RoTxn<MainT>> {
self.env.typed_read_txn::<MainT>() Ok(self.env.typed_read_txn::<MainT>()?)
} }
pub fn main_write_txn(&self) -> heed::Result<heed::RwTxn<MainT>> { pub(crate) fn main_write_txn(&self) -> MResult<heed::RwTxn<MainT>> {
self.env.typed_write_txn::<MainT>() Ok(self.env.typed_write_txn::<MainT>()?)
} }
pub fn update_read_txn(&self) -> heed::Result<heed::RoTxn<UpdateT>> { /// Calls f providing it with a writer to the main database. After f is called, makes sure the
self.update_env.typed_read_txn::<UpdateT>() /// transaction is committed. Returns whatever result f returns.
pub fn main_write<F, R, E>(&self, f: F) -> Result<R, E>
where
F: FnOnce(&mut heed::RwTxn<MainT>) -> Result<R, E>,
E: From<Error>,
{
let mut writer = self.main_write_txn()?;
let result = f(&mut writer)?;
writer.commit().map_err(Error::Heed)?;
Ok(result)
} }
pub fn update_write_txn(&self) -> heed::Result<heed::RwTxn<UpdateT>> { /// provides a context with a reader to the main database. experimental.
self.update_env.typed_write_txn::<UpdateT>() pub fn main_read<F, R, E>(&self, f: F) -> Result<R, E>
where
F: Fn(&heed::RoTxn<MainT>) -> Result<R, E>,
E: From<Error>,
{
let reader = self.main_read_txn()?;
f(&reader)
}
pub fn update_read_txn(&self) -> MResult<heed::RoTxn<UpdateT>> {
Ok(self.update_env.typed_read_txn::<UpdateT>()?)
}
/// Calls f providing it with a writer to the update database. After f is called, makes sure the
/// transaction is committed. Returns whatever result f returns.
pub fn update_write<F, R, E>(&self, f: F) -> Result<R, E>
where
F: FnOnce(&mut heed::RwTxn<UpdateT>) -> Result<R, E>,
E: From<Error>,
{
let mut writer = self.update_write_txn()?;
let result = f(&mut writer)?;
writer.commit().map_err(Error::Heed)?;
Ok(result)
}
/// provides a context with a reader to the update database. experimental.
pub fn update_read<F, R, E>(&self, f: F) -> Result<R, E>
where
F: Fn(&heed::RoTxn<UpdateT>) -> Result<R, E>,
E: From<Error>,
{
let reader = self.update_read_txn()?;
f(&reader)
}
pub fn update_write_txn(&self) -> MResult<heed::RwTxn<UpdateT>> {
Ok(self.update_env.typed_write_txn::<UpdateT>()?)
} }
pub fn copy_and_compact_to_path<P: AsRef<Path>>(&self, path: P) -> ZResult<(File, File)> { pub fn copy_and_compact_to_path<P: AsRef<Path>>(&self, path: P) -> ZResult<(File, File)> {
@ -362,9 +414,41 @@ impl Database {
indexes.keys().cloned().collect() indexes.keys().cloned().collect()
} }
pub fn common_store(&self) -> heed::PolyDatabase { pub(crate) fn common_store(&self) -> heed::PolyDatabase {
self.common_store self.common_store
} }
pub fn last_update(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
match self.common_store()
.get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)?
{
Some(datetime) => Ok(Some(datetime)),
None => Ok(None),
}
}
pub fn set_last_update(&self, writer: &mut heed::RwTxn<MainT>, time: &DateTime<Utc>) -> MResult<()> {
self.common_store()
.put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, time)?;
Ok(())
}
pub fn set_healthy(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
let common_store = self.common_store();
common_store.delete::<_, Str>(writer, UNHEALTHY_KEY)?;
Ok(())
}
pub fn set_unhealthy(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
let common_store = self.common_store();
common_store.put::<_, Str, Unit>(writer, UNHEALTHY_KEY, &())?;
Ok(())
}
pub fn get_health(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<()>> {
let common_store = self.common_store();
Ok(common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY)?)
}
} }
#[cfg(test)] #[cfg(test)]
@ -1094,3 +1178,4 @@ mod tests {
assert_matches!(iter.next(), None); assert_matches!(iter.next(), None);
} }
} }

View File

@ -9,6 +9,8 @@ pub use fst::Error as FstError;
pub use heed::Error as HeedError; pub use heed::Error as HeedError;
pub use pest::error as pest_error; pub use pest::error as pest_error;
use meilisearch_error::{ErrorCode, Code};
pub type MResult<T> = Result<T, Error>; pub type MResult<T> = Result<T, Error>;
#[derive(Debug)] #[derive(Debug)]
@ -21,7 +23,7 @@ pub enum Error {
MissingDocumentId, MissingDocumentId,
MaxFieldsLimitExceeded, MaxFieldsLimitExceeded,
Schema(meilisearch_schema::Error), Schema(meilisearch_schema::Error),
Zlmdb(heed::Error), Heed(heed::Error),
Fst(fst::Error), Fst(fst::Error),
SerdeJson(SerdeJsonError), SerdeJson(SerdeJsonError),
Bincode(bincode::Error), Bincode(bincode::Error),
@ -32,6 +34,13 @@ pub enum Error {
FacetError(FacetError), FacetError(FacetError),
} }
impl ErrorCode for Error {
fn error_code(&self) -> Code {
//TODO populate codes
Code::Other
}
}
impl From<io::Error> for Error { impl From<io::Error> for Error {
fn from(error: io::Error) -> Error { fn from(error: io::Error) -> Error {
Error::Io(error) Error::Io(error)
@ -74,7 +83,7 @@ impl From<meilisearch_schema::Error> for Error {
impl From<HeedError> for Error { impl From<HeedError> for Error {
fn from(error: HeedError) -> Error { fn from(error: HeedError) -> Error {
Error::Zlmdb(error) Error::Heed(error)
} }
} }
@ -126,7 +135,7 @@ impl fmt::Display for Error {
MissingDocumentId => write!(f, "document id is missing"), MissingDocumentId => write!(f, "document id is missing"),
MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"), MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
Schema(e) => write!(f, "schema error; {}", e), Schema(e) => write!(f, "schema error; {}", e),
Zlmdb(e) => write!(f, "heed error; {}", e), Heed(e) => write!(f, "heed error; {}", e),
Fst(e) => write!(f, "fst error; {}", e), Fst(e) => write!(f, "fst error; {}", e),
SerdeJson(e) => write!(f, "serde json error; {}", e), SerdeJson(e) => write!(f, "serde json error; {}", e),
Bincode(e) => write!(f, "bincode error; {}", e), Bincode(e) => write!(f, "bincode error; {}", e),
@ -174,6 +183,7 @@ pub enum FacetError {
AttributeNotFound(String), AttributeNotFound(String),
AttributeNotSet { expected: Vec<String>, found: String }, AttributeNotSet { expected: Vec<String>, found: String },
InvalidDocumentAttribute(String), InvalidDocumentAttribute(String),
NoFacetAttributes,
} }
impl FacetError { impl FacetError {
@ -198,6 +208,7 @@ impl fmt::Display for FacetError {
AttributeNotFound(attr) => write!(f, "unknown {:?} attribute", attr), AttributeNotFound(attr) => write!(f, "unknown {:?} attribute", attr),
AttributeNotSet { found, expected } => write!(f, "`{}` is not set as a faceted attribute. available facet attributes: {}", found, expected.join(", ")), AttributeNotSet { found, expected } => write!(f, "`{}` is not set as a faceted attribute. available facet attributes: {}", found, expected.join(", ")),
InvalidDocumentAttribute(attr) => write!(f, "invalid document attribute {}, accepted types: String and [String]", attr), InvalidDocumentAttribute(attr) => write!(f, "invalid document attribute {}, accepted types: String and [String]", attr),
NoFacetAttributes => write!(f, "No attributes are set for faceting"),
} }
} }
} }

View File

@ -13,7 +13,7 @@ use meilisearch_schema::{FieldId, Schema};
use meilisearch_types::DocumentId; use meilisearch_types::DocumentId;
use crate::database::MainT; use crate::database::MainT;
use crate::error::{FacetError, Error}; use crate::error::{FacetError, MResult};
use crate::store::BEU16; use crate::store::BEU16;
/// Data structure used to represent a boolean expression in the form of nested arrays. /// Data structure used to represent a boolean expression in the form of nested arrays.
@ -34,14 +34,13 @@ impl FacetFilter {
s: &str, s: &str,
schema: &Schema, schema: &Schema,
attributes_for_faceting: &[FieldId], attributes_for_faceting: &[FieldId],
) -> Result<Self, FacetError> { ) -> MResult<FacetFilter> {
let parsed = serde_json::from_str::<Value>(s).map_err(|e| FacetError::ParsingError(e.to_string()))?; let parsed = serde_json::from_str::<Value>(s).map_err(|e| FacetError::ParsingError(e.to_string()))?;
let mut filter = Vec::new(); let mut filter = Vec::new();
match parsed { match parsed {
Value::Array(and_exprs) => { Value::Array(and_exprs) => {
if and_exprs.is_empty() { if and_exprs.is_empty() {
return Err(FacetError::EmptyArray); return Err(FacetError::EmptyArray.into());
} }
for expr in and_exprs { for expr in and_exprs {
match expr { match expr {
@ -51,7 +50,7 @@ impl FacetFilter {
} }
Value::Array(or_exprs) => { Value::Array(or_exprs) => {
if or_exprs.is_empty() { if or_exprs.is_empty() {
return Err(FacetError::EmptyArray); return Err(FacetError::EmptyArray.into());
} }
let mut inner = Vec::new(); let mut inner = Vec::new();
for expr in or_exprs { for expr in or_exprs {
@ -60,17 +59,17 @@ impl FacetFilter {
let key = FacetKey::from_str( &s, schema, attributes_for_faceting)?; let key = FacetKey::from_str( &s, schema, attributes_for_faceting)?;
inner.push(key); inner.push(key);
} }
bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value)), bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value).into()),
} }
} }
filter.push(Either::Left(inner)); filter.push(Either::Left(inner));
} }
bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value)), bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value).into()),
} }
} }
return Ok(Self(filter)); return Ok(Self(filter));
} }
bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value)), bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value).into()),
} }
} }
} }
@ -183,7 +182,7 @@ pub fn facet_map_from_docids(
index: &crate::Index, index: &crate::Index,
document_ids: &[DocumentId], document_ids: &[DocumentId],
attributes_for_facetting: &[FieldId], attributes_for_facetting: &[FieldId],
) -> Result<HashMap<FacetKey, Vec<DocumentId>>, Error> { ) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
let mut facet_map = HashMap::new(); let mut facet_map = HashMap::new();
for document_id in document_ids { for document_id in document_ids {
for result in index for result in index
@ -210,7 +209,7 @@ pub fn facet_map_from_docs(
schema: &Schema, schema: &Schema,
documents: &HashMap<DocumentId, IndexMap<String, Value>>, documents: &HashMap<DocumentId, IndexMap<String, Value>>,
attributes_for_facetting: &[FieldId], attributes_for_facetting: &[FieldId],
) -> Result<HashMap<FacetKey, Vec<DocumentId>>, Error> { ) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
let mut facet_map = HashMap::new(); let mut facet_map = HashMap::new();
let attributes_for_facetting = attributes_for_facetting let attributes_for_facetting = attributes_for_facetting
.iter() .iter()

View File

@ -268,7 +268,7 @@ mod tests {
let alternatives = self let alternatives = self
.index .index
.synonyms .synonyms
.synonyms(&writer, word.as_bytes()) .synonyms_fst(&writer, word.as_bytes())
.unwrap(); .unwrap();
let new = sdset_into_fstset(&new); let new = sdset_into_fstset(&new);

View File

@ -147,7 +147,7 @@ fn split_best_frequency<'a>(reader: &heed::RoTxn<MainT>, ctx: &Context, word: &'
fn fetch_synonyms(reader: &heed::RoTxn<MainT>, ctx: &Context, words: &[&str]) -> MResult<Vec<Vec<String>>> { fn fetch_synonyms(reader: &heed::RoTxn<MainT>, ctx: &Context, words: &[&str]) -> MResult<Vec<Vec<String>>> {
let words = normalize_str(&words.join(" ")); let words = normalize_str(&words.join(" "));
let set = ctx.synonyms.synonyms(reader, words.as_bytes())?; let set = ctx.synonyms.synonyms_fst(reader, words.as_bytes())?;
let mut strings = Vec::new(); let mut strings = Vec::new();
let mut stream = set.stream(); let mut stream = set.stream();

View File

@ -4,6 +4,7 @@ use crate::DocumentId;
use heed::types::OwnedType; use heed::types::OwnedType;
use heed::Result as ZResult; use heed::Result as ZResult;
use meilisearch_schema::IndexedPos; use meilisearch_schema::IndexedPos;
use crate::MResult;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub struct DocumentsFieldsCounts { pub struct DocumentsFieldsCounts {
@ -60,7 +61,7 @@ impl DocumentsFieldsCounts {
Ok(DocumentFieldsCountsIter { iter }) Ok(DocumentFieldsCountsIter { iter })
} }
pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<DocumentsIdsIter<'txn>> { pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn<MainT>) -> MResult<DocumentsIdsIter<'txn>> {
let iter = self.documents_fields_counts.iter(reader)?; let iter = self.documents_fields_counts.iter(reader)?;
Ok(DocumentsIdsIter { Ok(DocumentsIdsIter {
last_seen_id: None, last_seen_id: None,
@ -102,7 +103,7 @@ pub struct DocumentsIdsIter<'txn> {
} }
impl Iterator for DocumentsIdsIter<'_> { impl Iterator for DocumentsIdsIter<'_> {
type Item = ZResult<DocumentId>; type Item = MResult<DocumentId>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
for result in &mut self.iter { for result in &mut self.iter {
@ -114,7 +115,7 @@ impl Iterator for DocumentsIdsIter<'_> {
return Some(Ok(document_id)); return Some(Ok(document_id));
} }
} }
Err(e) => return Some(Err(e)), Err(e) => return Some(Err(e.into())),
} }
} }
None None

View File

@ -2,14 +2,13 @@ use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use heed::Result as ZResult;
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str}; use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
use meilisearch_schema::{FieldId, Schema}; use meilisearch_schema::{FieldId, Schema};
use meilisearch_types::DocumentId; use meilisearch_types::DocumentId;
use sdset::Set; use sdset::Set;
use crate::database::MainT; use crate::database::MainT;
use crate::RankedMap; use crate::{RankedMap, MResult};
use crate::settings::RankingRule; use crate::settings::RankingRule;
use crate::{FstSetCow, FstMapCow}; use crate::{FstSetCow, FstMapCow};
use super::{CowSet, DocumentsIds}; use super::{CowSet, DocumentsIds};
@ -41,39 +40,38 @@ pub struct Main {
} }
impl Main { impl Main {
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> { pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
self.main.clear(writer) Ok(self.main.clear(writer)?)
} }
pub fn put_name(self, writer: &mut heed::RwTxn<MainT>, name: &str) -> ZResult<()> { pub fn put_name(self, writer: &mut heed::RwTxn<MainT>, name: &str) -> MResult<()> {
self.main.put::<_, Str, Str>(writer, NAME_KEY, name) Ok(self.main.put::<_, Str, Str>(writer, NAME_KEY, name)?)
} }
pub fn name(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> { pub fn name(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<String>> {
Ok(self Ok(self
.main .main
.get::<_, Str, Str>(reader, NAME_KEY)? .get::<_, Str, Str>(reader, NAME_KEY)?
.map(|name| name.to_owned())) .map(|name| name.to_owned()))
} }
pub fn put_created_at(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> { pub fn put_created_at(self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
self.main Ok(self.main.put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now())?)
.put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now())
} }
pub fn created_at(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<DateTime<Utc>>> { pub fn created_at(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY) Ok(self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY)?)
} }
pub fn put_updated_at(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> { pub fn put_updated_at(self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
self.main Ok(self.main.put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now())?)
.put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now())
} }
pub fn updated_at(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<DateTime<Utc>>> { pub fn updated_at(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY) Ok(self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY)?)
} }
<<<<<<< HEAD
pub fn put_internal_docids(self, writer: &mut heed::RwTxn<MainT>, ids: &sdset::Set<DocumentId>) -> ZResult<()> { pub fn put_internal_docids(self, writer: &mut heed::RwTxn<MainT>, ids: &sdset::Set<DocumentId>) -> ZResult<()> {
self.main.put::<_, Str, DocumentsIds>(writer, INTERNAL_DOCIDS_KEY, ids) self.main.put::<_, Str, DocumentsIds>(writer, INTERNAL_DOCIDS_KEY, ids)
} }
@ -120,6 +118,21 @@ impl Main {
let mut build = fst::MapBuilder::memory(); let mut build = fst::MapBuilder::memory();
while let Some((docid, values)) = op.next() { while let Some((docid, values)) = op.next() {
build.insert(docid, values[0].value).unwrap(); build.insert(docid, values[0].value).unwrap();
=======
pub fn put_words_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> MResult<()> {
let bytes = fst.as_fst().as_bytes();
Ok(self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, bytes)?)
}
pub unsafe fn static_words_fst(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<fst::Set>> {
match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? {
Some(bytes) => {
let bytes: &'static [u8] = std::mem::transmute(bytes);
let set = fst::Set::from_static_slice(bytes).unwrap();
Ok(Some(set))
}
None => Ok(None),
>>>>>>> 5c760d3... refactor errors / isolate core/http errors
} }
drop(op); drop(op);
@ -161,58 +174,76 @@ impl Main {
self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes()) self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes())
} }
pub fn words_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<FstSetCow> { pub fn words_fst(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<fst::Set>> {
match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? { match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? {
Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()),
None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()),
} }
} }
pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> ZResult<()> { pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> MResult<()> {
self.main.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema) Ok(self.main.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)?)
} }
pub fn schema(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Schema>> { pub fn schema(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<Schema>> {
self.main.get::<_, Str, SerdeBincode<Schema>>(reader, SCHEMA_KEY) Ok(self.main.get::<_, Str, SerdeBincode<Schema>>(reader, SCHEMA_KEY)?)
} }
pub fn delete_schema(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> { pub fn delete_schema(self, writer: &mut heed::RwTxn<MainT>) -> MResult<bool> {
self.main.delete::<_, Str>(writer, SCHEMA_KEY) Ok(self.main.delete::<_, Str>(writer, SCHEMA_KEY)?)
} }
pub fn put_ranked_map(self, writer: &mut heed::RwTxn<MainT>, ranked_map: &RankedMap) -> ZResult<()> { pub fn put_ranked_map(self, writer: &mut heed::RwTxn<MainT>, ranked_map: &RankedMap) -> MResult<()> {
self.main.put::<_, Str, SerdeBincode<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map) Ok(self.main.put::<_, Str, SerdeBincode<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map)?)
} }
pub fn ranked_map(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<RankedMap>> { pub fn ranked_map(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<RankedMap>> {
self.main.get::<_, Str, SerdeBincode<RankedMap>>(reader, RANKED_MAP_KEY) Ok(self.main.get::<_, Str, SerdeBincode<RankedMap>>(reader, RANKED_MAP_KEY)?)
} }
pub fn put_synonyms_fst<A: AsRef<[u8]>>(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set<A>) -> ZResult<()> { pub fn put_synonyms_fst<A: AsRef<[u8]>>(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set<A>) -> MResult<()> {
let bytes = fst.as_fst().as_bytes(); let bytes = fst.as_fst().as_bytes();
self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes) Ok(self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes)?)
} }
pub fn synonyms_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<FstSetCow> { pub(crate) fn synonyms_fst(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<fst::Set>> {
match self.main.get::<_, Str, ByteSlice>(reader, SYNONYMS_KEY)? { match self.main.get::<_, Str, ByteSlice>(reader, SYNONYMS_KEY)? {
Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()),
None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()),
} }
} }
pub fn put_stop_words_fst<A: AsRef<[u8]>>(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set<A>) -> ZResult<()> { pub fn synonyms_list(self, reader: &heed::RoTxn<MainT>) -> MResult<Vec<String>> {
let bytes = fst.as_fst().as_bytes(); let synonyms = self
self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes) .synonyms_fst(&reader)?
.unwrap_or_default()
.stream()
.into_strs()?;
Ok(synonyms)
} }
pub fn stop_words_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<FstSetCow> { pub fn put_stop_words_fst<A: AsRef<[u8]>>(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set<A>) -> MResult<()> {
let bytes = fst.as_fst().as_bytes();
Ok(self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)?)
}
pub(crate) fn stop_words_fst(self, reader: &heed::RoTxn<MainT>) -> MResult<FstSetCow> {
match self.main.get::<_, Str, ByteSlice>(reader, STOP_WORDS_KEY)? { match self.main.get::<_, Str, ByteSlice>(reader, STOP_WORDS_KEY)? {
Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()),
None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()),
} }
} }
pub fn put_number_of_documents<F>(self, writer: &mut heed::RwTxn<MainT>, f: F) -> ZResult<u64> pub fn stop_words_list(self, reader: &heed::RoTxn<MainT>) -> MResult<Vec<String>> {
let stop_word_list = self
.stop_words_fst(reader)?
.unwrap_or_default()
.stream()
.into_strs()?;
Ok(stop_word_list)
}
pub fn put_number_of_documents<F>(self, writer: &mut heed::RwTxn<MainT>, f: F) -> MResult<u64>
where where
F: Fn(u64) -> u64, F: Fn(u64) -> u64,
{ {
@ -222,11 +253,10 @@ impl Main {
Ok(new) Ok(new)
} }
pub fn number_of_documents(self, reader: &heed::RoTxn<MainT>) -> ZResult<u64> { pub fn number_of_documents(self, reader: &heed::RoTxn<MainT>) -> MResult<u64> {
match self match self
.main .main
.get::<_, Str, OwnedType<u64>>(reader, NUMBER_OF_DOCUMENTS_KEY)? .get::<_, Str, OwnedType<u64>>(reader, NUMBER_OF_DOCUMENTS_KEY)? {
{
Some(value) => Ok(value), Some(value) => Ok(value),
None => Ok(0), None => Ok(0),
} }
@ -235,13 +265,12 @@ impl Main {
pub fn put_fields_distribution( pub fn put_fields_distribution(
self, self,
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
fields_distribution: &FreqsMap, fields_frequency: &FreqsMap,
) -> ZResult<()> { ) -> MResult<()> {
self.main Ok(self.main.put::<_, Str, SerdeFreqsMap>(writer, FIELDS_FREQUENCY_KEY, fields_frequency)?)
.put::<_, Str, SerdeFreqsMap>(writer, FIELDS_DISTRIBUTION_KEY, fields_distribution)
} }
pub fn fields_distribution(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<FreqsMap>> { pub fn fields_distribution(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<FreqsMap>> {
match self match self
.main .main
.get::<_, Str, SerdeFreqsMap>(reader, FIELDS_DISTRIBUTION_KEY)? .get::<_, Str, SerdeFreqsMap>(reader, FIELDS_DISTRIBUTION_KEY)?
@ -251,51 +280,50 @@ impl Main {
} }
} }
pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<Cow<'txn, Set<FieldId>>>> { pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn<MainT>) -> MResult<Option<Cow<'txn, Set<FieldId>>>> {
self.main.get::<_, Str, CowSet<FieldId>>(reader, ATTRIBUTES_FOR_FACETING_KEY) Ok(self.main.get::<_, Str, CowSet<FieldId>>(reader, ATTRIBUTES_FOR_FACETING_KEY)?)
} }
pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>, attributes: &Set<FieldId>) -> ZResult<()> { pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>, attributes: &Set<FieldId>) -> MResult<()> {
self.main.put::<_, Str, CowSet<FieldId>>(writer, ATTRIBUTES_FOR_FACETING_KEY, attributes) Ok(self.main.put::<_, Str, CowSet<FieldId>>(writer, ATTRIBUTES_FOR_FACETING_KEY, attributes)?)
} }
pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> { pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>) -> MResult<bool> {
self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING_KEY) Ok(self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING_KEY)?)
} }
pub fn ranking_rules(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Vec<RankingRule>>> { pub fn ranking_rules(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<Vec<RankingRule>>> {
self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY) Ok(self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY)?)
} }
pub fn put_ranking_rules(self, writer: &mut heed::RwTxn<MainT>, value: &[RankingRule]) -> ZResult<()> { pub fn put_ranking_rules(self, writer: &mut heed::RwTxn<MainT>, value: &[RankingRule]) -> MResult<()> {
self.main.put::<_, Str, SerdeBincode<Vec<RankingRule>>>(writer, RANKING_RULES_KEY, &value.to_vec()) Ok(self.main.put::<_, Str, SerdeBincode<Vec<RankingRule>>>(writer, RANKING_RULES_KEY, &value.to_vec())?)
} }
pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> { pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn<MainT>) -> MResult<bool> {
self.main.delete::<_, Str>(writer, RANKING_RULES_KEY) Ok(self.main.delete::<_, Str>(writer, RANKING_RULES_KEY)?)
} }
pub fn distinct_attribute(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> { pub fn distinct_attribute(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<String>> {
if let Some(value) = self.main.get::<_, Str, Str>(reader, DISTINCT_ATTRIBUTE_KEY)? { if let Some(value) = self.main.get::<_, Str, Str>(reader, DISTINCT_ATTRIBUTE_KEY)? {
return Ok(Some(value.to_owned())) return Ok(Some(value.to_owned()))
} }
return Ok(None) return Ok(None)
} }
pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> ZResult<()> { pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> MResult<()> {
self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value) Ok(self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value)?)
} }
pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> { pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>) -> MResult<bool> {
self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY) Ok(self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY)?)
} }
pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> ZResult<()> { pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> MResult<()> {
self.main Ok(self.main.put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs)?)
.put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs)
} }
pub fn customs<'txn>(self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<&'txn [u8]>> { pub fn customs<'txn>(self, reader: &'txn heed::RoTxn<MainT>) -> MResult<Option<&'txn [u8]>> {
self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY) Ok(self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY)?)
} }
} }

View File

@ -31,7 +31,6 @@ use std::collections::HashSet;
use std::convert::TryInto; use std::convert::TryInto;
use std::{mem, ptr}; use std::{mem, ptr};
use heed::Result as ZResult;
use heed::{BytesEncode, BytesDecode}; use heed::{BytesEncode, BytesDecode};
use meilisearch_schema::{IndexedPos, FieldId}; use meilisearch_schema::{IndexedPos, FieldId};
use sdset::{Set, SetBuf}; use sdset::{Set, SetBuf};
@ -279,14 +278,14 @@ impl Index {
} }
} }
pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> { pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> MResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
update::push_customs_update(writer, self.updates, self.updates_results, customs) Ok(update::push_customs_update(writer, self.updates, self.updates_results, customs)?)
} }
pub fn settings_update(&self, writer: &mut heed::RwTxn<UpdateT>, update: SettingsUpdate) -> ZResult<u64> { pub fn settings_update(&self, writer: &mut heed::RwTxn<UpdateT>, update: SettingsUpdate) -> MResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
update::push_settings_update(writer, self.updates, self.updates_results, update) Ok(update::push_settings_update(writer, self.updates, self.updates_results, update)?)
} }
pub fn documents_addition<D>(&self) -> update::DocumentsAddition<D> { pub fn documents_addition<D>(&self) -> update::DocumentsAddition<D> {

View File

@ -4,7 +4,7 @@ use heed::Result as ZResult;
use heed::types::ByteSlice; use heed::types::ByteSlice;
use crate::database::MainT; use crate::database::MainT;
use crate::FstSetCow; use crate::{FstSetCow, MResult};
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub struct Synonyms { pub struct Synonyms {
@ -27,10 +27,19 @@ impl Synonyms {
self.synonyms.clear(writer) self.synonyms.clear(writer)
} }
pub fn synonyms<'txn>(self, reader: &'txn heed::RoTxn<MainT>, word: &[u8]) -> ZResult<FstSetCow<'txn>> { pub fn synonyms_fst<'txn>(self, reader: &'txn heed::RoTxn<MainT>, word: &[u8]) -> ZResult<FstSetCow<'txn>> {
match self.synonyms.get(reader, word)? { match self.synonyms.get(reader, word)? {
Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()), Some(bytes) => Ok(fst::Set::new(bytes).unwrap().map_data(Cow::Borrowed).unwrap()),
None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()), None => Ok(fst::Set::default().map_data(Cow::Owned).unwrap()),
} }
} }
pub fn synonyms(self, reader: &heed::RoTxn<MainT>, word: &[u8]) -> MResult<Option<Vec<String>>> {
let synonyms = self
.synonyms_fst(&reader, word)?
.map(|list| list.stream().into_strs())
.transpose()?;
Ok(synonyms)
} }
}

View File

@ -1,14 +1,13 @@
use heed::Result as ZResult;
use crate::database::{MainT, UpdateT}; use crate::database::{MainT, UpdateT};
use crate::store; use crate::{store, MResult};
use crate::update::{next_update_id, Update}; use crate::update::{next_update_id, Update};
pub fn apply_customs_update( pub fn apply_customs_update(
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
main_store: store::Main, main_store: store::Main,
customs: &[u8], customs: &[u8],
) -> ZResult<()> { ) -> MResult<()> {
main_store.put_customs(writer, customs) main_store.put_customs(writer, customs)
} }
@ -17,7 +16,7 @@ pub fn push_customs_update(
updates_store: store::Updates, updates_store: store::Updates,
updates_results_store: store::UpdatesResults, updates_results_store: store::UpdatesResults,
customs: Vec<u8>, customs: Vec<u8>,
) -> ZResult<u64> { ) -> MResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?; let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::customs(customs); let update = Update::customs(customs);

View File

@ -35,6 +35,7 @@ indexmap = { version = "1.3.2", features = ["serde-1"] }
log = "0.4.8" log = "0.4.8"
main_error = "0.1.0" main_error = "0.1.0"
meilisearch-core = { path = "../meilisearch-core", version = "0.10.1" } meilisearch-core = { path = "../meilisearch-core", version = "0.10.1" }
meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" }
meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" } meilisearch-schema = { path = "../meilisearch-schema", version = "0.10.1" }
meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"} meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"}
mime = "0.3.16" mime = "0.3.16"

View File

@ -2,20 +2,14 @@ use std::collections::HashMap;
use std::ops::Deref; use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;
use chrono::{DateTime, Utc};
use heed::types::{SerdeBincode, Str};
use log::error; use log::error;
use meilisearch_core::{Database, DatabaseOptions, Error as MError, MResult, MainT, UpdateT}; use meilisearch_core::{Database, DatabaseOptions, MResult, MainT, UpdateT};
use sha2::Digest; use sha2::Digest;
use sysinfo::Pid; use sysinfo::Pid;
use crate::index_update_callback; use crate::index_update_callback;
use crate::option::Opt; use crate::option::Opt;
const LAST_UPDATE_KEY: &str = "last-update";
type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
#[derive(Clone)] #[derive(Clone)]
pub struct Data { pub struct Data {
inner: Arc<DataInner>, inner: Arc<DataInner>,
@ -70,24 +64,6 @@ impl DataInner {
} }
} }
pub fn last_update(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
match self
.db
.common_store()
.get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)?
{
Some(datetime) => Ok(Some(datetime)),
None => Ok(None),
}
}
pub fn set_last_update(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
self.db
.common_store()
.put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now())
.map_err(Into::into)
}
pub fn compute_stats(&self, writer: &mut heed::RwTxn<MainT>, index_uid: &str) -> MResult<()> { pub fn compute_stats(&self, writer: &mut heed::RwTxn<MainT>, index_uid: &str) -> MResult<()> {
let index = match self.db.open_index(&index_uid) { let index = match self.db.open_index(&index_uid) {
Some(index) => index, Some(index) => index,
@ -124,7 +100,6 @@ impl DataInner {
index index
.main .main
.put_fields_distribution(writer, &distribution) .put_fields_distribution(writer, &distribution)
.map_err(MError::Zlmdb)
} }
} }
@ -170,3 +145,4 @@ impl Data {
data data
} }
} }

View File

@ -1,4 +1,5 @@
use std::fmt; use std::fmt;
use std::error;
use actix_http::ResponseBuilder; use actix_http::ResponseBuilder;
use actix_web as aweb; use actix_web as aweb;
@ -6,6 +7,25 @@ use actix_web::http::StatusCode;
use serde_json::json; use serde_json::json;
use actix_web::error::JsonPayloadError; use actix_web::error::JsonPayloadError;
use meilisearch_error::{ErrorCode, Code};
#[derive(Debug)]
pub struct ResponseError {
inner: Box<dyn ErrorCode>,
}
impl fmt::Display for ResponseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f)
}
}
impl From<Error> for ResponseError {
fn from(error: Error) -> ResponseError {
ResponseError { inner: Box::new(error) }
}
}
#[derive(Debug)] #[derive(Debug)]
pub enum Error { pub enum Error {
BadParameter(String, String), BadParameter(String, String),
@ -26,10 +46,18 @@ pub enum Error {
SearchDocuments(String), SearchDocuments(String),
PayloadTooLarge, PayloadTooLarge,
UnsupportedMediaType, UnsupportedMediaType,
FacetExpression(String),
FacetCount(String),
} }
impl error::Error for Error {}
impl ErrorCode for Error {
fn error_code(&self) -> Code {
//TODO populate with right error codes
Code::Other
}
}
#[derive(Debug)]
pub enum FacetCountError { pub enum FacetCountError {
AttributeNotSet(String), AttributeNotSet(String),
SyntaxError(String), SyntaxError(String),
@ -37,6 +65,14 @@ pub enum FacetCountError {
NoFacetSet, NoFacetSet,
} }
impl error::Error for FacetCountError {}
impl ErrorCode for FacetCountError {
fn error_code(&self) -> Code {
unimplemented!()
}
}
impl FacetCountError { impl FacetCountError {
pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError { pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError {
let found = found.to_string(); let found = found.to_string();
@ -137,94 +173,42 @@ impl fmt::Display for Error {
Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."), Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."),
Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err), Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err),
Self::Maintenance => f.write_str("Server is in maintenance, please try again later"), Self::Maintenance => f.write_str("Server is in maintenance, please try again later"),
Self::FilterParsing(err) => write!(f, "parsing error: {}", err),
Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"), Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"),
Self::MissingHeader(header) => write!(f, "Header {} is missing", header), Self::MissingHeader(header) => write!(f, "Header {} is missing", header),
Self::NotFound(err) => write!(f, "{} not found", err), Self::NotFound(err) => write!(f, "{} not found", err),
Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err), Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err), Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err), Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e),
Self::PayloadTooLarge => f.write_str("Payload to large"), Self::PayloadTooLarge => f.write_str("Payload to large"),
Self::UnsupportedMediaType => f.write_str("Unsupported media type"), Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
Self::FacetCount(e) => write!(f, "error with facet count: {}", e),
} }
} }
} }
impl aweb::error::ResponseError for Error { impl aweb::error::ResponseError for ResponseError {
fn error_response(&self) -> aweb::HttpResponse { fn error_response(&self) -> aweb::HttpResponse {
let error_code = self.inner.error_code().internal();
ResponseBuilder::new(self.status_code()).json(json!({ ResponseBuilder::new(self.status_code()).json(json!({
"message": self.to_string(), "message": self.to_string(),
"errorCode": error_code,
"errorLink": format!("docs.meilisearch.come/error/{}", error_code),
})) }))
} }
fn status_code(&self) -> StatusCode { fn status_code(&self) -> StatusCode {
match *self { self.inner.error_code().http()
Self::BadParameter(_, _)
| Self::BadRequest(_)
| Self::CreateIndex(_)
| Self::InvalidIndexUid
| Self::OpenIndex(_)
| Self::RetrieveDocument(_, _)
| Self::FacetExpression(_)
| Self::SearchDocuments(_)
| Self::FacetCount(_)
| Self::FilterParsing(_) => StatusCode::BAD_REQUEST,
Self::DocumentNotFound(_)
| Self::IndexNotFound(_)
| Self::NotFound(_) => StatusCode::NOT_FOUND,
Self::InvalidToken(_)
| Self::MissingHeader(_) => StatusCode::UNAUTHORIZED,
Self::MissingAuthorizationHeader => StatusCode::FORBIDDEN,
Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
Self::Maintenance => StatusCode::SERVICE_UNAVAILABLE,
Self::PayloadTooLarge => StatusCode::PAYLOAD_TOO_LARGE,
Self::UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE,
}
}
}
impl From<meilisearch_core::HeedError> for Error {
fn from(err: meilisearch_core::HeedError) -> Error {
Error::Internal(err.to_string())
} }
} }
impl From<meilisearch_core::FstError> for Error { impl From<meilisearch_core::Error> for ResponseError {
fn from(err: meilisearch_core::FstError) -> Error { fn from(err: meilisearch_core::Error) -> ResponseError {
Error::Internal(err.to_string()) ResponseError { inner: Box::new(err) }
} }
} }
impl From<meilisearch_core::FacetError> for Error { impl From<meilisearch_schema::Error> for ResponseError {
fn from(error: meilisearch_core::FacetError) -> Error { fn from(err: meilisearch_schema::Error) -> ResponseError {
Error::FacetExpression(error.to_string()) ResponseError { inner: Box::new(err) }
}
}
impl From<meilisearch_core::Error> for Error {
fn from(err: meilisearch_core::Error) -> Error {
use meilisearch_core::pest_error::LineColLocation::*;
match err {
meilisearch_core::Error::FilterParseError(e) => {
let (line, column) = match e.line_col {
Span((line, _), (column, _)) => (line, column),
Pos((line, column)) => (line, column),
};
let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message());
Error::FilterParsing(message)
},
meilisearch_core::Error::FacetError(e) => Error::FacetExpression(e.to_string()),
_ => Error::Internal(err.to_string()),
}
}
}
impl From<meilisearch_schema::Error> for Error {
fn from(err: meilisearch_schema::Error) -> Error {
Error::Internal(err.to_string())
} }
} }
@ -234,9 +218,9 @@ impl From<actix_http::Error> for Error {
} }
} }
impl From<FacetCountError> for Error { impl From<FacetCountError> for ResponseError {
fn from(other: FacetCountError) -> Error { fn from(err: FacetCountError) -> ResponseError {
Error::FacetCount(other.to_string()) ResponseError { inner: Box::new(err) }
} }
} }
@ -251,6 +235,7 @@ impl From<JsonPayloadError> for Error {
} }
} }
pub fn json_error_handler(err: JsonPayloadError) -> Error { pub fn json_error_handler(err: JsonPayloadError) -> ResponseError {
err.into() let error = Error::from(err);
error.into()
} }

View File

@ -7,7 +7,7 @@ use actix_service::{Service, Transform};
use actix_web::{dev::ServiceRequest, dev::ServiceResponse}; use actix_web::{dev::ServiceRequest, dev::ServiceResponse};
use futures::future::{err, ok, Future, Ready}; use futures::future::{err, ok, Future, Ready};
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::Data; use crate::Data;
#[derive(Clone)] #[derive(Clone)]
@ -71,10 +71,10 @@ where
let auth_header = match req.headers().get("X-Meili-API-Key") { let auth_header = match req.headers().get("X-Meili-API-Key") {
Some(auth) => match auth.to_str() { Some(auth) => match auth.to_str() {
Ok(auth) => auth, Ok(auth) => auth,
Err(_) => return Box::pin(err(Error::MissingAuthorizationHeader.into())), Err(_) => return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into())),
}, },
None => { None => {
return Box::pin(err(Error::MissingAuthorizationHeader.into())); return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into()));
} }
}; };
@ -95,7 +95,7 @@ where
Box::pin(svc.call(req)) Box::pin(svc.call(req))
} else { } else {
Box::pin(err( Box::pin(err(
Error::InvalidToken(auth_header.to_string()).into() ResponseError::from(Error::InvalidToken(auth_header.to_string())).into()
)) ))
} }
} }

View File

@ -17,7 +17,7 @@ use serde_json::Value;
use siphasher::sip::SipHasher; use siphasher::sip::SipHasher;
use slice_group_by::GroupBy; use slice_group_by::GroupBy;
use crate::error::Error; use crate::error::{Error, ResponseError};
pub trait IndexSearchExt { pub trait IndexSearchExt {
fn new_search(&self, query: String) -> SearchBuilder; fn new_search(&self, query: String) -> SearchBuilder;
@ -107,7 +107,7 @@ impl<'a> SearchBuilder<'a> {
self self
} }
pub fn search(self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, Error> { pub fn search(self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, ResponseError> {
let schema = self let schema = self
.index .index
.main .main
@ -260,7 +260,7 @@ impl<'a> SearchBuilder<'a> {
reader: &heed::RoTxn<MainT>, reader: &heed::RoTxn<MainT>,
ranked_map: &'a RankedMap, ranked_map: &'a RankedMap,
schema: &Schema, schema: &Schema,
) -> Result<Option<Criteria<'a>>, Error> { ) -> Result<Option<Criteria<'a>>, ResponseError> {
let ranking_rules = self.index.main.ranking_rules(reader)?; let ranking_rules = self.index.main.ranking_rules(reader)?;
if let Some(ranking_rules) = ranking_rules { if let Some(ranking_rules) = ranking_rules {

View File

@ -7,14 +7,17 @@ pub mod models;
pub mod option; pub mod option;
pub mod routes; pub mod routes;
pub use self::data::Data;
use self::error::json_error_handler;
use actix_http::Error; use actix_http::Error;
use actix_service::ServiceFactory; use actix_service::ServiceFactory;
use actix_web::{dev, web, App}; use actix_web::{dev, web, App};
use chrono::Utc;
use log::error; use log::error;
use meilisearch_core::ProcessedUpdateResult; use meilisearch_core::ProcessedUpdateResult;
pub use self::data::Data;
use self::error::{json_error_handler, ResponseError};
pub fn create_app( pub fn create_app(
data: &Data, data: &Data,
) -> App< ) -> App<
@ -55,28 +58,23 @@ pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpda
if let Some(index) = data.db.open_index(&index_uid) { if let Some(index) = data.db.open_index(&index_uid) {
let db = &data.db; let db = &data.db;
let mut writer = match db.main_write_txn() { let res = db.main_write::<_, _, ResponseError>(|mut writer| {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) { if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e) error!("Impossible to compute stats; {}", e)
} }
if let Err(e) = data.set_last_update(&mut writer) { if let Err(e) = data.db.set_last_update(&mut writer, &Utc::now()) {
error!("Impossible to update last_update; {}", e) error!("Impossible to update last_update; {}", e)
} }
if let Err(e) = index.main.put_updated_at(&mut writer) { if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e) error!("Impossible to update updated_at; {}", e)
} }
Ok(())
if let Err(e) = writer.commit() { });
error!("Impossible to get write_txn; {}", e); match res {
Ok(_) => (),
Err(e) => error!("{}", e),
} }
} }
} }

View File

@ -7,7 +7,7 @@ use meilisearch_core::update;
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value; use serde_json::Value;
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data; use crate::Data;
@ -37,7 +37,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
async fn get_document( async fn get_document(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<DocumentParam>, path: web::Path<DocumentParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -46,13 +46,9 @@ async fn get_document(
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let internal_id = index.main let internal_id = index.main
.external_to_internal_docid(&reader, &path.document_id)? .external_to_internal_docid(&reader, &path.document_id)?
.ok_or(ResponseError::document_not_found(&path.document_id))?;
let response: Document = index
.document(&reader, None, document_id)?
.ok_or(Error::document_not_found(&path.document_id))?; .ok_or(Error::document_not_found(&path.document_id))?;
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(document))
} }
#[delete( #[delete(
@ -62,7 +58,7 @@ async fn get_document(
async fn delete_document( async fn delete_document(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<DocumentParam>, path: web::Path<DocumentParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -70,14 +66,10 @@ async fn delete_document(
let document_id = meilisearch_core::serde::compute_document_id(&path.document_id); let document_id = meilisearch_core::serde::compute_document_id(&path.document_id);
let mut update_writer = data.db.update_write_txn()?;
let mut documents_deletion = index.documents_deletion(); let mut documents_deletion = index.documents_deletion();
documents_deletion.delete_document_by_external_docid(path.document_id.clone()); documents_deletion.delete_document_by_external_docid(path.document_id.clone());
let update_id = documents_deletion.finalize(&mut update_writer)?; let update_id = data.db.update_write::<_, _, ResponseError>(|writer| Ok(documents_deletion.finalize(writer)?))?;
update_writer.commit()?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -95,7 +87,7 @@ async fn get_all_documents(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
params: web::Query<BrowseQuery>, params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -105,7 +97,6 @@ async fn get_all_documents(
let limit = params.limit.unwrap_or(20); let limit = params.limit.unwrap_or(20);
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let documents_ids: Result<BTreeSet<_>, _> = index let documents_ids: Result<BTreeSet<_>, _> = index
.documents_fields_counts .documents_fields_counts
.documents_ids(&reader)? .documents_ids(&reader)?
@ -113,23 +104,21 @@ async fn get_all_documents(
.take(limit) .take(limit)
.collect(); .collect();
let documents_ids = documents_ids?;
let attributes: Option<HashSet<&str>> = params let attributes: Option<HashSet<&str>> = params
.attributes_to_retrieve .attributes_to_retrieve
.as_ref() .as_ref()
.map(|a| a.split(',').collect()); .map(|a| a.split(',').collect());
let mut response = Vec::new(); let mut documents = Vec::new();
for document_id in documents_ids { for document_id in documents_ids? {
if let Ok(Some(document)) = if let Ok(Some(document)) =
index.document::<Document>(&reader, attributes.as_ref(), document_id) index.document::<Document>(&reader, attributes.as_ref(), document_id)
{ {
response.push(document); documents.push(document);
} }
} }
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(documents))
} }
fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> { fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
@ -153,7 +142,7 @@ async fn update_multiple_documents(
params: web::Query<UpdateDocumentsQuery>, params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>, body: web::Json<Vec<Document>>,
is_partial: bool, is_partial: bool,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -175,13 +164,14 @@ async fn update_multiple_documents(
.ok_or(Error::bad_request("Could not infer a primary key"))?, .ok_or(Error::bad_request("Could not infer a primary key"))?,
}; };
let mut writer = data.db.main_write_txn()?;
schema schema
.set_primary_key(&id) .set_primary_key(&id)
.map_err(Error::bad_request)?; .map_err(Error::bad_request)?;
data.db.main_write::<_, _, ResponseError>(|mut writer| {
index.main.put_schema(&mut writer, &schema)?; index.main.put_schema(&mut writer, &schema)?;
writer.commit()?; Ok(())
})?;
} }
let mut document_addition = if is_partial { let mut document_addition = if is_partial {
@ -194,9 +184,12 @@ async fn update_multiple_documents(
document_addition.update_document(document); document_addition.update_document(document);
} }
let mut update_writer = data.db.update_write_txn()?; let update_id = data
let update_id = document_addition.finalize(&mut update_writer)?; .db
update_writer.commit()?; .update_write::<_, _, ResponseError>(move |writer| {
let update_id = document_addition.finalize(writer)?;
Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -207,7 +200,7 @@ async fn add_documents(
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
params: web::Query<UpdateDocumentsQuery>, params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>, body: web::Json<Vec<Document>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
update_multiple_documents(data, path, params, body, false).await update_multiple_documents(data, path, params, body, false).await
} }
@ -217,7 +210,7 @@ async fn update_documents(
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
params: web::Query<UpdateDocumentsQuery>, params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>, body: web::Json<Vec<Document>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
update_multiple_documents(data, path, params, body, true).await update_multiple_documents(data, path, params, body, true).await
} }
@ -229,13 +222,12 @@ async fn delete_documents(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Vec<Value>>, body: web::Json<Vec<Value>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let mut documents_deletion = index.documents_deletion(); let mut documents_deletion = index.documents_deletion();
@ -244,9 +236,7 @@ async fn delete_documents(
documents_deletion.delete_document_by_external_docid(document_id); documents_deletion.delete_document_by_external_docid(document_id);
} }
let update_id = documents_deletion.finalize(&mut writer)?; let update_id = data.db.update_write::<_, _, ResponseError>(|writer| Ok(documents_deletion.finalize(writer)?))?;
writer.commit()?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -255,17 +245,13 @@ async fn delete_documents(
async fn clear_all_documents( async fn clear_all_documents(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|writer| Ok(index.clear_all(writer)?))?;
let update_id = index.clear_all(&mut writer)?;
writer.commit()?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }

View File

@ -1,46 +1,37 @@
use actix_web::{web, HttpResponse}; use actix_web::{web, HttpResponse};
use actix_web_macros::{get, put}; use actix_web_macros::{get, put};
use heed::types::{Str, Unit};
use serde::Deserialize; use serde::Deserialize;
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::Data; use crate::Data;
const UNHEALTHY_KEY: &str = "_is_unhealthy";
pub fn services(cfg: &mut web::ServiceConfig) { pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_health).service(change_healthyness); cfg.service(get_health).service(change_healthyness);
} }
#[get("/health", wrap = "Authentication::Private")] #[get("/health", wrap = "Authentication::Private")]
async fn get_health(data: web::Data<Data>) -> Result<HttpResponse, Error> { async fn get_health(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
if let Ok(Some(_)) = data.db.get_health(&reader) {
let common_store = data.db.common_store(); return Err(Error::Maintenance.into());
if let Ok(Some(_)) = common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY) {
return Err(Error::Maintenance);
} }
Ok(HttpResponse::Ok().finish()) Ok(HttpResponse::Ok().finish())
} }
async fn set_healthy(data: web::Data<Data>) -> Result<HttpResponse, Error> { async fn set_healthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let mut writer = data.db.main_write_txn()?; data.db.main_write::<_, _, ResponseError>(|writer| {
let common_store = data.db.common_store(); data.db.set_healthy(writer)?;
common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY)?; Ok(())
writer.commit()?; })?;
Ok(HttpResponse::Ok().finish()) Ok(HttpResponse::Ok().finish())
} }
async fn set_unhealthy(data: web::Data<Data>) -> Result<HttpResponse, Error> { async fn set_unhealthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let mut writer = data.db.main_write_txn()?; data.db.main_write::<_, _, ResponseError>(|writer| {
let common_store = data.db.common_store(); data.db.set_unhealthy(writer)?;
common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &())?; Ok(())
writer.commit()?; })?;
Ok(HttpResponse::Ok().finish()) Ok(HttpResponse::Ok().finish())
} }
@ -52,8 +43,8 @@ struct HealthBody {
#[put("/health", wrap = "Authentication::Private")] #[put("/health", wrap = "Authentication::Private")]
async fn change_healthyness( async fn change_healthyness(
data: web::Data<Data>, data: web::Data<Data>,
body: web::Json<HealthBody>, body: web::Json<HealtBody>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
if body.health { if body.health {
set_healthy(data).await set_healthy(data).await
} else { } else {

View File

@ -5,7 +5,7 @@ use log::error;
use rand::seq::SliceRandom; use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::IndexParam; use crate::routes::IndexParam;
use crate::Data; use crate::Data;
@ -40,10 +40,9 @@ struct IndexResponse {
} }
#[get("/indexes", wrap = "Authentication::Private")] #[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, Error> { async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let mut indexes = Vec::new();
let mut response = Vec::new();
for index_uid in data.db.indexes_uids() { for index_uid in data.db.indexes_uids() {
let index = data.db.open_index(&index_uid); let index = data.db.open_index(&index_uid);
@ -81,7 +80,7 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, Error> {
updated_at, updated_at,
primary_key, primary_key,
}; };
response.push(index_response); indexes.push(index_response);
} }
None => error!( None => error!(
"Index {} is referenced in the indexes list but cannot be found", "Index {} is referenced in the indexes list but cannot be found",
@ -90,21 +89,20 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, Error> {
} }
} }
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(indexes))
} }
#[get("/indexes/{index_uid}", wrap = "Authentication::Private")] #[get("/indexes/{index_uid}", wrap = "Authentication::Private")]
async fn get_index( async fn get_index(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let name = index.main.name(&reader)?.ok_or(Error::internal( let name = index.main.name(&reader)?.ok_or(Error::internal(
"Impossible to get the name of an index", "Impossible to get the name of an index",
))?; ))?;
@ -128,14 +126,15 @@ async fn get_index(
}, },
_ => None, _ => None,
}; };
let index_response = IndexResponse {
Ok(HttpResponse::Ok().json(IndexResponse {
name, name,
uid: path.index_uid.clone(), uid: path.index_uid.clone(),
created_at, created_at,
updated_at, updated_at,
primary_key, primary_key,
})) };
Ok(HttpResponse::Ok().json(index_response))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
@ -150,11 +149,11 @@ struct IndexCreateRequest {
async fn create_index( async fn create_index(
data: web::Data<Data>, data: web::Data<Data>,
body: web::Json<IndexCreateRequest>, body: web::Json<IndexCreateRequest>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
if let (None, None) = (body.name.clone(), body.uid.clone()) { if let (None, None) = (body.name.clone(), body.uid.clone()) {
return Err(Error::bad_request( return Err(Error::bad_request(
"Index creation must have an uid", "Index creation must have an uid",
)); ).into());
} }
let uid = match &body.uid { let uid = match &body.uid {
@ -165,7 +164,7 @@ async fn create_index(
{ {
uid.to_owned() uid.to_owned()
} else { } else {
return Err(Error::InvalidIndexUid); return Err(Error::InvalidIndexUid.into());
} }
} }
None => loop { None => loop {
@ -181,8 +180,7 @@ async fn create_index(
.create_index(&uid) .create_index(&uid)
.map_err(Error::create_index)?; .map_err(Error::create_index)?;
let mut writer = data.db.main_write_txn()?; let index_response = data.db.main_write::<_, _, ResponseError>(|mut writer| {
let name = body.name.as_ref().unwrap_or(&uid); let name = body.name.as_ref().unwrap_or(&uid);
created_index.main.put_name(&mut writer, name)?; created_index.main.put_name(&mut writer, name)?;
@ -204,16 +202,17 @@ async fn create_index(
created_index.main.put_schema(&mut writer, &schema)?; created_index.main.put_schema(&mut writer, &schema)?;
} }
} }
let index_response = IndexResponse {
writer.commit()?;
Ok(HttpResponse::Created().json(IndexResponse {
name: name.to_string(), name: name.to_string(),
uid, uid,
created_at, created_at,
updated_at, updated_at,
primary_key: body.primary_key.clone(), primary_key: body.primary_key.clone(),
})) };
Ok(index_response)
})?;
Ok(HttpResponse::Created().json(index_response))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
@ -238,14 +237,13 @@ async fn update_index(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<IndexCreateRequest>, body: web::Json<IndexCreateRequest>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.main_write_txn()?; data.db.main_write::<_, _, ResponseError>(|mut writer| {
if let Some(name) = &body.name { if let Some(name) = &body.name {
index.main.put_name(&mut writer, name)?; index.main.put_name(&mut writer, name)?;
} }
@ -256,7 +254,7 @@ async fn update_index(
Some(_) => { Some(_) => {
return Err(Error::bad_request( return Err(Error::bad_request(
"The primary key cannot be updated", "The primary key cannot be updated",
)); ).into());
} }
None => { None => {
schema.set_primary_key(&id)?; schema.set_primary_key(&id)?;
@ -265,12 +263,11 @@ async fn update_index(
} }
} }
} }
index.main.put_updated_at(&mut writer)?; index.main.put_updated_at(&mut writer)?;
writer.commit()?; Ok(())
})?;
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let name = index.main.name(&reader)?.ok_or(Error::internal( let name = index.main.name(&reader)?.ok_or(Error::internal(
"Impossible to get the name of an index", "Impossible to get the name of an index",
))?; ))?;
@ -295,20 +292,22 @@ async fn update_index(
_ => None, _ => None,
}; };
Ok(HttpResponse::Ok().json(IndexResponse { let index_response = IndexResponse {
name, name,
uid: path.index_uid.clone(), uid: path.index_uid.clone(),
created_at, created_at,
updated_at, updated_at,
primary_key, primary_key,
})) };
Ok(HttpResponse::Ok().json(index_response))
} }
#[delete("/indexes/{index_uid}", wrap = "Authentication::Private")] #[delete("/indexes/{index_uid}", wrap = "Authentication::Private")]
async fn delete_index( async fn delete_index(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
data.db.delete_index(&path.index_uid)?; data.db.delete_index(&path.index_uid)?;
Ok(HttpResponse::NoContent().finish()) Ok(HttpResponse::NoContent().finish())
@ -327,7 +326,7 @@ struct UpdateParam {
async fn get_update_status( async fn get_update_status(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<UpdateParam>, path: web::Path<UpdateParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -342,7 +341,7 @@ async fn get_update_status(
None => Err(Error::NotFound(format!( None => Err(Error::NotFound(format!(
"Update {} not found", "Update {} not found",
path.update_id path.update_id
))), )).into()),
} }
} }
@ -350,7 +349,7 @@ async fn get_update_status(
async fn get_all_updates_status( async fn get_all_updates_status(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)

View File

@ -7,7 +7,7 @@ use actix_web_macros::get;
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value; use serde_json::Value;
use crate::error::{Error, FacetCountError}; use crate::error::{Error, FacetCountError, ResponseError};
use crate::helpers::meilisearch::IndexSearchExt; use crate::helpers::meilisearch::IndexSearchExt;
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::IndexParam; use crate::routes::IndexParam;
@ -41,14 +41,13 @@ async fn search_with_url_query(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
params: web::Query<SearchQuery>, params: web::Query<SearchQuery>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let schema = index let schema = index
.main .main
.schema(&reader)? .schema(&reader)?
@ -88,9 +87,9 @@ async fn search_with_url_query(
} }
if let Some(ref facet_filters) = params.facet_filters { if let Some(ref facet_filters) = params.facet_filters {
match index.main.attributes_for_faceting(&reader)? { let attrs = index.main.attributes_for_faceting(&reader)?;
Some(ref attrs) => { search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, attrs)?); }, if let Some(attrs) = attrs {
None => return Err(Error::FacetExpression("can't filter on facets, as no facet is set".to_string())) search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?);
} }
} }
@ -100,7 +99,7 @@ async fn search_with_url_query(
let field_ids = prepare_facet_list(&facets, &schema, attrs)?; let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
search_builder.add_facets(field_ids); search_builder.add_facets(field_ids);
}, },
None => return Err(FacetCountError::NoFacetSet.into()) None => todo!() /* return Err(FacetCountError::NoFacetSet.into()) */
} }
} }
@ -160,8 +159,9 @@ async fn search_with_url_query(
search_builder.get_matches(); search_builder.get_matches();
} }
} }
let search_result = search_builder.search(&reader)?;
Ok(HttpResponse::Ok().json(search_builder.search(&reader)?)) Ok(HttpResponse::Ok().json(search_result))
} }
/// Parses the incoming string into an array of attributes for which to return a count. It returns /// Parses the incoming string into an array of attributes for which to return a count. It returns

View File

@ -3,7 +3,7 @@ use actix_web_macros::{delete, get, post};
use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES}; use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::collections::{BTreeMap, BTreeSet, HashSet};
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data; use crate::Data;
@ -33,19 +33,20 @@ async fn update_all(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Settings>, body: web::Json<Settings>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let settings = body let settings = body
.into_inner() .into_inner()
.into_update() .into_update()
.map_err(Error::bad_request)?; .map_err(Error::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -54,7 +55,7 @@ async fn update_all(
async fn get_all( async fn get_all(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -62,20 +63,22 @@ async fn get_all(
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let stop_words_fst = index.main.stop_words_fst(&reader)?; let stop_words: BTreeSet<String> = index
let stop_words = stop_words_fst.stream().into_strs()?; .main
let stop_words: BTreeSet<String> = stop_words.into_iter().collect(); .stop_words_list(&reader)?
.into_iter()
.collect();
let synonyms_fst = index.main.synonyms_fst(&reader)?; let synonyms_list = index.main.synonyms_list(&reader)?;
let synonyms_list = synonyms_fst.stream().into_strs()?;
let mut synonyms = BTreeMap::new(); let mut synonyms = BTreeMap::new();
let index_synonyms = &index.synonyms; let index_synonyms = &index.synonyms;
for synonym in synonyms_list { for synonym in synonyms_list {
let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
let list = alternative_list.stream().into_strs()?; if let Some(list) = alternative_list {
synonyms.insert(synonym, list); synonyms.insert(synonym, list);
} }
}
let ranking_rules = index let ranking_rules = index
.main .main
@ -134,12 +137,11 @@ async fn get_all(
async fn delete_all( async fn delete_all(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = SettingsUpdate { let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear, ranking_rules: UpdateState::Clear,
@ -153,8 +155,10 @@ async fn delete_all(
attributes_for_faceting: UpdateState::Clear, attributes_for_faceting: UpdateState::Clear,
}; };
let update_id = index.settings_update(&mut writer, settings)?; let update_id = data.db.update_write::<_, _, ResponseError>(|writer| {
writer.commit()?; let update_id = index.settings_update(writer, settings)?;
Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -166,7 +170,7 @@ async fn delete_all(
async fn get_rules( async fn get_rules(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -192,7 +196,7 @@ async fn update_rules(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>, body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -203,10 +207,12 @@ async fn update_rules(
..Settings::default() ..Settings::default()
}; };
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(Error::bad_request)?; let settings = settings.into_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -218,21 +224,21 @@ async fn update_rules(
async fn delete_rules( async fn delete_rules(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = SettingsUpdate { let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear, ranking_rules: UpdateState::Clear,
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
Ok(update_id)
writer.commit()?; })?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -244,7 +250,7 @@ async fn delete_rules(
async fn get_distinct( async fn get_distinct(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -263,7 +269,7 @@ async fn update_distinct(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Option<String>>, body: web::Json<Option<String>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -274,10 +280,12 @@ async fn update_distinct(
..Settings::default() ..Settings::default()
}; };
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(Error::bad_request)?; let settings = settings.into_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -289,21 +297,21 @@ async fn update_distinct(
async fn delete_distinct( async fn delete_distinct(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = SettingsUpdate { let settings = SettingsUpdate {
distinct_attribute: UpdateState::Clear, distinct_attribute: UpdateState::Clear,
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
Ok(update_id)
writer.commit()?; })?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -315,7 +323,7 @@ async fn delete_distinct(
async fn get_searchable( async fn get_searchable(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -336,7 +344,7 @@ async fn update_searchable(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>, body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -347,10 +355,12 @@ async fn update_searchable(
..Settings::default() ..Settings::default()
}; };
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(Error::bad_request)?; let settings = settings.into_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -362,7 +372,7 @@ async fn update_searchable(
async fn delete_searchable( async fn delete_searchable(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -373,9 +383,10 @@ async fn delete_searchable(
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -387,7 +398,7 @@ async fn delete_searchable(
async fn get_displayed( async fn get_displayed(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -410,7 +421,7 @@ async fn update_displayed(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Option<HashSet<String>>>, body: web::Json<Option<HashSet<String>>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -421,10 +432,12 @@ async fn update_displayed(
..Settings::default() ..Settings::default()
}; };
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(Error::bad_request)?; let settings = settings.into_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -436,7 +449,7 @@ async fn update_displayed(
async fn delete_displayed( async fn delete_displayed(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -447,9 +460,10 @@ async fn delete_displayed(
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -461,7 +475,7 @@ async fn delete_displayed(
async fn get_accept_new_fields( async fn get_accept_new_fields(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -483,7 +497,7 @@ async fn update_accept_new_fields(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<Option<bool>>, body: web::Json<Option<bool>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -494,10 +508,12 @@ async fn update_accept_new_fields(
..Settings::default() ..Settings::default()
}; };
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(Error::bad_request)?; let settings = settings.into_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }

View File

@ -10,7 +10,7 @@ use serde::Serialize;
use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt}; use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt};
use walkdir::WalkDir; use walkdir::WalkDir;
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::IndexParam; use crate::routes::IndexParam;
use crate::Data; use crate::Data;
@ -35,7 +35,7 @@ struct IndexStatsResponse {
async fn index_stats( async fn index_stats(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -71,7 +71,7 @@ struct StatsResult {
} }
#[get("/stats", wrap = "Authentication::Private")] #[get("/stats", wrap = "Authentication::Private")]
async fn get_stats(data: web::Data<Data>) -> Result<HttpResponse, Error> { async fn get_stats(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let mut index_list = HashMap::new(); let mut index_list = HashMap::new();
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
@ -111,7 +111,7 @@ async fn get_stats(data: web::Data<Data>) -> Result<HttpResponse, Error> {
.filter(|metadata| metadata.is_file()) .filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len()); .fold(0, |acc, m| acc + m.len());
let last_update = data.last_update(&reader)?; let last_update = data.db.last_update(&reader)?;
Ok(HttpResponse::Ok().json(StatsResult { Ok(HttpResponse::Ok().json(StatsResult {
database_size, database_size,

View File

@ -3,7 +3,7 @@ use actix_web_macros::{delete, get, post};
use meilisearch_core::settings::{SettingsUpdate, UpdateState}; use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use std::collections::BTreeSet; use std::collections::BTreeSet;
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data; use crate::Data;
@ -19,14 +19,13 @@ pub fn services(cfg: &mut web::ServiceConfig) {
async fn get( async fn get(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?; .ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let stop_words_fst = index.main.stop_words_fst(&reader)?; let stop_words = index.main.stop_words_list(&reader)?;
let stop_words = stop_words_fst.stream().into_strs()?;
Ok(HttpResponse::Ok().json(stop_words)) Ok(HttpResponse::Ok().json(stop_words))
} }
@ -39,7 +38,7 @@ async fn update(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<BTreeSet<String>>, body: web::Json<BTreeSet<String>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -50,9 +49,10 @@ async fn update(
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -64,7 +64,7 @@ async fn update(
async fn delete( async fn delete(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -75,9 +75,10 @@ async fn delete(
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }

View File

@ -5,7 +5,7 @@ use actix_web_macros::{delete, get, post};
use indexmap::IndexMap; use indexmap::IndexMap;
use meilisearch_core::settings::{SettingsUpdate, UpdateState}; use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use crate::error::Error; use crate::error::{Error, ResponseError};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse}; use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data; use crate::Data;
@ -21,7 +21,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
async fn get( async fn get(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -29,16 +29,16 @@ async fn get(
let reader = data.db.main_read_txn()?; let reader = data.db.main_read_txn()?;
let synonyms_fst = index.main.synonyms_fst(&reader)?; let synonyms_list = index.main.synonyms_list(&reader)?;
let synonyms_list = synonyms_fst.stream().into_strs()?;
let mut synonyms = IndexMap::new(); let mut synonyms = IndexMap::new();
let index_synonyms = &index.synonyms; let index_synonyms = &index.synonyms;
for synonym in synonyms_list { for synonym in synonyms_list {
let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?; let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
let list = alternative_list.stream().into_strs()?; if let Some(list) = alternative_list {
synonyms.insert(synonym, list); synonyms.insert(synonym, list);
} }
}
Ok(HttpResponse::Ok().json(synonyms)) Ok(HttpResponse::Ok().json(synonyms))
} }
@ -51,7 +51,7 @@ async fn update(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
body: web::Json<BTreeMap<String, Vec<String>>>, body: web::Json<BTreeMap<String, Vec<String>>>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -62,9 +62,10 @@ async fn update(
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?; Ok(update_id)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }
@ -76,7 +77,7 @@ async fn update(
async fn delete( async fn delete(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, ResponseError> {
let index = data let index = data
.db .db
.open_index(&path.index_uid) .open_index(&path.index_uid)
@ -87,10 +88,10 @@ async fn delete(
..SettingsUpdate::default() ..SettingsUpdate::default()
}; };
let mut writer = data.db.update_write_txn()?; let update_id = data.db.update_write::<_, _, ResponseError>(|mut writer| {
let update_id = index.settings_update(&mut writer, settings)?; let update_id = index.settings_update(&mut writer, settings)?;
Ok(update_id)
writer.commit()?; })?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id))) Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
} }

View File

@ -21,6 +21,7 @@ async fn delete_batch() {
server.populate_movies().await; server.populate_movies().await;
let (_response, status_code) = server.get_document(419704).await; let (_response, status_code) = server.get_document(419704).await;
println!("{:?}", _response);
assert_eq!(status_code, 200); assert_eq!(status_code, 200);
let body = serde_json::json!([419704, 512200, 181812]); let body = serde_json::json!([419704, 512200, 181812]);

View File

@ -8,6 +8,7 @@ edition = "2018"
[dependencies] [dependencies]
bincode = "1.2.1" bincode = "1.2.1"
indexmap = { version = "1.3.2", features = ["serde-1"] } indexmap = { version = "1.3.2", features = ["serde-1"] }
meilisearch-error = { path = "../meilisearch-error", version = "0.10.1" }
serde = { version = "1.0.105", features = ["derive"] } serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.50", features = ["preserve_order"] } serde_json = { version = "1.0.50", features = ["preserve_order"] }
toml = { version = "0.5.6", features = ["preserve_order"] } toml = { version = "0.5.6", features = ["preserve_order"] }

View File

@ -1,6 +1,7 @@
use std::{error, fmt}; use std::{error, fmt};
use meilisearch_error::{ErrorCode, Code};
pub type SResult<T> = Result<T, Error>; pub type SResult<T> = Result<T, Error>;
#[derive(Debug)] #[derive(Debug)]
@ -22,3 +23,9 @@ impl fmt::Display for Error {
} }
impl error::Error for Error {} impl error::Error for Error {}
impl ErrorCode for Error {
fn error_code(&self) -> Code {
unimplemented!()
}
}