integrate milli errors

marin postma 2021-06-17 14:36:32 +02:00
parent 0dfd1b74c8
commit abdf642d68
No known key found for this signature in database
GPG Key ID: 6088B7721C3E39F9
13 changed files with 101 additions and 103 deletions

Cargo.lock generated
View File

@@ -1708,7 +1708,7 @@ dependencies = [
 [[package]]
 name = "milli"
 version = "0.4.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.4.0#3bd4cf94cc60733393b94021fca77eb100bfe17a"
+source = "git+https://github.com/meilisearch/milli.git?rev=70bee7d405711d5e6d24b62710e92671be5ac67a#70bee7d405711d5e6d24b62710e92671be5ac67a"
 dependencies = [
  "bstr",
  "byteorder",

View File

@@ -51,7 +51,7 @@ main_error = "0.1.0"
 meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-tokenizer = { git = "https://github.com/meilisearch/Tokenizer.git", tag = "v0.2.2" }
 memmap = "0.7.0"
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.4.0" }
+milli = { git = "https://github.com/meilisearch/milli.git", rev = "70bee7d405711d5e6d24b62710e92671be5ac67a" }
 mime = "0.3.16"
 once_cell = "1.5.2"
 oxidized-json-checker = "0.3.2"

View File

@@ -7,6 +7,7 @@ use actix_web::body::Body;
 use actix_web::dev::BaseHttpResponseBuilder;
 use actix_web::http::StatusCode;
 use meilisearch_error::{Code, ErrorCode};
+use milli::UserError;
 use serde::ser::{Serialize, SerializeStruct, Serializer};

 use crate::index_controller::error::IndexControllerError;
@@ -139,3 +140,44 @@ macro_rules! internal_error {
         )*
     }
 }
+
+#[derive(Debug)]
+pub struct MilliError<'a>(pub &'a milli::Error);
+
+impl Error for MilliError<'_> {}
+
+impl fmt::Display for MilliError<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+impl ErrorCode for MilliError<'_> {
+    fn error_code(&self) -> Code {
+        match self.0 {
+            milli::Error::InternalError(_) => Code::Internal,
+            milli::Error::IoError(_) => Code::Internal,
+            milli::Error::UserError(ref error) => {
+                match error {
+                    // TODO: wait for spec for new error codes.
+                    UserError::AttributeLimitReached
+                    | UserError::Csv(_)
+                    | UserError::SerdeJson(_)
+                    | UserError::MaxDatabaseSizeReached
+                    | UserError::InvalidCriterionName { .. }
+                    | UserError::InvalidDocumentId { .. }
+                    | UserError::InvalidStoreFile
+                    | UserError::NoSpaceLeftOnDevice
+                    | UserError::DocumentLimitReached => todo!(),
+                    UserError::InvalidFilter(_) => Code::Filter,
+                    UserError::InvalidFilterAttribute(_) => Code::Filter,
+                    UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
+                    UserError::MissingPrimaryKey => Code::MissingPrimaryKey,
+                    UserError::PrimaryKeyCannotBeChanged => Code::PrimaryKeyAlreadyPresent,
+                    UserError::PrimaryKeyCannotBeReset => Code::PrimaryKeyAlreadyPresent,
+                    UserError::UnknownInternalDocumentId { .. } => Code::DocumentNotFound,
+                }
+            },
+        }
+    }
+}
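
Note: `MilliError` is a borrowing wrapper, so callers hand it a `&milli::Error` and ask for the public `Code`. A minimal sketch of the call pattern (the `code_for_milli_error` helper is hypothetical; only the `MilliError(e).error_code()` expression itself comes from this commit):

    use meilisearch_error::{Code, ErrorCode};

    use crate::error::MilliError;

    // Hypothetical helper, not part of the commit: wrap a borrowed
    // `milli::Error` and return the public error code, exactly like the
    // `IndexError::Milli(e) => MilliError(e).error_code()` arms added
    // elsewhere in this commit.
    fn code_for_milli_error(error: &milli::Error) -> Code {
        MilliError(error).error_code()
    }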

View File

@@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize};

 use crate::option::IndexerOpts;

-use super::error::{IndexError, Result};
+use super::error::Result;
 use super::{update_handler::UpdateHandler, Index, Settings, Unchecked};

 #[derive(Serialize, Deserialize)]
@@ -38,9 +38,7 @@ impl Index {
         let document_file_path = path.as_ref().join(DATA_FILE_NAME);
         let mut document_file = File::create(&document_file_path)?;

-        let documents = self
-            .all_documents(txn)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let documents = self.all_documents(txn)?;
         let fields_ids_map = self.fields_ids_map(txn)?;

         // dump documents
View File

@@ -3,6 +3,8 @@ use std::error::Error;
 use meilisearch_error::{Code, ErrorCode};
 use serde_json::Value;

+use crate::error::MilliError;
+
 pub type Result<T> = std::result::Result<T, IndexError>;

 #[derive(Debug, thiserror::Error)]
@@ -13,6 +15,8 @@ pub enum IndexError {
     DocumentNotFound(String),
     #[error("error with facet: {0}")]
     Facet(#[from] FacetError),
+    #[error("{0}")]
+    Milli(#[from] milli::Error),
 }

 internal_error!(
@@ -29,6 +33,7 @@ impl ErrorCode for IndexError {
             IndexError::Internal(_) => Code::Internal,
             IndexError::DocumentNotFound(_) => Code::DocumentNotFound,
             IndexError::Facet(e) => e.error_code(),
+            IndexError::Milli(e) => MilliError(e).error_code(),
         }
     }
 }
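
The `#[from] milli::Error` attribute is what lets the `?` operator replace the `.map_err(|e| IndexError::Internal(e.into()))` calls removed throughout this commit: thiserror derives a `From` impl for the variant, and `?` uses it to convert the error automatically. A self-contained sketch of the mechanism, using placeholder types (`DbError`, `fetch`) rather than the real milli API, and a simplified one-variant `IndexError`:

    use thiserror::Error;

    // Placeholder source error, standing in for `milli::Error`.
    #[derive(Debug, Error)]
    #[error("db error: {0}")]
    struct DbError(String);

    #[derive(Debug, Error)]
    enum IndexError {
        // `#[from]` generates `impl From<DbError> for IndexError`,
        // so `?` converts the error without an explicit map_err.
        #[error("{0}")]
        Db(#[from] DbError),
    }

    // Placeholder fallible operation.
    fn fetch() -> Result<u32, DbError> {
        Err(DbError("not found".into()))
    }

    fn lookup() -> Result<u32, IndexError> {
        // No `.map_err(...)` needed: the derived From impl does the conversion.
        let value = fetch()?;
        Ok(value)
    }

    fn main() {
        println!("{:?}", lookup());
    }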

View File

@@ -51,8 +51,7 @@ impl Index {
         create_dir_all(&path)?;
         let mut options = EnvOpenOptions::new();
         options.map_size(size);
-        let index =
-            milli::Index::new(options, &path).map_err(|e| IndexError::Internal(e.into()))?;
+        let index = milli::Index::new(options, &path)?;
         Ok(Index(Arc::new(index)))
     }
@@ -70,11 +69,7 @@ impl Index {
             .searchable_fields(&txn)?
             .map(|fields| fields.into_iter().map(String::from).collect());

-        let faceted_attributes = self
-            .faceted_fields(&txn)
-            .map_err(|e| IndexError::Internal(Box::new(e)))?
-            .into_iter()
-            .collect();
+        let faceted_attributes = self.faceted_fields(&txn)?.into_iter().collect();

         let criteria = self
             .criteria(&txn)?
@@ -83,8 +78,7 @@ impl Index {
             .collect();

         let stop_words = self
-            .stop_words(&txn)
-            .map_err(|e| IndexError::Internal(e.into()))?
+            .stop_words(&txn)?
             .map(|stop_words| -> Result<BTreeSet<_>> {
                 Ok(stop_words.stream().into_strs()?.into_iter().collect())
             })
@@ -126,9 +120,8 @@ impl Index {
         let txn = self.read_txn()?;

         let fields_ids_map = self.fields_ids_map(&txn)?;
-        let fields_to_display = self
-            .fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let fields_to_display =
+            self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;

         let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit);
@@ -136,8 +129,7 @@ impl Index {
         for entry in iter {
             let (_id, obkv) = entry?;
-            let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)
-                .map_err(|e| IndexError::Internal(e.into()))?;
+            let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)?;
             documents.push(object);
         }
@@ -153,31 +145,24 @@ impl Index {
         let fields_ids_map = self.fields_ids_map(&txn)?;

-        let fields_to_display = self
-            .fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let fields_to_display =
+            self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;

         let internal_id = self
-            .external_documents_ids(&txn)
-            .map_err(|e| IndexError::Internal(e.into()))?
+            .external_documents_ids(&txn)?
             .get(doc_id.as_bytes())
             .ok_or_else(|| IndexError::DocumentNotFound(doc_id.clone()))?;

         let document = self
-            .documents(&txn, std::iter::once(internal_id))
-            .map_err(|e| IndexError::Internal(e.into()))?
+            .documents(&txn, std::iter::once(internal_id))?
             .into_iter()
             .next()
-            .map(|(_, d)| d);
-
-        match document {
-            Some(document) => {
-                let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)
-                    .map_err(|e| IndexError::Internal(e.into()))?;
-                Ok(document)
-            }
-            None => Err(IndexError::DocumentNotFound(doc_id)),
-        }
+            .map(|(_, d)| d)
+            .ok_or(IndexError::DocumentNotFound(doc_id))?;
+
+        let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)?;
+
+        Ok(document)
     }

     pub fn size(&self) -> u64 {
@@ -190,8 +175,7 @@ impl Index {
         attributes_to_retrieve: &Option<Vec<S>>,
         fields_ids_map: &milli::FieldsIdsMap,
     ) -> Result<Vec<u8>> {
-        let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)
-            .map_err(|e| IndexError::Internal(Box::new(e)))? {
+        let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)? {
             Some(ids) => ids.into_iter().collect::<Vec<_>>(),
             None => fields_ids_map.iter().map(|(id, _)| id).collect(),
         };

View File

@@ -12,7 +12,7 @@ use serde_json::Value;

 use crate::index::error::FacetError;

-use super::error::{IndexError, Result};
+use super::error::Result;
 use super::Index;

 pub type Document = IndexMap<String, Value>;
@@ -97,9 +97,8 @@ impl Index {
             matching_words,
             candidates,
             ..
-        } = search
-            .execute()
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        } = search.execute()?;

         let fields_ids_map = self.fields_ids_map(&rtxn).unwrap();

         let displayed_ids = self
@@ -164,6 +163,8 @@ impl Index {
         let mut documents = Vec::new();

+        let documents_iter = self.documents(&rtxn, documents_ids)?;
+
         for (_id, obkv) in self.documents(&rtxn, documents_ids)? {
             let document = make_document(&to_retrieve_ids, &fields_ids_map, obkv)?;
             let formatted = format_fields(
@@ -191,8 +192,7 @@ impl Index {
         }

         let distribution = facet_distribution
             .candidates(candidates)
-            .execute()
-            .map_err(|e| IndexError::Internal(e.into()))?;
+            .execute()?;

         Some(distribution)
     }
@@ -528,8 +528,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
 fn parse_filter(facets: &Value, index: &Index, txn: &RoTxn) -> Result<Option<FilterCondition>> {
     match facets {
         Value::String(expr) => {
-            let condition = FilterCondition::from_str(txn, index, expr)
-                .map_err(|e| IndexError::Internal(e.into()))?;
+            let condition = FilterCondition::from_str(txn, index, expr)?;
             Ok(Some(condition))
         }
         Value::Array(arr) => parse_filter_array(txn, index, arr),
@@ -566,7 +565,7 @@ fn parse_filter_array(
         }
     }

-    FilterCondition::from_array(txn, &index.0, ands).map_err(|e| IndexError::Internal(Box::new(e)))
+    Ok(FilterCondition::from_array(txn, &index.0, ands)?)
 }

 #[cfg(test)]

View File

@@ -8,7 +8,6 @@ use log::info;
 use milli::update::{IndexDocumentsMethod, UpdateBuilder, UpdateFormat};
 use serde::{Deserialize, Serialize, Serializer};

-use crate::index::error::IndexError;
 use crate::index_controller::UpdateResult;

 use super::error::Result;
@@ -206,11 +205,9 @@ impl Index {
         // Set the primary key if not set already, ignore if already set.
         if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) {
-            let mut builder = UpdateBuilder::new(0)
-                .settings(txn, &self);
+            let mut builder = UpdateBuilder::new(0).settings(txn, &self);
             builder.set_primary_key(primary_key.to_string());
-            builder.execute(|_, _| ())
-                .map_err(|e| IndexError::Internal(Box::new(e)))?;
+            builder.execute(|_, _| ())?;
         }

         let mut builder = update_builder.index_documents(txn, self);
@@ -222,15 +219,9 @@ impl Index {
         let gzipped = false;
         let addition = match content {
-            Some(content) if gzipped => builder
-                .execute(GzDecoder::new(content), indexing_callback)
-                .map_err(|e| IndexError::Internal(e.into()))?,
-            Some(content) => builder
-                .execute(content, indexing_callback)
-                .map_err(|e| IndexError::Internal(e.into()))?,
-            None => builder
-                .execute(std::io::empty(), indexing_callback)
-                .map_err(|e| IndexError::Internal(e.into()))?,
+            Some(content) if gzipped => builder.execute(GzDecoder::new(content), indexing_callback)?,
+            Some(content) => builder.execute(content, indexing_callback)?,
+            None => builder.execute(std::io::empty(), indexing_callback)?,
         };

         info!("document addition done: {:?}", addition);
@@ -243,13 +234,11 @@ impl Index {
         let mut wtxn = self.write_txn()?;
         let builder = update_builder.clear_documents(&mut wtxn, self);

-        match builder.execute() {
-            Ok(_count) => wtxn
-                .commit()
-                .and(Ok(UpdateResult::Other))
-                .map_err(Into::into),
-            Err(e) => Err(IndexError::Internal(Box::new(e))),
-        }
+        let _count = builder.execute()?;
+
+        wtxn.commit()
+            .and(Ok(UpdateResult::Other))
+            .map_err(Into::into)
     }

     pub fn update_settings_txn<'a, 'b>(
@@ -308,9 +297,7 @@ impl Index {
             }
         }

-        builder
-            .execute(|indexing_step, update_id| info!("update {}: {:?}", update_id, indexing_step))
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        builder.execute(|indexing_step, update_id| info!("update {}: {:?}", update_id, indexing_step))?;

         Ok(UpdateResult::Other)
     }
@@ -332,22 +319,17 @@ impl Index {
         update_builder: UpdateBuilder,
     ) -> Result<UpdateResult> {
         let mut txn = self.write_txn()?;
-        let mut builder = update_builder
-            .delete_documents(&mut txn, self)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let mut builder = update_builder.delete_documents(&mut txn, self)?;

         // We ignore unexisting document ids
         document_ids.iter().for_each(|id| {
             builder.delete_external_id(id);
         });

-        match builder.execute() {
-            Ok(deleted) => txn
-                .commit()
-                .and(Ok(UpdateResult::DocumentDeletion { deleted }))
-                .map_err(Into::into),
-            Err(e) => Err(IndexError::Internal(Box::new(e))),
-        }
+        let deleted = builder.execute()?;
+        txn.commit()
+            .and(Ok(UpdateResult::DocumentDeletion { deleted }))
+            .map_err(Into::into)
     }
 }

View File

@@ -268,9 +268,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
             }

            let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
            builder.set_primary_key(primary_key);
-            builder
-                .execute(|_, _| ())
-                .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
+            builder.execute(|_, _| ())?;
            let meta = IndexMeta::new_txn(&index, &txn)?;
            txn.commit()?;
            Ok(meta)
@@ -340,13 +338,9 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
             Ok(IndexStats {
                 size: index.size(),
-                number_of_documents: index
-                    .number_of_documents(&rtxn)
-                    .map_err(|e| IndexActorError::Internal(Box::new(e)))?,
+                number_of_documents: index.number_of_documents(&rtxn)?,
                 is_indexing: None,
-                fields_distribution: index
-                    .fields_distribution(&rtxn)
-                    .map_err(|e| IndexActorError::Internal(e.into()))?,
+                fields_distribution: index.fields_distribution(&rtxn)?,
             })
         })
         .await?

View File

@@ -1,6 +1,6 @@
 use meilisearch_error::{Code, ErrorCode};

-use crate::index::error::IndexError;
+use crate::{error::MilliError, index::error::IndexError};

 pub type Result<T> = std::result::Result<T, IndexActorError>;
@@ -16,6 +16,8 @@ pub enum IndexActorError {
     ExistingPrimaryKey,
     #[error("Internal Index Error: {0}")]
     Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
+    #[error("{0}")]
+    Milli(#[from] milli::Error),
 }

 macro_rules! internal_error {
@@ -40,6 +42,7 @@ impl ErrorCode for IndexActorError {
             IndexActorError::UnexistingIndex => Code::IndexNotFound,
             IndexActorError::ExistingPrimaryKey => Code::PrimaryKeyAlreadyPresent,
             IndexActorError::Internal(_) => Code::Internal,
+            IndexActorError::Milli(e) => MilliError(e).error_code(),
         }
     }
 }

View File

@@ -17,8 +17,6 @@ use crate::index::{Checked, Document, Index, SearchQuery, SearchResult, Settings
 use crate::index_controller::{Failed, IndexStats, Processed, Processing};
 use error::Result;

-use self::error::IndexActorError;
-
 use super::IndexSettings;

 mod actor;
@@ -42,12 +40,8 @@ impl IndexMeta {
     }

     fn new_txn(index: &Index, txn: &heed::RoTxn) -> Result<Self> {
-        let created_at = index
-            .created_at(&txn)
-            .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
-        let updated_at = index
-            .updated_at(&txn)
-            .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
+        let created_at = index.created_at(&txn)?;
+        let updated_at = index.updated_at(&txn)?;
         let primary_key = index.primary_key(&txn)?.map(String::from);
         Ok(Self {
             created_at,

View File

@@ -61,8 +61,7 @@ impl IndexStore for MapIndexStore {
             let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
             builder.set_primary_key(primary_key);
-            builder.execute(|_, _| ())
-                .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
+            builder.execute(|_, _| ())?;
             txn.commit()?;
         }

View File

@@ -39,8 +39,6 @@ async fn stats() {
     assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 0);
     assert!(response["indexes"]["test"]["isIndexing"] == false);

-    let last_update = response["lastUpdate"].as_str().unwrap();
-
     let documents = json!([
         {
             "id": 1,