Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-26 06:44:27 +01:00)
integrate milli errors

parent 0dfd1b74c8
commit abdf642d68
Cargo.lock (generated): 2 changed lines
@@ -1708,7 +1708,7 @@ dependencies = [
 [[package]]
 name = "milli"
 version = "0.4.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.4.0#3bd4cf94cc60733393b94021fca77eb100bfe17a"
+source = "git+https://github.com/meilisearch/milli.git?rev=70bee7d405711d5e6d24b62710e92671be5ac67a#70bee7d405711d5e6d24b62710e92671be5ac67a"
 dependencies = [
  "bstr",
  "byteorder",
@@ -51,7 +51,7 @@ main_error = "0.1.0"
 meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-tokenizer = { git = "https://github.com/meilisearch/Tokenizer.git", tag = "v0.2.2" }
 memmap = "0.7.0"
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.4.0" }
+milli = { git = "https://github.com/meilisearch/milli.git", rev = "70bee7d405711d5e6d24b62710e92671be5ac67a" }
 mime = "0.3.16"
 once_cell = "1.5.2"
 oxidized-json-checker = "0.3.2"
@@ -7,6 +7,7 @@ use actix_web::body::Body;
 use actix_web::dev::BaseHttpResponseBuilder;
 use actix_web::http::StatusCode;
 use meilisearch_error::{Code, ErrorCode};
+use milli::UserError;
 use serde::ser::{Serialize, SerializeStruct, Serializer};

 use crate::index_controller::error::IndexControllerError;
@@ -139,3 +140,44 @@ macro_rules! internal_error {
         )*
     }
 }
+
+#[derive(Debug)]
+pub struct MilliError<'a>(pub &'a milli::Error);
+
+impl Error for MilliError<'_> {}
+
+impl fmt::Display for MilliError<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+impl ErrorCode for MilliError<'_> {
+    fn error_code(&self) -> Code {
+        match self.0 {
+            milli::Error::InternalError(_) => Code::Internal,
+            milli::Error::IoError(_) => Code::Internal,
+            milli::Error::UserError(ref error) => {
+                match error {
+                    // TODO: wait for spec for new error codes.
+                    UserError::AttributeLimitReached
+                    | UserError::Csv(_)
+                    | UserError::SerdeJson(_)
+                    | UserError::MaxDatabaseSizeReached
+                    | UserError::InvalidCriterionName { .. }
+                    | UserError::InvalidDocumentId { .. }
+                    | UserError::InvalidStoreFile
+                    | UserError::NoSpaceLeftOnDevice
+                    | UserError::DocumentLimitReached => todo!(),
+                    UserError::InvalidFilter(_) => Code::Filter,
+                    UserError::InvalidFilterAttribute(_) => Code::Filter,
+                    UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
+                    UserError::MissingPrimaryKey => Code::MissingPrimaryKey,
+                    UserError::PrimaryKeyCannotBeChanged => Code::PrimaryKeyAlreadyPresent,
+                    UserError::PrimaryKeyCannotBeReset => Code::PrimaryKeyAlreadyPresent,
+                    UserError::UnknownInternalDocumentId { .. } => Code::DocumentNotFound,
+                }
+            },
+        }
+    }
+}
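Aside: MilliError is a borrowing newtype, the usual Rust workaround for the orphan rule when a local trait (here ErrorCode) must be implemented for a type owned by another crate (milli::Error). A minimal, self-contained sketch of that pattern; ExternalError, Code, and ErrorCode below are simplified stand-ins, not the real milli or meilisearch-error definitions:

    use std::fmt;

    // Stand-in for a foreign error type such as milli::Error.
    #[derive(Debug)]
    enum ExternalError {
        Internal(String),
        User(String),
    }

    // Stand-ins for meilisearch-error's Code / ErrorCode.
    #[derive(Debug, PartialEq)]
    enum Code {
        Internal,
        BadRequest,
    }

    trait ErrorCode: fmt::Display {
        fn error_code(&self) -> Code;
    }

    // Borrowing newtype: wraps a reference to the foreign error so local
    // traits can be implemented for it without taking ownership.
    #[derive(Debug)]
    struct MilliError<'a>(&'a ExternalError);

    impl fmt::Display for MilliError<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self.0 {
                ExternalError::Internal(msg) => write!(f, "internal: {}", msg),
                ExternalError::User(msg) => write!(f, "user: {}", msg),
            }
        }
    }

    impl ErrorCode for MilliError<'_> {
        fn error_code(&self) -> Code {
            match self.0 {
                ExternalError::Internal(_) => Code::Internal,
                ExternalError::User(_) => Code::BadRequest,
            }
        }
    }

    fn main() {
        let err = ExternalError::User("invalid filter".into());
        let wrapped = MilliError(&err);
        assert_eq!(wrapped.error_code(), Code::BadRequest);
        println!("{} -> {:?}", wrapped, wrapped.error_code());
    }

Because the wrapper only borrows, a call site can map an error it does not own, which is how the IndexError::Milli(e) => MilliError(e).error_code() arm further down in this diff uses it.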
@@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize};

 use crate::option::IndexerOpts;

-use super::error::{IndexError, Result};
+use super::error::Result;
 use super::{update_handler::UpdateHandler, Index, Settings, Unchecked};

 #[derive(Serialize, Deserialize)]
@@ -38,9 +38,7 @@ impl Index {
         let document_file_path = path.as_ref().join(DATA_FILE_NAME);
         let mut document_file = File::create(&document_file_path)?;

-        let documents = self
-            .all_documents(txn)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let documents = self.all_documents(txn)?;
         let fields_ids_map = self.fields_ids_map(txn)?;

         // dump documents
@@ -3,6 +3,8 @@ use std::error::Error;
 use meilisearch_error::{Code, ErrorCode};
 use serde_json::Value;

+use crate::error::MilliError;
+
 pub type Result<T> = std::result::Result<T, IndexError>;

 #[derive(Debug, thiserror::Error)]
@@ -13,6 +15,8 @@ pub enum IndexError {
     DocumentNotFound(String),
     #[error("error with facet: {0}")]
     Facet(#[from] FacetError),
+    #[error("{0}")]
+    Milli(#[from] milli::Error),
 }

 internal_error!(
@@ -29,6 +33,7 @@ impl ErrorCode for IndexError {
             IndexError::Internal(_) => Code::Internal,
             IndexError::DocumentNotFound(_) => Code::DocumentNotFound,
             IndexError::Facet(e) => e.error_code(),
+            IndexError::Milli(e) => MilliError(e).error_code(),
         }
     }
 }
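Most of the call-site hunks below follow mechanically from the new Milli(#[from] milli::Error) variant: thiserror's #[from] attribute generates a From impl, so the ? operator performs the conversion and the manual .map_err(|e| IndexError::Internal(e.into())) wrappers become unnecessary. A small, self-contained sketch of that mechanism, using the same thiserror derive the enum above uses; LowLevelError, low_level_op, and fetch_documents are invented names for illustration:

    use thiserror::Error;

    #[derive(Debug, Error)]
    enum LowLevelError {
        #[error("I/O failure: {0}")]
        Io(String),
    }

    #[derive(Debug, Error)]
    enum IndexError {
        // `#[from]` generates `impl From<LowLevelError> for IndexError`,
        // which is what lets `?` convert the error automatically.
        #[error("{0}")]
        LowLevel(#[from] LowLevelError),
    }

    fn low_level_op(fail: bool) -> Result<u32, LowLevelError> {
        if fail {
            Err(LowLevelError::Io("disk full".into()))
        } else {
            Ok(42)
        }
    }

    // Before: low_level_op(false).map_err(|e| ...)?
    // After: `?` alone performs the conversion via the generated From impl.
    fn fetch_documents() -> Result<u32, IndexError> {
        let count = low_level_op(false)?;
        Ok(count)
    }

    fn main() {
        println!("{:?}", fetch_documents());
    }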
@@ -51,8 +51,7 @@ impl Index {
         create_dir_all(&path)?;
         let mut options = EnvOpenOptions::new();
         options.map_size(size);
-        let index =
-            milli::Index::new(options, &path).map_err(|e| IndexError::Internal(e.into()))?;
+        let index = milli::Index::new(options, &path)?;
         Ok(Index(Arc::new(index)))
     }

@@ -70,11 +69,7 @@ impl Index {
             .searchable_fields(&txn)?
             .map(|fields| fields.into_iter().map(String::from).collect());

-        let faceted_attributes = self
-            .faceted_fields(&txn)
-            .map_err(|e| IndexError::Internal(Box::new(e)))?
-            .into_iter()
-            .collect();
+        let faceted_attributes = self.faceted_fields(&txn)?.into_iter().collect();

         let criteria = self
             .criteria(&txn)?
@@ -83,8 +78,7 @@ impl Index {
             .collect();

         let stop_words = self
-            .stop_words(&txn)
-            .map_err(|e| IndexError::Internal(e.into()))?
+            .stop_words(&txn)?
             .map(|stop_words| -> Result<BTreeSet<_>> {
                 Ok(stop_words.stream().into_strs()?.into_iter().collect())
             })
@@ -126,9 +120,8 @@ impl Index {
         let txn = self.read_txn()?;

         let fields_ids_map = self.fields_ids_map(&txn)?;
-        let fields_to_display = self
-            .fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let fields_to_display =
+            self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;

         let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit);

@@ -136,8 +129,7 @@ impl Index {

         for entry in iter {
             let (_id, obkv) = entry?;
-            let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)
-                .map_err(|e| IndexError::Internal(e.into()))?;
+            let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)?;
             documents.push(object);
         }

@@ -153,32 +145,25 @@ impl Index {

         let fields_ids_map = self.fields_ids_map(&txn)?;

-        let fields_to_display = self
-            .fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let fields_to_display =
+            self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;

         let internal_id = self
-            .external_documents_ids(&txn)
-            .map_err(|e| IndexError::Internal(e.into()))?
+            .external_documents_ids(&txn)?
             .get(doc_id.as_bytes())
             .ok_or_else(|| IndexError::DocumentNotFound(doc_id.clone()))?;

         let document = self
-            .documents(&txn, std::iter::once(internal_id))
-            .map_err(|e| IndexError::Internal(e.into()))?
+            .documents(&txn, std::iter::once(internal_id))?
             .into_iter()
             .next()
-            .map(|(_, d)| d);
+            .map(|(_, d)| d)
+            .ok_or(IndexError::DocumentNotFound(doc_id))?;
+
+        let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)?;

-        match document {
-            Some(document) => {
-                let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)
-                    .map_err(|e| IndexError::Internal(e.into()))?;
-                Ok(document)
-            }
-            None => Err(IndexError::DocumentNotFound(doc_id)),
-        }
+        Ok(document)
     }

     pub fn size(&self) -> u64 {
         self.env.size()
@@ -190,8 +175,7 @@ impl Index {
         attributes_to_retrieve: &Option<Vec<S>>,
         fields_ids_map: &milli::FieldsIdsMap,
     ) -> Result<Vec<u8>> {
-        let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)
-            .map_err(|e| IndexError::Internal(Box::new(e)))? {
+        let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)? {
             Some(ids) => ids.into_iter().collect::<Vec<_>>(),
             None => fields_ids_map.iter().map(|(id, _)| id).collect(),
         };
@@ -12,7 +12,7 @@ use serde_json::Value;

 use crate::index::error::FacetError;

-use super::error::{IndexError, Result};
+use super::error::Result;
 use super::Index;

 pub type Document = IndexMap<String, Value>;
@@ -97,9 +97,8 @@ impl Index {
             matching_words,
             candidates,
             ..
-        } = search
-            .execute()
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        } = search.execute()?;

         let fields_ids_map = self.fields_ids_map(&rtxn).unwrap();

         let displayed_ids = self
@@ -164,6 +163,8 @@ impl Index {

         let mut documents = Vec::new();

+        let documents_iter = self.documents(&rtxn, documents_ids)?;
+
         for (_id, obkv) in self.documents(&rtxn, documents_ids)? {
             let document = make_document(&to_retrieve_ids, &fields_ids_map, obkv)?;
             let formatted = format_fields(
@@ -191,8 +192,7 @@ impl Index {
             }
             let distribution = facet_distribution
                 .candidates(candidates)
-                .execute()
-                .map_err(|e| IndexError::Internal(e.into()))?;
+                .execute()?;

             Some(distribution)
         }
@@ -528,8 +528,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
 fn parse_filter(facets: &Value, index: &Index, txn: &RoTxn) -> Result<Option<FilterCondition>> {
     match facets {
         Value::String(expr) => {
-            let condition = FilterCondition::from_str(txn, index, expr)
-                .map_err(|e| IndexError::Internal(e.into()))?;
+            let condition = FilterCondition::from_str(txn, index, expr)?;
             Ok(Some(condition))
         }
         Value::Array(arr) => parse_filter_array(txn, index, arr),
@@ -566,7 +565,7 @@ fn parse_filter_array(
         }
     }

-    FilterCondition::from_array(txn, &index.0, ands).map_err(|e| IndexError::Internal(Box::new(e)))
+    Ok(FilterCondition::from_array(txn, &index.0, ands)?)
 }

 #[cfg(test)]
@@ -8,7 +8,6 @@ use log::info;
 use milli::update::{IndexDocumentsMethod, UpdateBuilder, UpdateFormat};
 use serde::{Deserialize, Serialize, Serializer};

-use crate::index::error::IndexError;
 use crate::index_controller::UpdateResult;

 use super::error::Result;
@@ -206,11 +205,9 @@ impl Index {

         // Set the primary key if not set already, ignore if already set.
         if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) {
-            let mut builder = UpdateBuilder::new(0)
-                .settings(txn, &self);
+            let mut builder = UpdateBuilder::new(0).settings(txn, &self);
             builder.set_primary_key(primary_key.to_string());
-            builder.execute(|_, _| ())
-                .map_err(|e| IndexError::Internal(Box::new(e)))?;
+            builder.execute(|_, _| ())?;
         }

         let mut builder = update_builder.index_documents(txn, self);
@@ -222,15 +219,9 @@ impl Index {

         let gzipped = false;
         let addition = match content {
-            Some(content) if gzipped => builder
-                .execute(GzDecoder::new(content), indexing_callback)
-                .map_err(|e| IndexError::Internal(e.into()))?,
-            Some(content) => builder
-                .execute(content, indexing_callback)
-                .map_err(|e| IndexError::Internal(e.into()))?,
-            None => builder
-                .execute(std::io::empty(), indexing_callback)
-                .map_err(|e| IndexError::Internal(e.into()))?,
+            Some(content) if gzipped => builder.execute(GzDecoder::new(content), indexing_callback)?,
+            Some(content) => builder.execute(content, indexing_callback)?,
+            None => builder.execute(std::io::empty(), indexing_callback)?,
         };

         info!("document addition done: {:?}", addition);
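All three arms above hand builder.execute() a value implementing std::io::Read; the only difference is whether the payload is gzip-compressed, plain, or absent. A stand-alone sketch of that shape, where count_bytes and the flate2 round-trip are invented for the example and are not code from this commit:

    use std::io::{self, Read, Write};

    use flate2::read::GzDecoder;
    use flate2::write::GzEncoder;
    use flate2::Compression;

    // Any `Read` can be consumed the same way, so a gzip decoder, a plain
    // byte slice, or `std::io::empty()` all fit the same generic function.
    fn count_bytes<R: Read>(mut reader: R) -> io::Result<u64> {
        let mut buf = Vec::new();
        reader.read_to_end(&mut buf)?;
        Ok(buf.len() as u64)
    }

    fn main() -> io::Result<()> {
        // Build a small gzipped payload so the example is self-contained.
        let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
        encoder.write_all(b"{\"id\": 1}")?;
        let gzipped = encoder.finish()?;

        let from_gzip = count_bytes(GzDecoder::new(&gzipped[..]))?;
        let from_plain = count_bytes(&b"{\"id\": 1}"[..])?;
        let from_empty = count_bytes(io::empty())?;

        println!("{} {} {}", from_gzip, from_plain, from_empty); // 9 9 0
        Ok(())
    }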
@@ -243,13 +234,11 @@ impl Index {
         let mut wtxn = self.write_txn()?;
         let builder = update_builder.clear_documents(&mut wtxn, self);

-        match builder.execute() {
-            Ok(_count) => wtxn
-                .commit()
+        let _count = builder.execute()?;
+
+        wtxn.commit()
             .and(Ok(UpdateResult::Other))
-                .map_err(Into::into),
-            Err(e) => Err(IndexError::Internal(Box::new(e))),
-        }
+            .map_err(Into::into)
     }

     pub fn update_settings_txn<'a, 'b>(
@@ -308,9 +297,7 @@ impl Index {
             }
         }

-        builder
-            .execute(|indexing_step, update_id| info!("update {}: {:?}", update_id, indexing_step))
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        builder.execute(|indexing_step, update_id| info!("update {}: {:?}", update_id, indexing_step))?;

         Ok(UpdateResult::Other)
     }
@@ -332,22 +319,17 @@ impl Index {
         update_builder: UpdateBuilder,
     ) -> Result<UpdateResult> {
         let mut txn = self.write_txn()?;
-        let mut builder = update_builder
-            .delete_documents(&mut txn, self)
-            .map_err(|e| IndexError::Internal(e.into()))?;
+        let mut builder = update_builder.delete_documents(&mut txn, self)?;

         // We ignore unexisting document ids
         document_ids.iter().for_each(|id| {
             builder.delete_external_id(id);
         });

-        match builder.execute() {
-            Ok(deleted) => txn
-                .commit()
+        let deleted = builder.execute()?;
+        txn.commit()
             .and(Ok(UpdateResult::DocumentDeletion { deleted }))
-                .map_err(Into::into),
-            Err(e) => Err(IndexError::Internal(Box::new(e))),
-        }
+            .map_err(Into::into)
     }
 }

@@ -268,9 +268,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
             }
             let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
             builder.set_primary_key(primary_key);
-            builder
-                .execute(|_, _| ())
-                .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
+            builder.execute(|_, _| ())?;
             let meta = IndexMeta::new_txn(&index, &txn)?;
             txn.commit()?;
             Ok(meta)
@@ -340,13 +338,9 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {

             Ok(IndexStats {
                 size: index.size(),
-                number_of_documents: index
-                    .number_of_documents(&rtxn)
-                    .map_err(|e| IndexActorError::Internal(Box::new(e)))?,
+                number_of_documents: index.number_of_documents(&rtxn)?,
                 is_indexing: None,
-                fields_distribution: index
-                    .fields_distribution(&rtxn)
-                    .map_err(|e| IndexActorError::Internal(e.into()))?,
+                fields_distribution: index.fields_distribution(&rtxn)?,
             })
         })
         .await?
@@ -1,6 +1,6 @@
 use meilisearch_error::{Code, ErrorCode};

-use crate::index::error::IndexError;
+use crate::{error::MilliError, index::error::IndexError};

 pub type Result<T> = std::result::Result<T, IndexActorError>;

@@ -16,6 +16,8 @@ pub enum IndexActorError {
     ExistingPrimaryKey,
     #[error("Internal Index Error: {0}")]
     Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
+    #[error("{0}")]
+    Milli(#[from] milli::Error),
 }

 macro_rules! internal_error {
@@ -40,6 +42,7 @@ impl ErrorCode for IndexActorError {
             IndexActorError::UnexistingIndex => Code::IndexNotFound,
             IndexActorError::ExistingPrimaryKey => Code::PrimaryKeyAlreadyPresent,
             IndexActorError::Internal(_) => Code::Internal,
+            IndexActorError::Milli(e) => MilliError(e).error_code(),
         }
     }
 }
@@ -17,8 +17,6 @@ use crate::index::{Checked, Document, Index, SearchQuery, SearchResult, Settings
 use crate::index_controller::{Failed, IndexStats, Processed, Processing};
 use error::Result;

-use self::error::IndexActorError;
-
 use super::IndexSettings;

 mod actor;
@@ -42,12 +40,8 @@ impl IndexMeta {
     }

     fn new_txn(index: &Index, txn: &heed::RoTxn) -> Result<Self> {
-        let created_at = index
-            .created_at(&txn)
-            .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
-        let updated_at = index
-            .updated_at(&txn)
-            .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
+        let created_at = index.created_at(&txn)?;
+        let updated_at = index.updated_at(&txn)?;
         let primary_key = index.primary_key(&txn)?.map(String::from);
         Ok(Self {
             created_at,
@@ -61,8 +61,7 @@ impl IndexStore for MapIndexStore {

                 let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
                 builder.set_primary_key(primary_key);
-                builder.execute(|_, _| ())
-                    .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
+                builder.execute(|_, _| ())?;

                 txn.commit()?;
             }
@@ -39,8 +39,6 @@ async fn stats() {
     assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 0);
     assert!(response["indexes"]["test"]["isIndexing"] == false);

-    let last_update = response["lastUpdate"].as_str().unwrap();
-
     let documents = json!([
         {
             "id": 1,