210: Error handling r=MarinPostma a=MarinPostma

This PR implements error handling for Meilisearch.

Rather than grouping errors by type, this implementation groups them by scope, with each scope enclosing errors from the scope below it or introducing new errors of its own. This makes tracking the origin of errors easier, and simplifies error handling at the module level.

All errors that are eventually returned to the user implement the `Into<ResponseError>` trait. `ResponseError` in turn implements the `ErrorCode` trait from `meilisearch-error`.

Some new errors have been introduced with the new engine for which we haven't defined error codes yet. It has been decided with @gmourier that those would return the `internal-error` code until the correct error code is specified.


Co-authored-by: marin postma <postma.marin@protonmail.com>
This commit is contained in:
bors[bot] 2021-06-22 13:21:33 +00:00 committed by GitHub
commit 25af262e79
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
59 changed files with 849 additions and 1105 deletions

4
Cargo.lock generated
View File

@ -1707,8 +1707,8 @@ dependencies = [
[[package]] [[package]]
name = "milli" name = "milli"
version = "0.4.0" version = "0.4.1"
source = "git+https://github.com/meilisearch/milli.git?tag=v0.4.0#3bd4cf94cc60733393b94021fca77eb100bfe17a" source = "git+https://github.com/meilisearch/milli.git?tag=v0.4.1#a67ccfdf3ac093b51bdf5ada3621fd6663897497"
dependencies = [ dependencies = [
"bstr", "bstr",
"byteorder", "byteorder",

View File

@ -51,7 +51,7 @@ main_error = "0.1.0"
meilisearch-error = { path = "../meilisearch-error" } meilisearch-error = { path = "../meilisearch-error" }
meilisearch-tokenizer = { git = "https://github.com/meilisearch/Tokenizer.git", tag = "v0.2.2" } meilisearch-tokenizer = { git = "https://github.com/meilisearch/Tokenizer.git", tag = "v0.2.2" }
memmap = "0.7.0" memmap = "0.7.0"
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.4.0" } milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.4.1" }
mime = "0.3.16" mime = "0.3.16"
once_cell = "1.5.2" once_cell = "1.5.2"
oxidized-json-checker = "0.3.2" oxidized-json-checker = "0.3.2"

View File

@ -5,7 +5,7 @@ use sha2::Digest;
use crate::index::{Checked, Settings}; use crate::index::{Checked, Settings};
use crate::index_controller::{ use crate::index_controller::{
DumpInfo, IndexController, IndexMetadata, IndexSettings, IndexStats, Stats, error::Result, DumpInfo, IndexController, IndexMetadata, IndexSettings, IndexStats, Stats,
}; };
use crate::option::Opt; use crate::option::Opt;
@ -79,15 +79,15 @@ impl Data {
Ok(Data { inner }) Ok(Data { inner })
} }
pub async fn settings(&self, uid: String) -> anyhow::Result<Settings<Checked>> { pub async fn settings(&self, uid: String) -> Result<Settings<Checked>> {
self.index_controller.settings(uid).await self.index_controller.settings(uid).await
} }
pub async fn list_indexes(&self) -> anyhow::Result<Vec<IndexMetadata>> { pub async fn list_indexes(&self) -> Result<Vec<IndexMetadata>> {
self.index_controller.list_indexes().await self.index_controller.list_indexes().await
} }
pub async fn index(&self, uid: String) -> anyhow::Result<IndexMetadata> { pub async fn index(&self, uid: String) -> Result<IndexMetadata> {
self.index_controller.get_index(uid).await self.index_controller.get_index(uid).await
} }
@ -95,7 +95,7 @@ impl Data {
&self, &self,
uid: String, uid: String,
primary_key: Option<String>, primary_key: Option<String>,
) -> anyhow::Result<IndexMetadata> { ) -> Result<IndexMetadata> {
let settings = IndexSettings { let settings = IndexSettings {
uid: Some(uid), uid: Some(uid),
primary_key, primary_key,
@ -105,19 +105,19 @@ impl Data {
Ok(meta) Ok(meta)
} }
pub async fn get_index_stats(&self, uid: String) -> anyhow::Result<IndexStats> { pub async fn get_index_stats(&self, uid: String) -> Result<IndexStats> {
Ok(self.index_controller.get_index_stats(uid).await?) Ok(self.index_controller.get_index_stats(uid).await?)
} }
pub async fn get_all_stats(&self) -> anyhow::Result<Stats> { pub async fn get_all_stats(&self) -> Result<Stats> {
Ok(self.index_controller.get_all_stats().await?) Ok(self.index_controller.get_all_stats().await?)
} }
pub async fn create_dump(&self) -> anyhow::Result<DumpInfo> { pub async fn create_dump(&self) -> Result<DumpInfo> {
Ok(self.index_controller.create_dump().await?) Ok(self.index_controller.create_dump().await?)
} }
pub async fn dump_status(&self, uid: String) -> anyhow::Result<DumpInfo> { pub async fn dump_status(&self, uid: String) -> Result<DumpInfo> {
Ok(self.index_controller.dump_info(uid).await?) Ok(self.index_controller.dump_info(uid).await?)
} }

View File

@ -2,13 +2,10 @@ use serde_json::{Map, Value};
use super::Data; use super::Data;
use crate::index::{SearchQuery, SearchResult}; use crate::index::{SearchQuery, SearchResult};
use crate::index_controller::error::Result;
impl Data { impl Data {
pub async fn search( pub async fn search(&self, index: String, search_query: SearchQuery) -> Result<SearchResult> {
&self,
index: String,
search_query: SearchQuery,
) -> anyhow::Result<SearchResult> {
self.index_controller.search(index, search_query).await self.index_controller.search(index, search_query).await
} }
@ -18,7 +15,7 @@ impl Data {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> anyhow::Result<Vec<Map<String, Value>>> { ) -> Result<Vec<Map<String, Value>>> {
self.index_controller self.index_controller
.documents(index, offset, limit, attributes_to_retrieve) .documents(index, offset, limit, attributes_to_retrieve)
.await .await
@ -29,7 +26,7 @@ impl Data {
index: String, index: String,
document_id: String, document_id: String,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> anyhow::Result<Map<String, Value>> { ) -> Result<Map<String, Value>> {
self.index_controller self.index_controller
.document(index, document_id, attributes_to_retrieve) .document(index, document_id, attributes_to_retrieve)
.await .await

View File

@ -3,7 +3,7 @@ use milli::update::{IndexDocumentsMethod, UpdateFormat};
use super::Data; use super::Data;
use crate::index::{Checked, Settings}; use crate::index::{Checked, Settings};
use crate::index_controller::{IndexMetadata, IndexSettings, UpdateStatus}; use crate::index_controller::{error::Result, IndexMetadata, IndexSettings, UpdateStatus};
impl Data { impl Data {
pub async fn add_documents( pub async fn add_documents(
@ -13,7 +13,7 @@ impl Data {
format: UpdateFormat, format: UpdateFormat,
stream: Payload, stream: Payload,
primary_key: Option<String>, primary_key: Option<String>,
) -> anyhow::Result<UpdateStatus> { ) -> Result<UpdateStatus> {
let update_status = self let update_status = self
.index_controller .index_controller
.add_documents(index, method, format, stream, primary_key) .add_documents(index, method, format, stream, primary_key)
@ -26,7 +26,7 @@ impl Data {
index: String, index: String,
settings: Settings<Checked>, settings: Settings<Checked>,
create: bool, create: bool,
) -> anyhow::Result<UpdateStatus> { ) -> Result<UpdateStatus> {
let update = self let update = self
.index_controller .index_controller
.update_settings(index, settings, create) .update_settings(index, settings, create)
@ -34,7 +34,7 @@ impl Data {
Ok(update) Ok(update)
} }
pub async fn clear_documents(&self, index: String) -> anyhow::Result<UpdateStatus> { pub async fn clear_documents(&self, index: String) -> Result<UpdateStatus> {
let update = self.index_controller.clear_documents(index).await?; let update = self.index_controller.clear_documents(index).await?;
Ok(update) Ok(update)
} }
@ -43,7 +43,7 @@ impl Data {
&self, &self,
index: String, index: String,
document_ids: Vec<String>, document_ids: Vec<String>,
) -> anyhow::Result<UpdateStatus> { ) -> Result<UpdateStatus> {
let update = self let update = self
.index_controller .index_controller
.delete_documents(index, document_ids) .delete_documents(index, document_ids)
@ -51,16 +51,16 @@ impl Data {
Ok(update) Ok(update)
} }
pub async fn delete_index(&self, index: String) -> anyhow::Result<()> { pub async fn delete_index(&self, index: String) -> Result<()> {
self.index_controller.delete_index(index).await?; self.index_controller.delete_index(index).await?;
Ok(()) Ok(())
} }
pub async fn get_update_status(&self, index: String, uid: u64) -> anyhow::Result<UpdateStatus> { pub async fn get_update_status(&self, index: String, uid: u64) -> Result<UpdateStatus> {
self.index_controller.update_status(index, uid).await self.index_controller.update_status(index, uid).await
} }
pub async fn get_updates_status(&self, index: String) -> anyhow::Result<Vec<UpdateStatus>> { pub async fn get_updates_status(&self, index: String) -> Result<Vec<UpdateStatus>> {
self.index_controller.all_update_status(index).await self.index_controller.all_update_status(index).await
} }
@ -69,7 +69,7 @@ impl Data {
uid: String, uid: String,
primary_key: Option<String>, primary_key: Option<String>,
new_uid: Option<String>, new_uid: Option<String>,
) -> anyhow::Result<IndexMetadata> { ) -> Result<IndexMetadata> {
let settings = IndexSettings { let settings = IndexSettings {
uid: new_uid, uid: new_uid,
primary_key, primary_key,

View File

@ -1,321 +1,182 @@
use std::error; use std::error::Error;
use std::fmt; use std::fmt;
use actix_web as aweb; use actix_web as aweb;
use actix_web::body::Body; use actix_web::body::Body;
use actix_web::dev::BaseHttpResponseBuilder; use actix_web::dev::BaseHttpResponseBuilder;
use actix_web::error::{JsonPayloadError, QueryPayloadError};
use actix_web::http::Error as HttpError;
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use aweb::error::{JsonPayloadError, QueryPayloadError};
use meilisearch_error::{Code, ErrorCode}; use meilisearch_error::{Code, ErrorCode};
use serde::ser::{Serialize, SerializeStruct, Serializer}; use milli::UserError;
use serde::{Deserialize, Serialize};
#[derive(Debug)] #[derive(Debug, thiserror::Error)]
pub struct ResponseError { pub enum AuthenticationError {
inner: Box<dyn ErrorCode>, #[error("you must have an authorization token")]
MissingAuthorizationHeader,
#[error("invalid API key")]
InvalidToken(String),
} }
impl error::Error for ResponseError {} impl ErrorCode for AuthenticationError {
impl ErrorCode for ResponseError {
fn error_code(&self) -> Code { fn error_code(&self) -> Code {
self.inner.error_code() match self {
AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
AuthenticationError::InvalidToken(_) => Code::InvalidToken,
}
} }
} }
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ResponseError {
#[serde(skip)]
code: StatusCode,
message: String,
error_code: String,
error_type: String,
error_link: String,
}
impl fmt::Display for ResponseError { impl fmt::Display for ResponseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f) self.message.fmt(f)
} }
} }
// TODO: remove this when implementing actual error handling impl<T> From<T> for ResponseError
impl From<anyhow::Error> for ResponseError { where
fn from(other: anyhow::Error) -> ResponseError { T: ErrorCode,
ResponseError { {
inner: Box::new(Error::NotFound(other.to_string())), fn from(other: T) -> Self {
Self {
code: other.http_status(),
message: other.to_string(),
error_code: other.error_name(),
error_type: other.error_type(),
error_link: other.error_url(),
} }
} }
} }
impl From<Error> for ResponseError {
fn from(error: Error) -> ResponseError {
ResponseError {
inner: Box::new(error),
}
}
}
impl From<FacetCountError> for ResponseError {
fn from(err: FacetCountError) -> ResponseError {
ResponseError {
inner: Box::new(err),
}
}
}
impl Serialize for ResponseError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let struct_name = "ResponseError";
let field_count = 4;
let mut state = serializer.serialize_struct(struct_name, field_count)?;
state.serialize_field("message", &self.to_string())?;
state.serialize_field("errorCode", &self.error_name())?;
state.serialize_field("errorType", &self.error_type())?;
state.serialize_field("errorLink", &self.error_url())?;
state.end()
}
}
impl aweb::error::ResponseError for ResponseError { impl aweb::error::ResponseError for ResponseError {
fn error_response(&self) -> aweb::BaseHttpResponse<Body> { fn error_response(&self) -> aweb::BaseHttpResponse<Body> {
let json = serde_json::to_vec(self).unwrap(); let json = serde_json::to_vec(self).unwrap();
BaseHttpResponseBuilder::new(self.status_code()).body(json) BaseHttpResponseBuilder::new(self.status_code())
.content_type("application/json")
.body(json)
} }
fn status_code(&self) -> StatusCode { fn status_code(&self) -> StatusCode {
self.http_status() self.code
}
}
macro_rules! internal_error {
($target:ty : $($other:path), *) => {
$(
impl From<$other> for $target {
fn from(other: $other) -> Self {
Self::Internal(Box::new(other))
}
}
)*
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub enum Error { pub struct MilliError<'a>(pub &'a milli::Error);
BadParameter(String, String),
BadRequest(String), impl Error for MilliError<'_> {}
CreateIndex(String),
DocumentNotFound(String), impl fmt::Display for MilliError<'_> {
IndexNotFound(String), fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
IndexAlreadyExists(String), self.0.fmt(f)
Internal(String), }
InvalidIndexUid,
InvalidToken(String),
MissingAuthorizationHeader,
NotFound(String),
OpenIndex(String),
RetrieveDocument(u32, String),
SearchDocuments(String),
PayloadTooLarge,
UnsupportedMediaType,
DumpAlreadyInProgress,
DumpProcessFailed(String),
} }
impl error::Error for Error {} impl ErrorCode for MilliError<'_> {
impl ErrorCode for Error {
fn error_code(&self) -> Code { fn error_code(&self) -> Code {
use Error::*; match self.0 {
milli::Error::InternalError(_) => Code::Internal,
milli::Error::IoError(_) => Code::Internal,
milli::Error::UserError(ref error) => {
match error {
// TODO: wait for spec for new error codes.
| UserError::Csv(_)
| UserError::SerdeJson(_)
| UserError::MaxDatabaseSizeReached
| UserError::InvalidCriterionName { .. }
| UserError::InvalidDocumentId { .. }
| UserError::InvalidStoreFile
| UserError::NoSpaceLeftOnDevice
| UserError::DocumentLimitReached => Code::Internal,
UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
UserError::InvalidFilter(_) => Code::Filter,
UserError::InvalidFilterAttribute(_) => Code::Filter,
UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
UserError::MissingPrimaryKey => Code::MissingPrimaryKey,
UserError::PrimaryKeyCannotBeChanged => Code::PrimaryKeyAlreadyPresent,
UserError::PrimaryKeyCannotBeReset => Code::PrimaryKeyAlreadyPresent,
UserError::UnknownInternalDocumentId { .. } => Code::DocumentNotFound,
}
}
}
}
}
impl fmt::Display for PayloadError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
BadParameter(_, _) => Code::BadParameter, PayloadError::Json(e) => e.fmt(f),
BadRequest(_) => Code::BadRequest, PayloadError::Query(e) => e.fmt(f),
CreateIndex(_) => Code::CreateIndex,
DocumentNotFound(_) => Code::DocumentNotFound,
IndexNotFound(_) => Code::IndexNotFound,
IndexAlreadyExists(_) => Code::IndexAlreadyExists,
Internal(_) => Code::Internal,
InvalidIndexUid => Code::InvalidIndexUid,
InvalidToken(_) => Code::InvalidToken,
MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
NotFound(_) => Code::NotFound,
OpenIndex(_) => Code::OpenIndex,
RetrieveDocument(_, _) => Code::RetrieveDocument,
SearchDocuments(_) => Code::SearchDocuments,
PayloadTooLarge => Code::PayloadTooLarge,
UnsupportedMediaType => Code::UnsupportedMediaType,
_ => unreachable!()
//DumpAlreadyInProgress => Code::DumpAlreadyInProgress,
//DumpProcessFailed(_) => Code::DumpProcessFailed,
} }
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub enum FacetCountError { pub enum PayloadError {
AttributeNotSet(String), Json(JsonPayloadError),
SyntaxError(String), Query(QueryPayloadError),
UnexpectedToken {
found: String,
expected: &'static [&'static str],
},
NoFacetSet,
} }
impl error::Error for FacetCountError {} impl Error for PayloadError {}
impl ErrorCode for FacetCountError { impl ErrorCode for PayloadError {
fn error_code(&self) -> Code { fn error_code(&self) -> Code {
Code::BadRequest
}
}
impl FacetCountError {
pub fn unexpected_token(
found: impl ToString,
expected: &'static [&'static str],
) -> FacetCountError {
let found = found.to_string();
FacetCountError::UnexpectedToken { expected, found }
}
}
impl From<serde_json::error::Error> for FacetCountError {
fn from(other: serde_json::error::Error) -> FacetCountError {
FacetCountError::SyntaxError(other.to_string())
}
}
impl fmt::Display for FacetCountError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use FacetCountError::*;
match self { match self {
AttributeNotSet(attr) => write!(f, "Attribute {} is not set as facet", attr), PayloadError::Json(err) => match err {
SyntaxError(msg) => write!(f, "Syntax error: {}", msg), JsonPayloadError::Overflow => Code::PayloadTooLarge,
UnexpectedToken { expected, found } => { JsonPayloadError::ContentType => Code::UnsupportedMediaType,
write!(f, "Unexpected {} found, expected {:?}", found, expected) JsonPayloadError::Payload(aweb::error::PayloadError::Overflow) => Code::PayloadTooLarge,
} JsonPayloadError::Deserialize(_)
NoFacetSet => write!(f, "Can't perform facet count, as no facet is set"), | JsonPayloadError::Payload(_) => Code::BadRequest,
JsonPayloadError::Serialize(_) => Code::Internal,
_ => Code::Internal,
},
PayloadError::Query(err) => match err {
QueryPayloadError::Deserialize(_) => Code::BadRequest,
_ => Code::Internal,
},
} }
} }
} }
impl Error { impl From<JsonPayloadError> for PayloadError {
pub fn internal(err: impl fmt::Display) -> Error { fn from(other: JsonPayloadError) -> Self {
Error::Internal(err.to_string()) Self::Json(other)
}
pub fn bad_request(err: impl fmt::Display) -> Error {
Error::BadRequest(err.to_string())
}
pub fn missing_authorization_header() -> Error {
Error::MissingAuthorizationHeader
}
pub fn invalid_token(err: impl fmt::Display) -> Error {
Error::InvalidToken(err.to_string())
}
pub fn not_found(err: impl fmt::Display) -> Error {
Error::NotFound(err.to_string())
}
pub fn index_not_found(err: impl fmt::Display) -> Error {
Error::IndexNotFound(err.to_string())
}
pub fn document_not_found(err: impl fmt::Display) -> Error {
Error::DocumentNotFound(err.to_string())
}
pub fn bad_parameter(param: impl fmt::Display, err: impl fmt::Display) -> Error {
Error::BadParameter(param.to_string(), err.to_string())
}
pub fn open_index(err: impl fmt::Display) -> Error {
Error::OpenIndex(err.to_string())
}
pub fn create_index(err: impl fmt::Display) -> Error {
Error::CreateIndex(err.to_string())
}
pub fn invalid_index_uid() -> Error {
Error::InvalidIndexUid
}
pub fn retrieve_document(doc_id: u32, err: impl fmt::Display) -> Error {
Error::RetrieveDocument(doc_id, err.to_string())
}
pub fn search_documents(err: impl fmt::Display) -> Error {
Error::SearchDocuments(err.to_string())
}
pub fn dump_conflict() -> Error {
Error::DumpAlreadyInProgress
}
pub fn dump_failed(message: String) -> Error {
Error::DumpProcessFailed(message)
} }
} }
impl fmt::Display for Error { impl From<QueryPayloadError> for PayloadError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn from(other: QueryPayloadError) -> Self {
match self { Self::Query(other)
Self::BadParameter(param, err) => write!(f, "Url parameter {} error: {}", param, err),
Self::BadRequest(err) => f.write_str(err),
Self::CreateIndex(err) => write!(f, "Impossible to create index; {}", err),
Self::DocumentNotFound(document_id) => write!(f, "Document with id {} not found", document_id),
Self::IndexNotFound(index_uid) => write!(f, "Index {} not found", index_uid),
Self::IndexAlreadyExists(index_uid) => write!(f, "Index {} already exists", index_uid),
Self::Internal(err) => f.write_str(err),
Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."),
Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err),
Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"),
Self::NotFound(err) => write!(f, "{} not found", err),
Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
Self::RetrieveDocument(id, err) => write!(f, "Impossible to retrieve the document with id: {}; {}", id, err),
Self::SearchDocuments(err) => write!(f, "Impossible to search documents; {}", err),
Self::PayloadTooLarge => f.write_str("Payload too large"),
Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
Self::DumpAlreadyInProgress => f.write_str("Another dump is already in progress"),
Self::DumpProcessFailed(message) => write!(f, "Dump process failed: {}", message),
}
} }
} }
impl From<std::io::Error> for Error { pub fn payload_error_handler<E>(err: E) -> ResponseError
fn from(err: std::io::Error) -> Error { where
Error::Internal(err.to_string()) E: Into<PayloadError>,
} {
} err.into().into()
impl From<HttpError> for Error {
fn from(err: HttpError) -> Error {
Error::Internal(err.to_string())
}
}
impl From<serde_json::error::Error> for Error {
fn from(err: serde_json::error::Error) -> Error {
Error::Internal(err.to_string())
}
}
impl From<JsonPayloadError> for Error {
fn from(err: JsonPayloadError) -> Error {
match err {
JsonPayloadError::Deserialize(err) => {
Error::BadRequest(format!("Invalid JSON: {}", err))
}
JsonPayloadError::Overflow => Error::PayloadTooLarge,
JsonPayloadError::ContentType => Error::UnsupportedMediaType,
JsonPayloadError::Payload(err) => {
Error::BadRequest(format!("Problem while decoding the request: {}", err))
}
e => Error::Internal(format!("Unexpected Json error: {}", e)),
}
}
}
impl From<QueryPayloadError> for Error {
fn from(err: QueryPayloadError) -> Error {
match err {
QueryPayloadError::Deserialize(err) => {
Error::BadRequest(format!("Invalid query parameters: {}", err))
}
e => Error::Internal(format!("Unexpected query payload error: {}", e)),
}
}
}
pub fn payload_error_handler<E: Into<Error>>(err: E) -> ResponseError {
let error: Error = err.into();
error.into()
} }

View File

@ -9,7 +9,7 @@ use futures::future::{ok, Future, Ready};
use futures::ready; use futures::ready;
use pin_project::pin_project; use pin_project::pin_project;
use crate::error::{Error, ResponseError}; use crate::error::{AuthenticationError, ResponseError};
use crate::Data; use crate::Data;
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
@ -117,7 +117,8 @@ where
AuthProj::NoHeader(req) => { AuthProj::NoHeader(req) => {
match req.take() { match req.take() {
Some(req) => { Some(req) => {
let response = ResponseError::from(Error::MissingAuthorizationHeader); let response =
ResponseError::from(AuthenticationError::MissingAuthorizationHeader);
let response = response.error_response(); let response = response.error_response();
let response = req.into_response(response); let response = req.into_response(response);
Poll::Ready(Ok(response)) Poll::Ready(Ok(response))
@ -134,7 +135,8 @@ where
.get("X-Meili-API-Key") .get("X-Meili-API-Key")
.map(|h| h.to_str().map(String::from).unwrap_or_default()) .map(|h| h.to_str().map(String::from).unwrap_or_default())
.unwrap_or_default(); .unwrap_or_default();
let response = ResponseError::from(Error::InvalidToken(bad_token)); let response =
ResponseError::from(AuthenticationError::InvalidToken(bad_token));
let response = response.error_response(); let response = response.error_response();
let response = req.into_response(response); let response = req.into_response(response);
Poll::Ready(Ok(response)) Poll::Ready(Ok(response))

View File

@ -3,7 +3,7 @@ use std::io::{BufRead, BufReader, Write};
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use anyhow::{bail, Context}; use anyhow::{Context, bail};
use heed::RoTxn; use heed::RoTxn;
use indexmap::IndexMap; use indexmap::IndexMap;
use milli::update::{IndexDocumentsMethod, UpdateFormat::JsonStream}; use milli::update::{IndexDocumentsMethod, UpdateFormat::JsonStream};
@ -11,6 +11,7 @@ use serde::{Deserialize, Serialize};
use crate::option::IndexerOpts; use crate::option::IndexerOpts;
use super::error::Result;
use super::{update_handler::UpdateHandler, Index, Settings, Unchecked}; use super::{update_handler::UpdateHandler, Index, Settings, Unchecked};
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -23,7 +24,7 @@ const META_FILE_NAME: &str = "meta.json";
const DATA_FILE_NAME: &str = "documents.jsonl"; const DATA_FILE_NAME: &str = "documents.jsonl";
impl Index { impl Index {
pub fn dump(&self, path: impl AsRef<Path>) -> anyhow::Result<()> { pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
// acquire write txn make sure any ongoing write is finished before we start. // acquire write txn make sure any ongoing write is finished before we start.
let txn = self.env.write_txn()?; let txn = self.env.write_txn()?;
@ -33,7 +34,7 @@ impl Index {
Ok(()) Ok(())
} }
fn dump_documents(&self, txn: &RoTxn, path: impl AsRef<Path>) -> anyhow::Result<()> { fn dump_documents(&self, txn: &RoTxn, path: impl AsRef<Path>) -> Result<()> {
let document_file_path = path.as_ref().join(DATA_FILE_NAME); let document_file_path = path.as_ref().join(DATA_FILE_NAME);
let mut document_file = File::create(&document_file_path)?; let mut document_file = File::create(&document_file_path)?;
@ -60,7 +61,7 @@ impl Index {
Ok(()) Ok(())
} }
fn dump_meta(&self, txn: &RoTxn, path: impl AsRef<Path>) -> anyhow::Result<()> { fn dump_meta(&self, txn: &RoTxn, path: impl AsRef<Path>) -> Result<()> {
let meta_file_path = path.as_ref().join(META_FILE_NAME); let meta_file_path = path.as_ref().join(META_FILE_NAME);
let mut meta_file = File::create(&meta_file_path)?; let mut meta_file = File::create(&meta_file_path)?;
@ -86,6 +87,7 @@ impl Index {
.as_ref() .as_ref()
.file_name() .file_name()
.with_context(|| format!("invalid dump index: {}", src.as_ref().display()))?; .with_context(|| format!("invalid dump index: {}", src.as_ref().display()))?;
let dst_dir_path = dst.as_ref().join("indexes").join(dir_name); let dst_dir_path = dst.as_ref().join("indexes").join(dir_name);
create_dir_all(&dst_dir_path)?; create_dir_all(&dst_dir_path)?;

View File

@ -0,0 +1,52 @@
use std::error::Error;
use meilisearch_error::{Code, ErrorCode};
use serde_json::Value;
use crate::error::MilliError;
pub type Result<T> = std::result::Result<T, IndexError>;
#[derive(Debug, thiserror::Error)]
pub enum IndexError {
#[error("internal error: {0}")]
Internal(Box<dyn Error + Send + Sync + 'static>),
#[error("document with id {0} not found.")]
DocumentNotFound(String),
#[error("error with facet: {0}")]
Facet(#[from] FacetError),
#[error("{0}")]
Milli(#[from] milli::Error),
}
internal_error!(
IndexError: std::io::Error,
heed::Error,
fst::Error,
serde_json::Error
);
impl ErrorCode for IndexError {
fn error_code(&self) -> Code {
match self {
IndexError::Internal(_) => Code::Internal,
IndexError::DocumentNotFound(_) => Code::DocumentNotFound,
IndexError::Facet(e) => e.error_code(),
IndexError::Milli(e) => MilliError(e).error_code(),
}
}
}
#[derive(Debug, thiserror::Error)]
pub enum FacetError {
#[error("invalid facet expression, expected {}, found: {1}", .0.join(", "))]
InvalidExpression(&'static [&'static str], Value),
}
impl ErrorCode for FacetError {
fn error_code(&self) -> Code {
match self {
FacetError::InvalidExpression(_, _) => Code::Facet,
}
}
}

View File

@ -5,19 +5,24 @@ use std::ops::Deref;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use anyhow::{bail, Context};
use heed::{EnvOpenOptions, RoTxn}; use heed::{EnvOpenOptions, RoTxn};
use milli::obkv_to_json; use milli::obkv_to_json;
use serde::{de::Deserializer, Deserialize};
use serde_json::{Map, Value}; use serde_json::{Map, Value};
use crate::helpers::EnvSizer; use crate::helpers::EnvSizer;
use error::Result;
pub use search::{SearchQuery, SearchResult, DEFAULT_SEARCH_LIMIT}; pub use search::{SearchQuery, SearchResult, DEFAULT_SEARCH_LIMIT};
use serde::{de::Deserializer, Deserialize};
pub use updates::{Checked, Facets, Settings, Unchecked}; pub use updates::{Checked, Facets, Settings, Unchecked};
use self::error::IndexError;
pub mod error;
pub mod update_handler;
mod dump; mod dump;
mod search; mod search;
pub mod update_handler;
mod updates; mod updates;
pub type Document = Map<String, Value>; pub type Document = Map<String, Value>;
@ -33,7 +38,7 @@ impl Deref for Index {
} }
} }
pub fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error> pub fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error>
where where
T: Deserialize<'de>, T: Deserialize<'de>,
D: Deserializer<'de>, D: Deserializer<'de>,
@ -42,7 +47,7 @@ where
} }
impl Index { impl Index {
pub fn open(path: impl AsRef<Path>, size: usize) -> anyhow::Result<Self> { pub fn open(path: impl AsRef<Path>, size: usize) -> Result<Self> {
create_dir_all(&path)?; create_dir_all(&path)?;
let mut options = EnvOpenOptions::new(); let mut options = EnvOpenOptions::new();
options.map_size(size); options.map_size(size);
@ -50,12 +55,12 @@ impl Index {
Ok(Index(Arc::new(index))) Ok(Index(Arc::new(index)))
} }
pub fn settings(&self) -> anyhow::Result<Settings<Checked>> { pub fn settings(&self) -> Result<Settings<Checked>> {
let txn = self.read_txn()?; let txn = self.read_txn()?;
self.settings_txn(&txn) self.settings_txn(&txn)
} }
pub fn settings_txn(&self, txn: &RoTxn) -> anyhow::Result<Settings<Checked>> { pub fn settings_txn(&self, txn: &RoTxn) -> Result<Settings<Checked>> {
let displayed_attributes = self let displayed_attributes = self
.displayed_fields(&txn)? .displayed_fields(&txn)?
.map(|fields| fields.into_iter().map(String::from).collect()); .map(|fields| fields.into_iter().map(String::from).collect());
@ -64,10 +69,7 @@ impl Index {
.searchable_fields(&txn)? .searchable_fields(&txn)?
.map(|fields| fields.into_iter().map(String::from).collect()); .map(|fields| fields.into_iter().map(String::from).collect());
let faceted_attributes = self let faceted_attributes = self.faceted_fields(&txn)?.into_iter().collect();
.faceted_fields(&txn)?
.into_iter()
.collect();
let criteria = self let criteria = self
.criteria(&txn)? .criteria(&txn)?
@ -77,7 +79,7 @@ impl Index {
let stop_words = self let stop_words = self
.stop_words(&txn)? .stop_words(&txn)?
.map(|stop_words| -> anyhow::Result<BTreeSet<_>> { .map(|stop_words| -> Result<BTreeSet<_>> {
Ok(stop_words.stream().into_strs()?.into_iter().collect()) Ok(stop_words.stream().into_strs()?.into_iter().collect())
}) })
.transpose()? .transpose()?
@ -114,7 +116,7 @@ impl Index {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<S>>, attributes_to_retrieve: Option<Vec<S>>,
) -> anyhow::Result<Vec<Map<String, Value>>> { ) -> Result<Vec<Map<String, Value>>> {
let txn = self.read_txn()?; let txn = self.read_txn()?;
let fields_ids_map = self.fields_ids_map(&txn)?; let fields_ids_map = self.fields_ids_map(&txn)?;
@ -138,7 +140,7 @@ impl Index {
&self, &self,
doc_id: String, doc_id: String,
attributes_to_retrieve: Option<Vec<S>>, attributes_to_retrieve: Option<Vec<S>>,
) -> anyhow::Result<Map<String, Value>> { ) -> Result<Map<String, Value>> {
let txn = self.read_txn()?; let txn = self.read_txn()?;
let fields_ids_map = self.fields_ids_map(&txn)?; let fields_ids_map = self.fields_ids_map(&txn)?;
@ -149,18 +151,18 @@ impl Index {
let internal_id = self let internal_id = self
.external_documents_ids(&txn)? .external_documents_ids(&txn)?
.get(doc_id.as_bytes()) .get(doc_id.as_bytes())
.with_context(|| format!("Document with id {} not found", doc_id))?; .ok_or_else(|| IndexError::DocumentNotFound(doc_id.clone()))?;
let document = self let document = self
.documents(&txn, std::iter::once(internal_id))? .documents(&txn, std::iter::once(internal_id))?
.into_iter() .into_iter()
.next() .next()
.map(|(_, d)| d); .map(|(_, d)| d)
.ok_or(IndexError::DocumentNotFound(doc_id))?;
match document { let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)?;
Some(document) => Ok(obkv_to_json(&fields_to_display, &fields_ids_map, document)?),
None => bail!("Document with id {} not found", doc_id), Ok(document)
}
} }
pub fn size(&self) -> u64 { pub fn size(&self) -> u64 {
@ -172,7 +174,7 @@ impl Index {
txn: &heed::RoTxn, txn: &heed::RoTxn,
attributes_to_retrieve: &Option<Vec<S>>, attributes_to_retrieve: &Option<Vec<S>>,
fields_ids_map: &milli::FieldsIdsMap, fields_ids_map: &milli::FieldsIdsMap,
) -> anyhow::Result<Vec<u8>> { ) -> Result<Vec<u8>> {
let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)? { let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)? {
Some(ids) => ids.into_iter().collect::<Vec<_>>(), Some(ids) => ids.into_iter().collect::<Vec<_>>(),
None => fields_ids_map.iter().map(|(id, _)| id).collect(), None => fields_ids_map.iter().map(|(id, _)| id).collect(),

View File

@ -2,7 +2,6 @@ use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::time::Instant; use std::time::Instant;
use anyhow::bail;
use either::Either; use either::Either;
use heed::RoTxn; use heed::RoTxn;
use indexmap::IndexMap; use indexmap::IndexMap;
@ -11,6 +10,9 @@ use milli::{FilterCondition, FieldId, FieldsIdsMap, MatchingWords};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use crate::index::error::FacetError;
use super::error::Result;
use super::Index; use super::Index;
pub type Document = IndexMap<String, Value>; pub type Document = IndexMap<String, Value>;
@ -71,7 +73,7 @@ struct FormatOptions {
} }
impl Index { impl Index {
pub fn perform_search(&self, query: SearchQuery) -> anyhow::Result<SearchResult> { pub fn perform_search(&self, query: SearchQuery) -> Result<SearchResult> {
let before_search = Instant::now(); let before_search = Instant::now();
let rtxn = self.read_txn()?; let rtxn = self.read_txn()?;
@ -96,7 +98,7 @@ impl Index {
candidates, candidates,
.. ..
} = search.execute()?; } = search.execute()?;
let mut documents = Vec::new();
let fields_ids_map = self.fields_ids_map(&rtxn).unwrap(); let fields_ids_map = self.fields_ids_map(&rtxn).unwrap();
let displayed_ids = self let displayed_ids = self
@ -158,7 +160,11 @@ impl Index {
let formatter = let formatter =
Formatter::new(&stop_words, (String::from("<em>"), String::from("</em>"))); Formatter::new(&stop_words, (String::from("<em>"), String::from("</em>")));
for (_id, obkv) in self.documents(&rtxn, documents_ids)? { let mut documents = Vec::new();
let documents_iter = self.documents(&rtxn, documents_ids)?;
for (_id, obkv) in documents_iter {
let document = make_document(&to_retrieve_ids, &fields_ids_map, obkv)?; let document = make_document(&to_retrieve_ids, &fields_ids_map, obkv)?;
let formatted = format_fields( let formatted = format_fields(
&fields_ids_map, &fields_ids_map,
@ -167,6 +173,7 @@ impl Index {
&matching_words, &matching_words,
&formatted_options, &formatted_options,
)?; )?;
let hit = SearchHit { let hit = SearchHit {
document, document,
formatted, formatted,
@ -182,7 +189,9 @@ impl Index {
if fields.iter().all(|f| f != "*") { if fields.iter().all(|f| f != "*") {
facet_distribution.facets(fields); facet_distribution.facets(fields);
} }
Some(facet_distribution.candidates(candidates).execute()?) let distribution = facet_distribution.candidates(candidates).execute()?;
Some(distribution)
} }
None => None, None => None,
}; };
@ -326,7 +335,7 @@ fn make_document(
attributes_to_retrieve: &BTreeSet<FieldId>, attributes_to_retrieve: &BTreeSet<FieldId>,
field_ids_map: &FieldsIdsMap, field_ids_map: &FieldsIdsMap,
obkv: obkv::KvReader, obkv: obkv::KvReader,
) -> anyhow::Result<Document> { ) -> Result<Document> {
let mut document = Document::new(); let mut document = Document::new();
for attr in attributes_to_retrieve { for attr in attributes_to_retrieve {
if let Some(value) = obkv.get(*attr) { if let Some(value) = obkv.get(*attr) {
@ -351,7 +360,7 @@ fn format_fields<A: AsRef<[u8]>>(
formatter: &Formatter<A>, formatter: &Formatter<A>,
matching_words: &impl Matcher, matching_words: &impl Matcher,
formatted_options: &BTreeMap<FieldId, FormatOptions>, formatted_options: &BTreeMap<FieldId, FormatOptions>,
) -> anyhow::Result<Document> { ) -> Result<Document> {
let mut document = Document::new(); let mut document = Document::new();
for (id, format) in formatted_options { for (id, format) in formatted_options {
@ -514,15 +523,14 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
} }
} }
fn parse_filter( fn parse_filter(facets: &Value, index: &Index, txn: &RoTxn) -> Result<Option<FilterCondition>> {
facets: &Value,
index: &Index,
txn: &RoTxn,
) -> anyhow::Result<Option<FilterCondition>> {
match facets { match facets {
Value::String(expr) => Ok(Some(FilterCondition::from_str(txn, index, expr)?)), Value::String(expr) => {
let condition = FilterCondition::from_str(txn, index, expr)?;
Ok(Some(condition))
}
Value::Array(arr) => parse_filter_array(txn, index, arr), Value::Array(arr) => parse_filter_array(txn, index, arr),
v => bail!("Invalid facet expression, expected Array, found: {:?}", v), v => Err(FacetError::InvalidExpression(&["Array"], v.clone()).into()),
} }
} }
@ -530,7 +538,7 @@ fn parse_filter_array(
txn: &RoTxn, txn: &RoTxn,
index: &Index, index: &Index,
arr: &[Value], arr: &[Value],
) -> anyhow::Result<Option<FilterCondition>> { ) -> Result<Option<FilterCondition>> {
let mut ands = Vec::new(); let mut ands = Vec::new();
for value in arr { for value in arr {
match value { match value {
@ -540,15 +548,18 @@ fn parse_filter_array(
for value in arr { for value in arr {
match value { match value {
Value::String(s) => ors.push(s.clone()), Value::String(s) => ors.push(s.clone()),
v => bail!("Invalid facet expression, expected String, found: {:?}", v), v => {
return Err(FacetError::InvalidExpression(&["String"], v.clone()).into())
}
} }
} }
ands.push(Either::Left(ors)); ands.push(Either::Left(ors));
} }
v => bail!( v => {
"Invalid facet expression, expected String or [String], found: {:?}", return Err(
v FacetError::InvalidExpression(&["String", "[String]"], v.clone()).into(),
), )
}
} }
} }

View File

@ -1,7 +1,6 @@
use std::fs::File; use std::fs::File;
use crate::index::Index; use crate::index::Index;
use anyhow::Result;
use grenad::CompressionType; use grenad::CompressionType;
use milli::update::UpdateBuilder; use milli::update::UpdateBuilder;
use rayon::ThreadPool; use rayon::ThreadPool;
@ -87,7 +86,7 @@ impl UpdateHandler {
match result { match result {
Ok(result) => Ok(meta.process(result)), Ok(result) => Ok(meta.process(result)),
Err(e) => Err(meta.fail(e.to_string())), Err(e) => Err(meta.fail(e.into())),
} }
} }
} }

View File

@ -1,4 +1,4 @@
use std::collections::{BTreeSet, BTreeMap, HashSet}; use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::io; use std::io;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
@ -10,9 +10,13 @@ use serde::{Deserialize, Serialize, Serializer};
use crate::index_controller::UpdateResult; use crate::index_controller::UpdateResult;
use super::error::Result;
use super::{deserialize_some, Index}; use super::{deserialize_some, Index};
fn serialize_with_wildcard<S>(field: &Option<Option<Vec<String>>>, s: S) -> Result<S::Ok, S::Error> fn serialize_with_wildcard<S>(
field: &Option<Option<Vec<String>>>,
s: S,
) -> std::result::Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
@ -174,7 +178,7 @@ impl Index {
content: Option<impl io::Read>, content: Option<impl io::Read>,
update_builder: UpdateBuilder, update_builder: UpdateBuilder,
primary_key: Option<&str>, primary_key: Option<&str>,
) -> anyhow::Result<UpdateResult> { ) -> Result<UpdateResult> {
let mut txn = self.write_txn()?; let mut txn = self.write_txn()?;
let result = self.update_documents_txn( let result = self.update_documents_txn(
&mut txn, &mut txn,
@ -196,13 +200,12 @@ impl Index {
content: Option<impl io::Read>, content: Option<impl io::Read>,
update_builder: UpdateBuilder, update_builder: UpdateBuilder,
primary_key: Option<&str>, primary_key: Option<&str>,
) -> anyhow::Result<UpdateResult> { ) -> Result<UpdateResult> {
info!("performing document addition"); info!("performing document addition");
// Set the primary key if not set already, ignore if already set. // Set the primary key if not set already, ignore if already set.
if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) { if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) {
let mut builder = UpdateBuilder::new(0) let mut builder = UpdateBuilder::new(0).settings(txn, &self);
.settings(txn, &self);
builder.set_primary_key(primary_key.to_string()); builder.set_primary_key(primary_key.to_string());
builder.execute(|_, _| ())?; builder.execute(|_, _| ())?;
} }
@ -228,18 +231,16 @@ impl Index {
Ok(UpdateResult::DocumentsAddition(addition)) Ok(UpdateResult::DocumentsAddition(addition))
} }
pub fn clear_documents(&self, update_builder: UpdateBuilder) -> anyhow::Result<UpdateResult> { pub fn clear_documents(&self, update_builder: UpdateBuilder) -> Result<UpdateResult> {
// We must use the write transaction of the update here. // We must use the write transaction of the update here.
let mut wtxn = self.write_txn()?; let mut wtxn = self.write_txn()?;
let builder = update_builder.clear_documents(&mut wtxn, self); let builder = update_builder.clear_documents(&mut wtxn, self);
match builder.execute() { let _count = builder.execute()?;
Ok(_count) => wtxn
.commit() wtxn.commit()
.and(Ok(UpdateResult::Other)) .and(Ok(UpdateResult::Other))
.map_err(Into::into), .map_err(Into::into)
Err(e) => Err(e.into()),
}
} }
pub fn update_settings_txn<'a, 'b>( pub fn update_settings_txn<'a, 'b>(
@ -247,7 +248,7 @@ impl Index {
txn: &mut heed::RwTxn<'a, 'b>, txn: &mut heed::RwTxn<'a, 'b>,
settings: &Settings<Checked>, settings: &Settings<Checked>,
update_builder: UpdateBuilder, update_builder: UpdateBuilder,
) -> anyhow::Result<UpdateResult> { ) -> Result<UpdateResult> {
// We must use the write transaction of the update here. // We must use the write transaction of the update here.
let mut builder = update_builder.settings(txn, self); let mut builder = update_builder.settings(txn, self);
@ -309,7 +310,7 @@ impl Index {
&self, &self,
settings: &Settings<Checked>, settings: &Settings<Checked>,
update_builder: UpdateBuilder, update_builder: UpdateBuilder,
) -> anyhow::Result<UpdateResult> { ) -> Result<UpdateResult> {
let mut txn = self.write_txn()?; let mut txn = self.write_txn()?;
let result = self.update_settings_txn(&mut txn, settings, update_builder)?; let result = self.update_settings_txn(&mut txn, settings, update_builder)?;
txn.commit()?; txn.commit()?;
@ -320,7 +321,7 @@ impl Index {
&self, &self,
document_ids: &[String], document_ids: &[String],
update_builder: UpdateBuilder, update_builder: UpdateBuilder,
) -> anyhow::Result<UpdateResult> { ) -> Result<UpdateResult> {
let mut txn = self.write_txn()?; let mut txn = self.write_txn()?;
let mut builder = update_builder.delete_documents(&mut txn, self)?; let mut builder = update_builder.delete_documents(&mut txn, self)?;
@ -329,13 +330,10 @@ impl Index {
builder.delete_external_id(id); builder.delete_external_id(id);
}); });
match builder.execute() { let deleted = builder.execute()?;
Ok(deleted) => txn txn.commit()
.commit() .and(Ok(UpdateResult::DocumentDeletion { deleted }))
.and(Ok(UpdateResult::DocumentDeletion { deleted })) .map_err(Into::into)
.map_err(Into::into),
Err(e) => Err(e.into()),
}
} }
} }

View File

@ -10,7 +10,8 @@ use tokio::sync::{mpsc, oneshot, RwLock};
use update_actor::UpdateActorHandle; use update_actor::UpdateActorHandle;
use uuid_resolver::UuidResolverHandle; use uuid_resolver::UuidResolverHandle;
use super::{DumpError, DumpInfo, DumpMsg, DumpResult, DumpStatus, DumpTask}; use super::error::{DumpActorError, Result};
use super::{DumpInfo, DumpMsg, DumpStatus, DumpTask};
use crate::index_controller::{update_actor, uuid_resolver}; use crate::index_controller::{update_actor, uuid_resolver};
pub const CONCURRENT_DUMP_MSG: usize = 10; pub const CONCURRENT_DUMP_MSG: usize = 10;
@ -95,14 +96,14 @@ where
} }
} }
async fn handle_create_dump(&self, ret: oneshot::Sender<DumpResult<DumpInfo>>) { async fn handle_create_dump(&self, ret: oneshot::Sender<Result<DumpInfo>>) {
let uid = generate_uid(); let uid = generate_uid();
let info = DumpInfo::new(uid.clone(), DumpStatus::InProgress); let info = DumpInfo::new(uid.clone(), DumpStatus::InProgress);
let _lock = match self.lock.try_lock() { let _lock = match self.lock.try_lock() {
Some(lock) => lock, Some(lock) => lock,
None => { None => {
ret.send(Err(DumpError::DumpAlreadyRunning)) ret.send(Err(DumpActorError::DumpAlreadyRunning))
.expect("Dump actor is dead"); .expect("Dump actor is dead");
return; return;
} }
@ -147,10 +148,10 @@ where
}; };
} }
async fn handle_dump_info(&self, uid: String) -> DumpResult<DumpInfo> { async fn handle_dump_info(&self, uid: String) -> Result<DumpInfo> {
match self.dump_infos.read().await.get(&uid) { match self.dump_infos.read().await.get(&uid) {
Some(info) => Ok(info.clone()), Some(info) => Ok(info.clone()),
_ => Err(DumpError::DumpDoesNotExist(uid)), _ => Err(DumpActorError::DumpDoesNotExist(uid)),
} }
} }
} }

View File

@ -0,0 +1,52 @@
use meilisearch_error::{Code, ErrorCode};
use crate::index_controller::update_actor::error::UpdateActorError;
use crate::index_controller::uuid_resolver::error::UuidResolverError;
pub type Result<T> = std::result::Result<T, DumpActorError>;
#[derive(thiserror::Error, Debug)]
pub enum DumpActorError {
#[error("dump already running")]
DumpAlreadyRunning,
#[error("dump `{0}` does not exist")]
DumpDoesNotExist(String),
#[error("internal error: {0}")]
Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
#[error("error while dumping uuids: {0}")]
UuidResolver(#[from] UuidResolverError),
#[error("error while dumping updates: {0}")]
UpdateActor(#[from] UpdateActorError),
}
macro_rules! internal_error {
($($other:path), *) => {
$(
impl From<$other> for DumpActorError {
fn from(other: $other) -> Self {
Self::Internal(Box::new(other))
}
}
)*
}
}
internal_error!(
heed::Error,
std::io::Error,
tokio::task::JoinError,
serde_json::error::Error,
tempfile::PersistError
);
impl ErrorCode for DumpActorError {
fn error_code(&self) -> Code {
match self {
DumpActorError::DumpAlreadyRunning => Code::DumpAlreadyInProgress,
DumpActorError::DumpDoesNotExist(_) => Code::DocumentNotFound,
DumpActorError::Internal(_) => Code::Internal,
DumpActorError::UuidResolver(e) => e.error_code(),
DumpActorError::UpdateActor(e) => e.error_code(),
}
}
}

View File

@ -3,7 +3,8 @@ use std::path::Path;
use actix_web::web::Bytes; use actix_web::web::Bytes;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use super::{DumpActor, DumpActorHandle, DumpInfo, DumpMsg, DumpResult}; use super::error::Result;
use super::{DumpActor, DumpActorHandle, DumpInfo, DumpMsg};
#[derive(Clone)] #[derive(Clone)]
pub struct DumpActorHandleImpl { pub struct DumpActorHandleImpl {
@ -12,14 +13,14 @@ pub struct DumpActorHandleImpl {
#[async_trait::async_trait] #[async_trait::async_trait]
impl DumpActorHandle for DumpActorHandleImpl { impl DumpActorHandle for DumpActorHandleImpl {
async fn create_dump(&self) -> DumpResult<DumpInfo> { async fn create_dump(&self) -> Result<DumpInfo> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = DumpMsg::CreateDump { ret }; let msg = DumpMsg::CreateDump { ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
receiver.await.expect("IndexActor has been killed") receiver.await.expect("IndexActor has been killed")
} }
async fn dump_info(&self, uid: String) -> DumpResult<DumpInfo> { async fn dump_info(&self, uid: String) -> Result<DumpInfo> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = DumpMsg::DumpInfo { ret, uid }; let msg = DumpMsg::DumpInfo { ret, uid };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;

View File

@ -1,13 +1,14 @@
use tokio::sync::oneshot; use tokio::sync::oneshot;
use super::{DumpInfo, DumpResult}; use super::error::Result;
use super::DumpInfo;
pub enum DumpMsg { pub enum DumpMsg {
CreateDump { CreateDump {
ret: oneshot::Sender<DumpResult<DumpInfo>>, ret: oneshot::Sender<Result<DumpInfo>>,
}, },
DumpInfo { DumpInfo {
uid: String, uid: String,
ret: oneshot::Sender<DumpResult<DumpInfo>>, ret: oneshot::Sender<Result<DumpInfo>>,
}, },
} }

View File

@ -3,11 +3,10 @@ use std::path::{Path, PathBuf};
use anyhow::Context; use anyhow::Context;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use log::{error, info, warn}; use log::{info, warn};
#[cfg(test)] #[cfg(test)]
use mockall::automock; use mockall::automock;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::fs::create_dir_all; use tokio::fs::create_dir_all;
use loaders::v1::MetadataV1; use loaders::v1::MetadataV1;
@ -18,39 +17,28 @@ pub use handle_impl::*;
pub use message::DumpMsg; pub use message::DumpMsg;
use super::{update_actor::UpdateActorHandle, uuid_resolver::UuidResolverHandle}; use super::{update_actor::UpdateActorHandle, uuid_resolver::UuidResolverHandle};
use crate::index_controller::dump_actor::error::DumpActorError;
use crate::{helpers::compression, option::IndexerOpts}; use crate::{helpers::compression, option::IndexerOpts};
use error::Result;
mod actor; mod actor;
pub mod error;
mod handle_impl; mod handle_impl;
mod loaders; mod loaders;
mod message; mod message;
const META_FILE_NAME: &str = "metadata.json"; const META_FILE_NAME: &str = "metadata.json";
pub type DumpResult<T> = std::result::Result<T, DumpError>;
#[derive(Error, Debug)]
pub enum DumpError {
#[error("error with index: {0}")]
Error(#[from] anyhow::Error),
#[error("Heed error: {0}")]
HeedError(#[from] heed::Error),
#[error("dump already running")]
DumpAlreadyRunning,
#[error("dump `{0}` does not exist")]
DumpDoesNotExist(String),
}
#[async_trait::async_trait] #[async_trait::async_trait]
#[cfg_attr(test, automock)] #[cfg_attr(test, automock)]
pub trait DumpActorHandle { pub trait DumpActorHandle {
/// Start the creation of a dump /// Start the creation of a dump
/// Implementation: [handle_impl::DumpActorHandleImpl::create_dump] /// Implementation: [handle_impl::DumpActorHandleImpl::create_dump]
async fn create_dump(&self) -> DumpResult<DumpInfo>; async fn create_dump(&self) -> Result<DumpInfo>;
/// Return the status of an already created dump /// Return the status of an already created dump
/// Implementation: [handle_impl::DumpActorHandleImpl::dump_status] /// Implementation: [handle_impl::DumpActorHandleImpl::dump_status]
async fn dump_info(&self, uid: String) -> DumpResult<DumpInfo>; async fn dump_info(&self, uid: String) -> Result<DumpInfo>;
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -175,7 +163,7 @@ where
U: UuidResolverHandle + Send + Sync + Clone + 'static, U: UuidResolverHandle + Send + Sync + Clone + 'static,
P: UpdateActorHandle + Send + Sync + Clone + 'static, P: UpdateActorHandle + Send + Sync + Clone + 'static,
{ {
async fn run(self) -> anyhow::Result<()> { async fn run(self) -> Result<()> {
info!("Performing dump."); info!("Performing dump.");
create_dir_all(&self.path).await?; create_dir_all(&self.path).await?;
@ -196,9 +184,10 @@ where
.dump(uuids, temp_dump_path.clone()) .dump(uuids, temp_dump_path.clone())
.await?; .await?;
let dump_path = tokio::task::spawn_blocking(move || -> anyhow::Result<PathBuf> { let dump_path = tokio::task::spawn_blocking(move || -> Result<PathBuf> {
let temp_dump_file = tempfile::NamedTempFile::new_in(&self.path)?; let temp_dump_file = tempfile::NamedTempFile::new_in(&self.path)?;
compression::to_tar_gz(temp_dump_path, temp_dump_file.path())?; compression::to_tar_gz(temp_dump_path, temp_dump_file.path())
.map_err(|e| DumpActorError::Internal(e.into()))?;
let dump_path = self.path.join(self.uid).with_extension("dump"); let dump_path = self.path.join(self.uid).with_extension("dump");
temp_dump_file.persist(&dump_path)?; temp_dump_file.persist(&dump_path)?;

View File

@ -0,0 +1,40 @@
use meilisearch_error::Code;
use meilisearch_error::ErrorCode;
use crate::index::error::IndexError;
use super::dump_actor::error::DumpActorError;
use super::index_actor::error::IndexActorError;
use super::update_actor::error::UpdateActorError;
use super::uuid_resolver::error::UuidResolverError;
pub type Result<T> = std::result::Result<T, IndexControllerError>;
#[derive(Debug, thiserror::Error)]
pub enum IndexControllerError {
#[error("missing index uid")]
MissingUid,
#[error("index resolution error: {0}")]
Uuid(#[from] UuidResolverError),
#[error("error with index: {0}")]
IndexActor(#[from] IndexActorError),
#[error("error with update: {0}")]
UpdateActor(#[from] UpdateActorError),
#[error("error with dump: {0}")]
DumpActor(#[from] DumpActorError),
#[error("error with index: {0}")]
IndexError(#[from] IndexError),
}
impl ErrorCode for IndexControllerError {
fn error_code(&self) -> Code {
match self {
IndexControllerError::MissingUid => Code::InvalidIndexUid,
IndexControllerError::Uuid(e) => e.error_code(),
IndexControllerError::IndexActor(e) => e.error_code(),
IndexControllerError::UpdateActor(e) => e.error_code(),
IndexControllerError::DumpActor(e) => e.error_code(),
IndexControllerError::IndexError(e) => e.error_code(),
}
}
}

View File

@ -19,7 +19,8 @@ use crate::index_controller::{
}; };
use crate::option::IndexerOpts; use crate::option::IndexerOpts;
use super::{IndexError, IndexMeta, IndexMsg, IndexResult, IndexSettings, IndexStore}; use super::error::{IndexActorError, Result};
use super::{IndexMeta, IndexMsg, IndexSettings, IndexStore};
pub const CONCURRENT_INDEX_MSG: usize = 10; pub const CONCURRENT_INDEX_MSG: usize = 10;
@ -30,7 +31,7 @@ pub struct IndexActor<S> {
} }
impl<S: IndexStore + Sync + Send> IndexActor<S> { impl<S: IndexStore + Sync + Send> IndexActor<S> {
pub fn new(receiver: mpsc::Receiver<IndexMsg>, store: S) -> IndexResult<Self> { pub fn new(receiver: mpsc::Receiver<IndexMsg>, store: S) -> anyhow::Result<Self> {
let options = IndexerOpts::default(); let options = IndexerOpts::default();
let update_handler = UpdateHandler::new(&options)?; let update_handler = UpdateHandler::new(&options)?;
let update_handler = Arc::new(update_handler); let update_handler = Arc::new(update_handler);
@ -137,20 +138,21 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
} }
} }
async fn handle_search(&self, uuid: Uuid, query: SearchQuery) -> anyhow::Result<SearchResult> { async fn handle_search(&self, uuid: Uuid, query: SearchQuery) -> Result<SearchResult> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
spawn_blocking(move || index.perform_search(query)).await? let result = spawn_blocking(move || index.perform_search(query)).await??;
Ok(result)
} }
async fn handle_create_index( async fn handle_create_index(
&self, &self,
uuid: Uuid, uuid: Uuid,
primary_key: Option<String>, primary_key: Option<String>,
) -> IndexResult<IndexMeta> { ) -> Result<IndexMeta> {
let index = self.store.create(uuid, primary_key).await?; let index = self.store.create(uuid, primary_key).await?;
let meta = spawn_blocking(move || IndexMeta::new(&index)).await??; let meta = spawn_blocking(move || IndexMeta::new(&index)).await??;
Ok(meta) Ok(meta)
@ -161,7 +163,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
uuid: Uuid, uuid: Uuid,
meta: Processing, meta: Processing,
data: Option<File>, data: Option<File>,
) -> IndexResult<Result<Processed, Failed>> { ) -> Result<std::result::Result<Processed, Failed>> {
debug!("Processing update {}", meta.id()); debug!("Processing update {}", meta.id());
let update_handler = self.update_handler.clone(); let update_handler = self.update_handler.clone();
let index = match self.store.get(uuid).await? { let index = match self.store.get(uuid).await? {
@ -172,12 +174,12 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
Ok(spawn_blocking(move || update_handler.handle_update(meta, data, index)).await?) Ok(spawn_blocking(move || update_handler.handle_update(meta, data, index)).await?)
} }
async fn handle_settings(&self, uuid: Uuid) -> IndexResult<Settings<Checked>> { async fn handle_settings(&self, uuid: Uuid) -> Result<Settings<Checked>> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
let result = spawn_blocking(move || index.settings()).await??; let result = spawn_blocking(move || index.settings()).await??;
Ok(result) Ok(result)
} }
@ -188,12 +190,12 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Vec<Document>> { ) -> Result<Vec<Document>> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
let result = let result =
spawn_blocking(move || index.retrieve_documents(offset, limit, attributes_to_retrieve)) spawn_blocking(move || index.retrieve_documents(offset, limit, attributes_to_retrieve))
.await??; .await??;
@ -206,12 +208,12 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
uuid: Uuid, uuid: Uuid,
doc_id: String, doc_id: String,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Document> { ) -> Result<Document> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
let result = let result =
spawn_blocking(move || index.retrieve_document(doc_id, attributes_to_retrieve)) spawn_blocking(move || index.retrieve_document(doc_id, attributes_to_retrieve))
@ -220,7 +222,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
Ok(result) Ok(result)
} }
async fn handle_delete(&self, uuid: Uuid) -> IndexResult<()> { async fn handle_delete(&self, uuid: Uuid) -> Result<()> {
let index = self.store.delete(uuid).await?; let index = self.store.delete(uuid).await?;
if let Some(index) = index { if let Some(index) = index {
@ -237,13 +239,13 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
Ok(()) Ok(())
} }
async fn handle_get_meta(&self, uuid: Uuid) -> IndexResult<IndexMeta> { async fn handle_get_meta(&self, uuid: Uuid) -> Result<IndexMeta> {
match self.store.get(uuid).await? { match self.store.get(uuid).await? {
Some(index) => { Some(index) => {
let meta = spawn_blocking(move || IndexMeta::new(&index)).await??; let meta = spawn_blocking(move || IndexMeta::new(&index)).await??;
Ok(meta) Ok(meta)
} }
None => Err(IndexError::UnexistingIndex), None => Err(IndexActorError::UnexistingIndex),
} }
} }
@ -251,23 +253,22 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
&self, &self,
uuid: Uuid, uuid: Uuid,
index_settings: IndexSettings, index_settings: IndexSettings,
) -> IndexResult<IndexMeta> { ) -> Result<IndexMeta> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
let result = spawn_blocking(move || match index_settings.primary_key { let result = spawn_blocking(move || match index_settings.primary_key {
Some(primary_key) => { Some(primary_key) => {
let mut txn = index.write_txn()?; let mut txn = index.write_txn()?;
if index.primary_key(&txn)?.is_some() { if index.primary_key(&txn)?.is_some() {
return Err(IndexError::ExistingPrimaryKey); return Err(IndexActorError::ExistingPrimaryKey);
} }
let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index); let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
builder.set_primary_key(primary_key); builder.set_primary_key(primary_key);
builder.execute(|_, _| ()) builder.execute(|_, _| ())?;
.map_err(|e| IndexError::Internal(e.to_string()))?;
let meta = IndexMeta::new_txn(&index, &txn)?; let meta = IndexMeta::new_txn(&index, &txn)?;
txn.commit()?; txn.commit()?;
Ok(meta) Ok(meta)
@ -282,7 +283,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
Ok(result) Ok(result)
} }
async fn handle_snapshot(&self, uuid: Uuid, mut path: PathBuf) -> IndexResult<()> { async fn handle_snapshot(&self, uuid: Uuid, mut path: PathBuf) -> Result<()> {
use tokio::fs::create_dir_all; use tokio::fs::create_dir_all;
path.push("indexes"); path.push("indexes");
@ -294,7 +295,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
create_dir_all(&index_path).await?; create_dir_all(&index_path).await?;
index_path.push("data.mdb"); index_path.push("data.mdb");
spawn_blocking(move || -> anyhow::Result<()> { spawn_blocking(move || -> Result<()> {
// Get write txn to wait for ongoing write transaction before snapshot. // Get write txn to wait for ongoing write transaction before snapshot.
let _txn = index.write_txn()?; let _txn = index.write_txn()?;
index index
@ -310,12 +311,12 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
/// Create a `documents.jsonl` and a `settings.json` in `path/uid/` with a dump of all the /// Create a `documents.jsonl` and a `settings.json` in `path/uid/` with a dump of all the
/// documents and all the settings. /// documents and all the settings.
async fn handle_dump(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()> { async fn handle_dump(&self, uuid: Uuid, path: PathBuf) -> Result<()> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
let path = path.join(format!("indexes/index-{}/", uuid)); let path = path.join(format!("indexes/index-{}/", uuid));
fs::create_dir_all(&path).await?; fs::create_dir_all(&path).await?;
@ -325,20 +326,19 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
Ok(()) Ok(())
} }
async fn handle_get_stats(&self, uuid: Uuid) -> IndexResult<IndexStats> { async fn handle_get_stats(&self, uuid: Uuid) -> Result<IndexStats> {
let index = self let index = self
.store .store
.get(uuid) .get(uuid)
.await? .await?
.ok_or(IndexError::UnexistingIndex)?; .ok_or(IndexActorError::UnexistingIndex)?;
spawn_blocking(move || { spawn_blocking(move || {
let rtxn = index.read_txn()?; let rtxn = index.read_txn()?;
Ok(IndexStats { Ok(IndexStats {
size: index.size(), size: index.size(),
number_of_documents: index.number_of_documents(&rtxn) number_of_documents: index.number_of_documents(&rtxn)?,
.map_err(|e| IndexError::Internal(e.to_string()))?,
is_indexing: None, is_indexing: None,
fields_distribution: index.fields_distribution(&rtxn)?, fields_distribution: index.fields_distribution(&rtxn)?,
}) })

View File

@ -0,0 +1,48 @@
use meilisearch_error::{Code, ErrorCode};
use crate::{error::MilliError, index::error::IndexError};
pub type Result<T> = std::result::Result<T, IndexActorError>;
#[derive(thiserror::Error, Debug)]
pub enum IndexActorError {
#[error("index error: {0}")]
IndexError(#[from] IndexError),
#[error("index already exists")]
IndexAlreadyExists,
#[error("index doesn't exists")]
UnexistingIndex,
#[error("existing primary key")]
ExistingPrimaryKey,
#[error("internal Index Error: {0}")]
Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
#[error("{0}")]
Milli(#[from] milli::Error),
}
/// Generates `From<$other> for IndexActorError` impls that box the source
/// error into the `Internal` variant. Boxing (rather than stringifying, as the
/// previous implementation did) preserves the original error for inspection.
macro_rules! internal_error {
($($other:path), *) => {
$(
impl From<$other> for IndexActorError {
fn from(other: $other) -> Self {
Self::Internal(Box::new(other))
}
}
)*
}
}
// Error types that map to `IndexActorError::Internal` until dedicated error
// codes are specified (they surface as `Code::Internal` to the client).
internal_error!(heed::Error, tokio::task::JoinError, std::io::Error);
impl ErrorCode for IndexActorError {
    /// Maps each actor-scope failure onto the public meilisearch error code.
    ///
    /// Wrapped errors (`IndexError`, `milli::Error`) delegate to their own
    /// `error_code` implementation; actor-specific variants map directly.
    fn error_code(&self) -> Code {
        match self {
            Self::IndexError(e) => e.error_code(),
            Self::IndexAlreadyExists => Code::IndexAlreadyExists,
            Self::UnexistingIndex => Code::IndexNotFound,
            Self::ExistingPrimaryKey => Code::PrimaryKeyAlreadyPresent,
            // Internal errors have no dedicated code yet; surface as internal.
            Self::Internal(_) => Code::Internal,
            Self::Milli(e) => MilliError(e).error_code(),
        }
    }
}

View File

@ -12,7 +12,8 @@ use crate::{
index_controller::{Failed, Processed}, index_controller::{Failed, Processed},
}; };
use super::{IndexActor, IndexActorHandle, IndexMeta, IndexMsg, IndexResult, MapIndexStore}; use super::error::Result;
use super::{IndexActor, IndexActorHandle, IndexMeta, IndexMsg, MapIndexStore};
#[derive(Clone)] #[derive(Clone)]
pub struct IndexActorHandleImpl { pub struct IndexActorHandleImpl {
@ -21,11 +22,7 @@ pub struct IndexActorHandleImpl {
#[async_trait::async_trait] #[async_trait::async_trait]
impl IndexActorHandle for IndexActorHandleImpl { impl IndexActorHandle for IndexActorHandleImpl {
async fn create_index( async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta> {
&self,
uuid: Uuid,
primary_key: Option<String>,
) -> IndexResult<IndexMeta> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::CreateIndex { let msg = IndexMsg::CreateIndex {
ret, ret,
@ -41,7 +38,7 @@ impl IndexActorHandle for IndexActorHandleImpl {
uuid: Uuid, uuid: Uuid,
meta: Processing, meta: Processing,
data: Option<std::fs::File>, data: Option<std::fs::File>,
) -> anyhow::Result<Result<Processed, Failed>> { ) -> Result<std::result::Result<Processed, Failed>> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Update { let msg = IndexMsg::Update {
ret, ret,
@ -53,14 +50,14 @@ impl IndexActorHandle for IndexActorHandleImpl {
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn search(&self, uuid: Uuid, query: SearchQuery) -> IndexResult<SearchResult> { async fn search(&self, uuid: Uuid, query: SearchQuery) -> Result<SearchResult> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Search { uuid, query, ret }; let msg = IndexMsg::Search { uuid, query, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn settings(&self, uuid: Uuid) -> IndexResult<Settings<Checked>> { async fn settings(&self, uuid: Uuid) -> Result<Settings<Checked>> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Settings { uuid, ret }; let msg = IndexMsg::Settings { uuid, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
@ -73,7 +70,7 @@ impl IndexActorHandle for IndexActorHandleImpl {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Vec<Document>> { ) -> Result<Vec<Document>> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Documents { let msg = IndexMsg::Documents {
uuid, uuid,
@ -91,7 +88,7 @@ impl IndexActorHandle for IndexActorHandleImpl {
uuid: Uuid, uuid: Uuid,
doc_id: String, doc_id: String,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Document> { ) -> Result<Document> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Document { let msg = IndexMsg::Document {
uuid, uuid,
@ -103,25 +100,21 @@ impl IndexActorHandle for IndexActorHandleImpl {
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn delete(&self, uuid: Uuid) -> IndexResult<()> { async fn delete(&self, uuid: Uuid) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Delete { uuid, ret }; let msg = IndexMsg::Delete { uuid, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn get_index_meta(&self, uuid: Uuid) -> IndexResult<IndexMeta> { async fn get_index_meta(&self, uuid: Uuid) -> Result<IndexMeta> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::GetMeta { uuid, ret }; let msg = IndexMsg::GetMeta { uuid, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn update_index( async fn update_index(&self, uuid: Uuid, index_settings: IndexSettings) -> Result<IndexMeta> {
&self,
uuid: Uuid,
index_settings: IndexSettings,
) -> IndexResult<IndexMeta> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::UpdateIndex { let msg = IndexMsg::UpdateIndex {
uuid, uuid,
@ -132,21 +125,21 @@ impl IndexActorHandle for IndexActorHandleImpl {
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()> { async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Snapshot { uuid, path, ret }; let msg = IndexMsg::Snapshot { uuid, path, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn dump(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()> { async fn dump(&self, uuid: Uuid, path: PathBuf) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::Dump { uuid, path, ret }; let msg = IndexMsg::Dump { uuid, path, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
Ok(receiver.await.expect("IndexActor has been killed")?) Ok(receiver.await.expect("IndexActor has been killed")?)
} }
async fn get_index_stats(&self, uuid: Uuid) -> IndexResult<IndexStats> { async fn get_index_stats(&self, uuid: Uuid) -> Result<IndexStats> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = IndexMsg::GetStats { uuid, ret }; let msg = IndexMsg::GetStats { uuid, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;

View File

@ -3,10 +3,11 @@ use std::path::PathBuf;
use tokio::sync::oneshot; use tokio::sync::oneshot;
use uuid::Uuid; use uuid::Uuid;
use super::error::Result as IndexResult;
use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings}; use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings};
use crate::index_controller::{Failed, IndexStats, Processed, Processing}; use crate::index_controller::{Failed, IndexStats, Processed, Processing};
use super::{IndexMeta, IndexResult, IndexSettings}; use super::{IndexMeta, IndexSettings};
#[allow(clippy::large_enum_variant)] #[allow(clippy::large_enum_variant)]
pub enum IndexMsg { pub enum IndexMsg {
@ -24,7 +25,7 @@ pub enum IndexMsg {
Search { Search {
uuid: Uuid, uuid: Uuid,
query: SearchQuery, query: SearchQuery,
ret: oneshot::Sender<anyhow::Result<SearchResult>>, ret: oneshot::Sender<IndexResult<SearchResult>>,
}, },
Settings { Settings {
uuid: Uuid, uuid: Uuid,

View File

@ -5,7 +5,6 @@ use chrono::{DateTime, Utc};
#[cfg(test)] #[cfg(test)]
use mockall::automock; use mockall::automock;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use thiserror::Error;
use uuid::Uuid; use uuid::Uuid;
use actor::IndexActor; use actor::IndexActor;
@ -16,16 +15,16 @@ use store::{IndexStore, MapIndexStore};
use crate::index::{Checked, Document, Index, SearchQuery, SearchResult, Settings}; use crate::index::{Checked, Document, Index, SearchQuery, SearchResult, Settings};
use crate::index_controller::{Failed, IndexStats, Processed, Processing}; use crate::index_controller::{Failed, IndexStats, Processed, Processing};
use error::Result;
use super::IndexSettings; use super::IndexSettings;
mod actor; mod actor;
pub mod error;
mod handle_impl; mod handle_impl;
mod message; mod message;
mod store; mod store;
pub type IndexResult<T> = std::result::Result<T, IndexError>;
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct IndexMeta { pub struct IndexMeta {
@ -35,18 +34,14 @@ pub struct IndexMeta {
} }
impl IndexMeta { impl IndexMeta {
fn new(index: &Index) -> IndexResult<Self> { fn new(index: &Index) -> Result<Self> {
let txn = index.read_txn()?; let txn = index.read_txn()?;
Self::new_txn(index, &txn) Self::new_txn(index, &txn)
} }
fn new_txn(index: &Index, txn: &heed::RoTxn) -> IndexResult<Self> { fn new_txn(index: &Index, txn: &heed::RoTxn) -> Result<Self> {
let created_at = index let created_at = index.created_at(&txn)?;
.created_at(&txn) let updated_at = index.updated_at(&txn)?;
.map_err(|e| IndexError::Internal(e.to_string()))?;
let updated_at = index
.updated_at(&txn)
.map_err(|e| IndexError::Internal(e.to_string()))?;
let primary_key = index.primary_key(&txn)?.map(String::from); let primary_key = index.primary_key(&txn)?.map(String::from);
Ok(Self { Ok(Self {
created_at, created_at,
@ -56,50 +51,18 @@ impl IndexMeta {
} }
} }
#[derive(Error, Debug)]
pub enum IndexError {
#[error("index already exists")]
IndexAlreadyExists,
#[error("Index doesn't exists")]
UnexistingIndex,
#[error("Existing primary key")]
ExistingPrimaryKey,
#[error("Internal Index Error: {0}")]
Internal(String),
}
macro_rules! internal_error {
($($other:path), *) => {
$(
impl From<$other> for IndexError {
fn from(other: $other) -> Self {
Self::Internal(other.to_string())
}
}
)*
}
}
internal_error!(
anyhow::Error,
heed::Error,
tokio::task::JoinError,
std::io::Error
);
#[async_trait::async_trait] #[async_trait::async_trait]
#[cfg_attr(test, automock)] #[cfg_attr(test, automock)]
pub trait IndexActorHandle { pub trait IndexActorHandle {
async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta>;
-> IndexResult<IndexMeta>;
async fn update( async fn update(
&self, &self,
uuid: Uuid, uuid: Uuid,
meta: Processing, meta: Processing,
data: Option<File>, data: Option<File>,
) -> anyhow::Result<Result<Processed, Failed>>; ) -> Result<std::result::Result<Processed, Failed>>;
async fn search(&self, uuid: Uuid, query: SearchQuery) -> IndexResult<SearchResult>; async fn search(&self, uuid: Uuid, query: SearchQuery) -> Result<SearchResult>;
async fn settings(&self, uuid: Uuid) -> IndexResult<Settings<Checked>>; async fn settings(&self, uuid: Uuid) -> Result<Settings<Checked>>;
async fn documents( async fn documents(
&self, &self,
@ -107,23 +70,19 @@ pub trait IndexActorHandle {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Vec<Document>>; ) -> Result<Vec<Document>>;
async fn document( async fn document(
&self, &self,
uuid: Uuid, uuid: Uuid,
doc_id: String, doc_id: String,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Document>; ) -> Result<Document>;
async fn delete(&self, uuid: Uuid) -> IndexResult<()>; async fn delete(&self, uuid: Uuid) -> Result<()>;
async fn get_index_meta(&self, uuid: Uuid) -> IndexResult<IndexMeta>; async fn get_index_meta(&self, uuid: Uuid) -> Result<IndexMeta>;
async fn update_index( async fn update_index(&self, uuid: Uuid, index_settings: IndexSettings) -> Result<IndexMeta>;
&self, async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> Result<()>;
uuid: Uuid, async fn dump(&self, uuid: Uuid, path: PathBuf) -> Result<()>;
index_settings: IndexSettings, async fn get_index_stats(&self, uuid: Uuid) -> Result<IndexStats>;
) -> IndexResult<IndexMeta>;
async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()>;
async fn dump(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()>;
async fn get_index_stats(&self, uuid: Uuid) -> IndexResult<IndexStats>;
} }
#[cfg(test)] #[cfg(test)]
@ -135,11 +94,7 @@ mod test {
#[async_trait::async_trait] #[async_trait::async_trait]
/// Useful for passing around an `Arc<MockIndexActorHandle>` in tests. /// Useful for passing around an `Arc<MockIndexActorHandle>` in tests.
impl IndexActorHandle for Arc<MockIndexActorHandle> { impl IndexActorHandle for Arc<MockIndexActorHandle> {
async fn create_index( async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta> {
&self,
uuid: Uuid,
primary_key: Option<String>,
) -> IndexResult<IndexMeta> {
self.as_ref().create_index(uuid, primary_key).await self.as_ref().create_index(uuid, primary_key).await
} }
@ -148,15 +103,15 @@ mod test {
uuid: Uuid, uuid: Uuid,
meta: Processing, meta: Processing,
data: Option<std::fs::File>, data: Option<std::fs::File>,
) -> anyhow::Result<Result<Processed, Failed>> { ) -> Result<std::result::Result<Processed, Failed>> {
self.as_ref().update(uuid, meta, data).await self.as_ref().update(uuid, meta, data).await
} }
async fn search(&self, uuid: Uuid, query: SearchQuery) -> IndexResult<SearchResult> { async fn search(&self, uuid: Uuid, query: SearchQuery) -> Result<SearchResult> {
self.as_ref().search(uuid, query).await self.as_ref().search(uuid, query).await
} }
async fn settings(&self, uuid: Uuid) -> IndexResult<Settings<Checked>> { async fn settings(&self, uuid: Uuid) -> Result<Settings<Checked>> {
self.as_ref().settings(uuid).await self.as_ref().settings(uuid).await
} }
@ -166,7 +121,7 @@ mod test {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Vec<Document>> { ) -> Result<Vec<Document>> {
self.as_ref() self.as_ref()
.documents(uuid, offset, limit, attributes_to_retrieve) .documents(uuid, offset, limit, attributes_to_retrieve)
.await .await
@ -177,17 +132,17 @@ mod test {
uuid: Uuid, uuid: Uuid,
doc_id: String, doc_id: String,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> IndexResult<Document> { ) -> Result<Document> {
self.as_ref() self.as_ref()
.document(uuid, doc_id, attributes_to_retrieve) .document(uuid, doc_id, attributes_to_retrieve)
.await .await
} }
async fn delete(&self, uuid: Uuid) -> IndexResult<()> { async fn delete(&self, uuid: Uuid) -> Result<()> {
self.as_ref().delete(uuid).await self.as_ref().delete(uuid).await
} }
async fn get_index_meta(&self, uuid: Uuid) -> IndexResult<IndexMeta> { async fn get_index_meta(&self, uuid: Uuid) -> Result<IndexMeta> {
self.as_ref().get_index_meta(uuid).await self.as_ref().get_index_meta(uuid).await
} }
@ -195,19 +150,19 @@ mod test {
&self, &self,
uuid: Uuid, uuid: Uuid,
index_settings: IndexSettings, index_settings: IndexSettings,
) -> IndexResult<IndexMeta> { ) -> Result<IndexMeta> {
self.as_ref().update_index(uuid, index_settings).await self.as_ref().update_index(uuid, index_settings).await
} }
async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()> { async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> Result<()> {
self.as_ref().snapshot(uuid, path).await self.as_ref().snapshot(uuid, path).await
} }
async fn dump(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()> { async fn dump(&self, uuid: Uuid, path: PathBuf) -> Result<()> {
self.as_ref().dump(uuid, path).await self.as_ref().dump(uuid, path).await
} }
async fn get_index_stats(&self, uuid: Uuid) -> IndexResult<IndexStats> { async fn get_index_stats(&self, uuid: Uuid) -> Result<IndexStats> {
self.as_ref().get_index_stats(uuid).await self.as_ref().get_index_stats(uuid).await
} }
} }

View File

@ -8,16 +8,16 @@ use tokio::sync::RwLock;
use tokio::task::spawn_blocking; use tokio::task::spawn_blocking;
use uuid::Uuid; use uuid::Uuid;
use super::{IndexError, IndexResult}; use super::error::{IndexActorError, Result};
use crate::index::Index; use crate::index::Index;
type AsyncMap<K, V> = Arc<RwLock<HashMap<K, V>>>; type AsyncMap<K, V> = Arc<RwLock<HashMap<K, V>>>;
#[async_trait::async_trait] #[async_trait::async_trait]
pub trait IndexStore { pub trait IndexStore {
async fn create(&self, uuid: Uuid, primary_key: Option<String>) -> IndexResult<Index>; async fn create(&self, uuid: Uuid, primary_key: Option<String>) -> Result<Index>;
async fn get(&self, uuid: Uuid) -> IndexResult<Option<Index>>; async fn get(&self, uuid: Uuid) -> Result<Option<Index>>;
async fn delete(&self, uuid: Uuid) -> IndexResult<Option<Index>>; async fn delete(&self, uuid: Uuid) -> Result<Option<Index>>;
} }
pub struct MapIndexStore { pub struct MapIndexStore {
@ -40,7 +40,7 @@ impl MapIndexStore {
#[async_trait::async_trait] #[async_trait::async_trait]
impl IndexStore for MapIndexStore { impl IndexStore for MapIndexStore {
async fn create(&self, uuid: Uuid, primary_key: Option<String>) -> IndexResult<Index> { async fn create(&self, uuid: Uuid, primary_key: Option<String>) -> Result<Index> {
// We need to keep the lock until we are sure the db file has been opened correclty, to // We need to keep the lock until we are sure the db file has been opened correclty, to
// ensure that another db is not created at the same time. // ensure that another db is not created at the same time.
let mut lock = self.index_store.write().await; let mut lock = self.index_store.write().await;
@ -50,19 +50,18 @@ impl IndexStore for MapIndexStore {
} }
let path = self.path.join(format!("index-{}", uuid)); let path = self.path.join(format!("index-{}", uuid));
if path.exists() { if path.exists() {
return Err(IndexError::IndexAlreadyExists); return Err(IndexActorError::IndexAlreadyExists);
} }
let index_size = self.index_size; let index_size = self.index_size;
let index = spawn_blocking(move || -> IndexResult<Index> { let index = spawn_blocking(move || -> Result<Index> {
let index = Index::open(path, index_size)?; let index = Index::open(path, index_size)?;
if let Some(primary_key) = primary_key { if let Some(primary_key) = primary_key {
let mut txn = index.write_txn()?; let mut txn = index.write_txn()?;
let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index); let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
builder.set_primary_key(primary_key); builder.set_primary_key(primary_key);
builder.execute(|_, _| ()) builder.execute(|_, _| ())?;
.map_err(|e| IndexError::Internal(e.to_string()))?;
txn.commit()?; txn.commit()?;
} }
@ -75,7 +74,7 @@ impl IndexStore for MapIndexStore {
Ok(index) Ok(index)
} }
async fn get(&self, uuid: Uuid) -> IndexResult<Option<Index>> { async fn get(&self, uuid: Uuid) -> Result<Option<Index>> {
let guard = self.index_store.read().await; let guard = self.index_store.read().await;
match guard.get(&uuid) { match guard.get(&uuid) {
Some(index) => Ok(Some(index.clone())), Some(index) => Ok(Some(index.clone())),
@ -95,7 +94,7 @@ impl IndexStore for MapIndexStore {
} }
} }
async fn delete(&self, uuid: Uuid) -> IndexResult<Option<Index>> { async fn delete(&self, uuid: Uuid) -> Result<Option<Index>> {
let db_path = self.path.join(format!("index-{}", uuid)); let db_path = self.path.join(format!("index-{}", uuid));
fs::remove_dir_all(db_path).await?; fs::remove_dir_all(db_path).await?;
let index = self.index_store.write().await.remove(&uuid); let index = self.index_store.write().await.remove(&uuid);

View File

@ -4,7 +4,6 @@ use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use actix_web::web::{Bytes, Payload}; use actix_web::web::{Bytes, Payload};
use anyhow::bail;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use futures::stream::StreamExt; use futures::stream::StreamExt;
use log::info; use log::info;
@ -20,15 +19,18 @@ use index_actor::IndexActorHandle;
use snapshot::{load_snapshot, SnapshotService}; use snapshot::{load_snapshot, SnapshotService};
use update_actor::UpdateActorHandle; use update_actor::UpdateActorHandle;
pub use updates::*; pub use updates::*;
use uuid_resolver::{UuidResolverError, UuidResolverHandle}; use uuid_resolver::{error::UuidResolverError, UuidResolverHandle};
use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings}; use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings};
use crate::option::Opt; use crate::option::Opt;
use error::Result;
use self::dump_actor::load_dump; use self::dump_actor::load_dump;
use self::error::IndexControllerError;
mod dump_actor; mod dump_actor;
mod index_actor; pub mod error;
pub mod index_actor;
mod snapshot; mod snapshot;
mod update_actor; mod update_actor;
mod updates; mod updates;
@ -151,7 +153,7 @@ impl IndexController {
format: milli::update::UpdateFormat, format: milli::update::UpdateFormat,
payload: Payload, payload: Payload,
primary_key: Option<String>, primary_key: Option<String>,
) -> anyhow::Result<UpdateStatus> { ) -> Result<UpdateStatus> {
let perform_update = |uuid| async move { let perform_update = |uuid| async move {
let meta = UpdateMeta::DocumentsAddition { let meta = UpdateMeta::DocumentsAddition {
method, method,
@ -189,7 +191,7 @@ impl IndexController {
} }
} }
pub async fn clear_documents(&self, uid: String) -> anyhow::Result<UpdateStatus> { pub async fn clear_documents(&self, uid: String) -> Result<UpdateStatus> {
let uuid = self.uuid_resolver.get(uid).await?; let uuid = self.uuid_resolver.get(uid).await?;
let meta = UpdateMeta::ClearDocuments; let meta = UpdateMeta::ClearDocuments;
let (_, receiver) = mpsc::channel(1); let (_, receiver) = mpsc::channel(1);
@ -201,7 +203,7 @@ impl IndexController {
&self, &self,
uid: String, uid: String,
documents: Vec<String>, documents: Vec<String>,
) -> anyhow::Result<UpdateStatus> { ) -> Result<UpdateStatus> {
let uuid = self.uuid_resolver.get(uid).await?; let uuid = self.uuid_resolver.get(uid).await?;
let meta = UpdateMeta::DeleteDocuments { ids: documents }; let meta = UpdateMeta::DeleteDocuments { ids: documents };
let (_, receiver) = mpsc::channel(1); let (_, receiver) = mpsc::channel(1);
@ -214,7 +216,7 @@ impl IndexController {
uid: String, uid: String,
settings: Settings<Checked>, settings: Settings<Checked>,
create: bool, create: bool,
) -> anyhow::Result<UpdateStatus> { ) -> Result<UpdateStatus> {
let perform_udpate = |uuid| async move { let perform_udpate = |uuid| async move {
let meta = UpdateMeta::Settings(settings.into_unchecked()); let meta = UpdateMeta::Settings(settings.into_unchecked());
// Nothing so send, drop the sender right away, as not to block the update actor. // Nothing so send, drop the sender right away, as not to block the update actor.
@ -236,12 +238,9 @@ impl IndexController {
} }
} }
pub async fn create_index( pub async fn create_index(&self, index_settings: IndexSettings) -> Result<IndexMetadata> {
&self,
index_settings: IndexSettings,
) -> anyhow::Result<IndexMetadata> {
let IndexSettings { uid, primary_key } = index_settings; let IndexSettings { uid, primary_key } = index_settings;
let uid = uid.ok_or_else(|| anyhow::anyhow!("Can't create an index without a uid."))?; let uid = uid.ok_or(IndexControllerError::MissingUid)?;
let uuid = Uuid::new_v4(); let uuid = Uuid::new_v4();
let meta = self.index_handle.create_index(uuid, primary_key).await?; let meta = self.index_handle.create_index(uuid, primary_key).await?;
self.uuid_resolver.insert(uid.clone(), uuid).await?; self.uuid_resolver.insert(uid.clone(), uuid).await?;
@ -255,26 +254,26 @@ impl IndexController {
Ok(meta) Ok(meta)
} }
pub async fn delete_index(&self, uid: String) -> anyhow::Result<()> { pub async fn delete_index(&self, uid: String) -> Result<()> {
let uuid = self.uuid_resolver.delete(uid).await?; let uuid = self.uuid_resolver.delete(uid).await?;
self.update_handle.delete(uuid).await?; self.update_handle.delete(uuid).await?;
self.index_handle.delete(uuid).await?; self.index_handle.delete(uuid).await?;
Ok(()) Ok(())
} }
pub async fn update_status(&self, uid: String, id: u64) -> anyhow::Result<UpdateStatus> { pub async fn update_status(&self, uid: String, id: u64) -> Result<UpdateStatus> {
let uuid = self.uuid_resolver.get(uid).await?; let uuid = self.uuid_resolver.get(uid).await?;
let result = self.update_handle.update_status(uuid, id).await?; let result = self.update_handle.update_status(uuid, id).await?;
Ok(result) Ok(result)
} }
pub async fn all_update_status(&self, uid: String) -> anyhow::Result<Vec<UpdateStatus>> { pub async fn all_update_status(&self, uid: String) -> Result<Vec<UpdateStatus>> {
let uuid = self.uuid_resolver.get(uid).await?; let uuid = self.uuid_resolver.get(uid).await?;
let result = self.update_handle.get_all_updates_status(uuid).await?; let result = self.update_handle.get_all_updates_status(uuid).await?;
Ok(result) Ok(result)
} }
pub async fn list_indexes(&self) -> anyhow::Result<Vec<IndexMetadata>> { pub async fn list_indexes(&self) -> Result<Vec<IndexMetadata>> {
let uuids = self.uuid_resolver.list().await?; let uuids = self.uuid_resolver.list().await?;
let mut ret = Vec::new(); let mut ret = Vec::new();
@ -293,7 +292,7 @@ impl IndexController {
Ok(ret) Ok(ret)
} }
pub async fn settings(&self, uid: String) -> anyhow::Result<Settings<Checked>> { pub async fn settings(&self, uid: String) -> Result<Settings<Checked>> {
let uuid = self.uuid_resolver.get(uid.clone()).await?; let uuid = self.uuid_resolver.get(uid.clone()).await?;
let settings = self.index_handle.settings(uuid).await?; let settings = self.index_handle.settings(uuid).await?;
Ok(settings) Ok(settings)
@ -305,7 +304,7 @@ impl IndexController {
offset: usize, offset: usize,
limit: usize, limit: usize,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> anyhow::Result<Vec<Document>> { ) -> Result<Vec<Document>> {
let uuid = self.uuid_resolver.get(uid.clone()).await?; let uuid = self.uuid_resolver.get(uid.clone()).await?;
let documents = self let documents = self
.index_handle .index_handle
@ -319,7 +318,7 @@ impl IndexController {
uid: String, uid: String,
doc_id: String, doc_id: String,
attributes_to_retrieve: Option<Vec<String>>, attributes_to_retrieve: Option<Vec<String>>,
) -> anyhow::Result<Document> { ) -> Result<Document> {
let uuid = self.uuid_resolver.get(uid.clone()).await?; let uuid = self.uuid_resolver.get(uid.clone()).await?;
let document = self let document = self
.index_handle .index_handle
@ -331,10 +330,10 @@ impl IndexController {
pub async fn update_index( pub async fn update_index(
&self, &self,
uid: String, uid: String,
index_settings: IndexSettings, mut index_settings: IndexSettings,
) -> anyhow::Result<IndexMetadata> { ) -> Result<IndexMetadata> {
if index_settings.uid.is_some() { if index_settings.uid.is_some() {
bail!("Can't change the index uid.") index_settings.uid.take();
} }
let uuid = self.uuid_resolver.get(uid.clone()).await?; let uuid = self.uuid_resolver.get(uid.clone()).await?;
@ -348,13 +347,13 @@ impl IndexController {
Ok(meta) Ok(meta)
} }
pub async fn search(&self, uid: String, query: SearchQuery) -> anyhow::Result<SearchResult> { pub async fn search(&self, uid: String, query: SearchQuery) -> Result<SearchResult> {
let uuid = self.uuid_resolver.get(uid).await?; let uuid = self.uuid_resolver.get(uid).await?;
let result = self.index_handle.search(uuid, query).await?; let result = self.index_handle.search(uuid, query).await?;
Ok(result) Ok(result)
} }
pub async fn get_index(&self, uid: String) -> anyhow::Result<IndexMetadata> { pub async fn get_index(&self, uid: String) -> Result<IndexMetadata> {
let uuid = self.uuid_resolver.get(uid.clone()).await?; let uuid = self.uuid_resolver.get(uid.clone()).await?;
let meta = self.index_handle.get_index_meta(uuid).await?; let meta = self.index_handle.get_index_meta(uuid).await?;
let meta = IndexMetadata { let meta = IndexMetadata {
@ -366,11 +365,11 @@ impl IndexController {
Ok(meta) Ok(meta)
} }
pub async fn get_uuids_size(&self) -> anyhow::Result<u64> { pub async fn get_uuids_size(&self) -> Result<u64> {
Ok(self.uuid_resolver.get_size().await?) Ok(self.uuid_resolver.get_size().await?)
} }
pub async fn get_index_stats(&self, uid: String) -> anyhow::Result<IndexStats> { pub async fn get_index_stats(&self, uid: String) -> Result<IndexStats> {
let uuid = self.uuid_resolver.get(uid).await?; let uuid = self.uuid_resolver.get(uid).await?;
let update_infos = self.update_handle.get_info().await?; let update_infos = self.update_handle.get_info().await?;
let mut stats = self.index_handle.get_index_stats(uuid).await?; let mut stats = self.index_handle.get_index_stats(uuid).await?;
@ -379,7 +378,7 @@ impl IndexController {
Ok(stats) Ok(stats)
} }
pub async fn get_all_stats(&self) -> anyhow::Result<Stats> { pub async fn get_all_stats(&self) -> Result<Stats> {
let update_infos = self.update_handle.get_info().await?; let update_infos = self.update_handle.get_info().await?;
let mut database_size = self.get_uuids_size().await? + update_infos.size; let mut database_size = self.get_uuids_size().await? + update_infos.size;
let mut last_update: Option<DateTime<_>> = None; let mut last_update: Option<DateTime<_>> = None;
@ -405,11 +404,11 @@ impl IndexController {
}) })
} }
pub async fn create_dump(&self) -> anyhow::Result<DumpInfo> { pub async fn create_dump(&self) -> Result<DumpInfo> {
Ok(self.dump_handle.create_dump().await?) Ok(self.dump_handle.create_dump().await?)
} }
pub async fn dump_info(&self, uid: String) -> anyhow::Result<DumpInfo> { pub async fn dump_info(&self, uid: String) -> Result<DumpInfo> {
Ok(self.dump_handle.dump_info(uid).await?) Ok(self.dump_handle.dump_info(uid).await?)
} }
} }

View File

@ -142,9 +142,11 @@ mod test {
use super::*; use super::*;
use crate::index_controller::index_actor::MockIndexActorHandle; use crate::index_controller::index_actor::MockIndexActorHandle;
use crate::index_controller::update_actor::{ use crate::index_controller::update_actor::{
MockUpdateActorHandle, UpdateActorHandleImpl, UpdateError, error::UpdateActorError, MockUpdateActorHandle, UpdateActorHandleImpl,
};
use crate::index_controller::uuid_resolver::{
error::UuidResolverError, MockUuidResolverHandle,
}; };
use crate::index_controller::uuid_resolver::{MockUuidResolverHandle, UuidResolverError};
#[actix_rt::test] #[actix_rt::test]
async fn test_normal() { async fn test_normal() {
@ -224,7 +226,7 @@ mod test {
update_handle update_handle
.expect_snapshot() .expect_snapshot()
// abitrary error // abitrary error
.returning(|_, _| Box::pin(err(UpdateError::UnexistingUpdate(0)))); .returning(|_, _| Box::pin(err(UpdateActorError::UnexistingUpdate(0))));
let snapshot_path = tempfile::tempdir_in(".").unwrap(); let snapshot_path = tempfile::tempdir_in(".").unwrap();
let snapshot_service = SnapshotService::new( let snapshot_service = SnapshotService::new(

View File

@ -13,7 +13,8 @@ use tokio::io::AsyncWriteExt;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use uuid::Uuid; use uuid::Uuid;
use super::{PayloadData, Result, UpdateError, UpdateMsg, UpdateStore, UpdateStoreInfo}; use super::error::{Result, UpdateActorError};
use super::{PayloadData, UpdateMsg, UpdateStore, UpdateStoreInfo};
use crate::index_controller::index_actor::IndexActorHandle; use crate::index_controller::index_actor::IndexActorHandle;
use crate::index_controller::{UpdateMeta, UpdateStatus}; use crate::index_controller::{UpdateMeta, UpdateStatus};
@ -172,7 +173,8 @@ where
if copy(&mut checker, &mut sink()).is_err() || checker.finish().is_err() { if copy(&mut checker, &mut sink()).is_err() || checker.finish().is_err() {
// The json file is invalid, we use Serde to get a nice error message: // The json file is invalid, we use Serde to get a nice error message:
file.seek(SeekFrom::Start(0))?; file.seek(SeekFrom::Start(0))?;
let _: serde_json::Value = serde_json::from_reader(file)?; let _: serde_json::Value = serde_json::from_reader(file)
.map_err(|e| UpdateActorError::InvalidPayload(Box::new(e)))?;
} }
Some(uuid) Some(uuid)
} else { } else {
@ -200,9 +202,9 @@ where
async fn handle_get_update(&self, uuid: Uuid, id: u64) -> Result<UpdateStatus> { async fn handle_get_update(&self, uuid: Uuid, id: u64) -> Result<UpdateStatus> {
let store = self.store.clone(); let store = self.store.clone();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
let result = store let result = store
.meta(uuid, id)? .meta(uuid, id)?
.ok_or(UpdateError::UnexistingUpdate(id))?; .ok_or(UpdateActorError::UnexistingUpdate(id))?;
Ok(result) Ok(result)
}) })
.await? .await?
@ -230,7 +232,7 @@ where
let index_handle = self.index_handle.clone(); let index_handle = self.index_handle.clone();
let update_store = self.store.clone(); let update_store = self.store.clone();
tokio::task::spawn_blocking(move || -> anyhow::Result<()> { tokio::task::spawn_blocking(move || -> Result<()> {
update_store.dump(&uuids, path.to_path_buf(), index_handle)?; update_store.dump(&uuids, path.to_path_buf(), index_handle)?;
Ok(()) Ok(())
}) })
@ -241,7 +243,7 @@ where
async fn handle_get_info(&self) -> Result<UpdateStoreInfo> { async fn handle_get_info(&self) -> Result<UpdateStoreInfo> {
let update_store = self.store.clone(); let update_store = self.store.clone();
let info = tokio::task::spawn_blocking(move || -> anyhow::Result<UpdateStoreInfo> { let info = tokio::task::spawn_blocking(move || -> Result<UpdateStoreInfo> {
let info = update_store.get_info()?; let info = update_store.get_info()?;
Ok(info) Ok(info)
}) })

View File

@ -0,0 +1,56 @@
use std::error::Error;
use meilisearch_error::{Code, ErrorCode};
use crate::index_controller::index_actor::error::IndexActorError;
/// Convenience alias used throughout the update actor module.
pub type Result<T> = std::result::Result<T, UpdateActorError>;
/// Errors emitted by the update actor.
///
/// Variants either describe a failure local to update handling or wrap an
/// error from an enclosed scope (the index actor); the `ErrorCode` impl for
/// this type maps each variant to the public error code returned to clients.
#[derive(Debug, thiserror::Error)]
// NOTE(review): variants carry payloads of very different sizes (u64 vs
// boxed trait objects); the allow silences clippy instead of boxing — confirm
// this is intentional.
#[allow(clippy::large_enum_variant)]
pub enum UpdateActorError {
// The requested update id was not found in the store.
#[error("update {0} doesn't exist.")]
UnexistingUpdate(u64),
// Catch-all for lower-level failures (heed, I/O, serde, ...) converted
// via the `internal_error!` invocation in this module.
#[error("internal error processing update: {0}")]
Internal(Box<dyn Error + Send + Sync + 'static>),
// Error forwarded from the index actor scope; its own error code is used.
#[error("error with index: {0}")]
IndexActor(#[from] IndexActorError),
// Produced when the actor's channels fail (see the From impls for
// SendError/RecvError): the update store task is gone.
#[error(
"update store was shut down due to a fatal error, please check your logs for more info."
)]
FatalUpdateStoreError,
// The user-supplied update payload could not be parsed (e.g. invalid JSON).
#[error("invalid payload: {0}")]
InvalidPayload(Box<dyn Error + Send + Sync + 'static>),
}
// A failed send means the update actor's receiver end was dropped: treat it
// as the update store having shut down fatally.
impl<T> From<tokio::sync::mpsc::error::SendError<T>> for UpdateActorError {
fn from(_: tokio::sync::mpsc::error::SendError<T>) -> Self {
Self::FatalUpdateStoreError
}
}
// Likewise, a failed receive on the oneshot reply channel means the actor
// died before answering.
impl From<tokio::sync::oneshot::error::RecvError> for UpdateActorError {
fn from(_: tokio::sync::oneshot::error::RecvError) -> Self {
Self::FatalUpdateStoreError
}
}
// Blanket-convert low-level library errors into the `Internal` variant;
// these surface with the `internal-error` code until dedicated error codes
// are specified for them.
internal_error!(
UpdateActorError: heed::Error,
std::io::Error,
serde_json::Error,
actix_http::error::PayloadError,
tokio::task::JoinError
);
// Map each update actor error to the public error code exposed to clients.
impl ErrorCode for UpdateActorError {
fn error_code(&self) -> Code {
match self {
UpdateActorError::UnexistingUpdate(_) => Code::NotFound,
UpdateActorError::Internal(_) => Code::Internal,
// Delegate to the wrapped index actor error's own code.
UpdateActorError::IndexActor(e) => e.error_code(),
UpdateActorError::FatalUpdateStoreError => Code::Internal,
UpdateActorError::InvalidPayload(_) => Code::BadRequest,
}
}
}

View File

@ -6,10 +6,8 @@ use uuid::Uuid;
use crate::index_controller::{IndexActorHandle, UpdateStatus}; use crate::index_controller::{IndexActorHandle, UpdateStatus};
use super::{ use super::error::Result;
PayloadData, Result, UpdateActor, UpdateActorHandle, UpdateError, UpdateMeta, UpdateMsg, use super::{PayloadData, UpdateActor, UpdateActorHandle, UpdateMeta, UpdateMsg, UpdateStoreInfo};
UpdateStoreInfo,
};
#[derive(Clone)] #[derive(Clone)]
pub struct UpdateActorHandleImpl<D> { pub struct UpdateActorHandleImpl<D> {
@ -48,72 +46,42 @@ where
async fn get_all_updates_status(&self, uuid: Uuid) -> Result<Vec<UpdateStatus>> { async fn get_all_updates_status(&self, uuid: Uuid) -> Result<Vec<UpdateStatus>> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UpdateMsg::ListUpdates { uuid, ret }; let msg = UpdateMsg::ListUpdates { uuid, ret };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
async fn update_status(&self, uuid: Uuid, id: u64) -> Result<UpdateStatus> { async fn update_status(&self, uuid: Uuid, id: u64) -> Result<UpdateStatus> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UpdateMsg::GetUpdate { uuid, id, ret }; let msg = UpdateMsg::GetUpdate { uuid, id, ret };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
async fn delete(&self, uuid: Uuid) -> Result<()> { async fn delete(&self, uuid: Uuid) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UpdateMsg::Delete { uuid, ret }; let msg = UpdateMsg::Delete { uuid, ret };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
async fn snapshot(&self, uuids: HashSet<Uuid>, path: PathBuf) -> Result<()> { async fn snapshot(&self, uuids: HashSet<Uuid>, path: PathBuf) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UpdateMsg::Snapshot { uuids, path, ret }; let msg = UpdateMsg::Snapshot { uuids, path, ret };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
async fn dump(&self, uuids: HashSet<Uuid>, path: PathBuf) -> Result<()> { async fn dump(&self, uuids: HashSet<Uuid>, path: PathBuf) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UpdateMsg::Dump { uuids, path, ret }; let msg = UpdateMsg::Dump { uuids, path, ret };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
async fn get_info(&self) -> Result<UpdateStoreInfo> { async fn get_info(&self) -> Result<UpdateStoreInfo> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UpdateMsg::GetInfo { ret }; let msg = UpdateMsg::GetInfo { ret };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
async fn update( async fn update(
@ -129,12 +97,7 @@ where
meta, meta,
ret, ret,
}; };
self.sender self.sender.send(msg).await?;
.send(msg) receiver.await?
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?;
receiver
.await
.map_err(|_| UpdateError::FatalUpdateStoreError)?
} }
} }

View File

@ -4,7 +4,8 @@ use std::path::PathBuf;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use uuid::Uuid; use uuid::Uuid;
use super::{PayloadData, Result, UpdateMeta, UpdateStatus, UpdateStoreInfo}; use super::error::Result;
use super::{PayloadData, UpdateMeta, UpdateStatus, UpdateStoreInfo};
pub enum UpdateMsg<D> { pub enum UpdateMsg<D> {
Update { Update {

View File

@ -1,62 +1,29 @@
mod actor;
mod handle_impl;
mod message;
pub mod store;
use std::{collections::HashSet, path::PathBuf}; use std::{collections::HashSet, path::PathBuf};
use actix_http::error::PayloadError; use actix_http::error::PayloadError;
use thiserror::Error;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use uuid::Uuid; use uuid::Uuid;
use crate::index_controller::{UpdateMeta, UpdateStatus}; use crate::index_controller::{UpdateMeta, UpdateStatus};
use actor::UpdateActor; use actor::UpdateActor;
use error::Result;
use message::UpdateMsg; use message::UpdateMsg;
pub use handle_impl::UpdateActorHandleImpl; pub use handle_impl::UpdateActorHandleImpl;
pub use store::{UpdateStore, UpdateStoreInfo}; pub use store::{UpdateStore, UpdateStoreInfo};
pub type Result<T> = std::result::Result<T, UpdateError>; mod actor;
pub mod error;
mod handle_impl;
mod message;
pub mod store;
type PayloadData<D> = std::result::Result<D, PayloadError>; type PayloadData<D> = std::result::Result<D, PayloadError>;
#[cfg(test)] #[cfg(test)]
use mockall::automock; use mockall::automock;
#[derive(Debug, Error)]
pub enum UpdateError {
#[error("Update {0} doesn't exist.")]
UnexistingUpdate(u64),
#[error("Internal error processing update: {0}")]
Internal(String),
#[error(
"Update store was shut down due to a fatal error, please check your logs for more info."
)]
FatalUpdateStoreError,
}
macro_rules! internal_error {
($($other:path), *) => {
$(
impl From<$other> for UpdateError {
fn from(other: $other) -> Self {
Self::Internal(other.to_string())
}
}
)*
}
}
internal_error!(
heed::Error,
std::io::Error,
serde_json::Error,
PayloadError,
tokio::task::JoinError,
anyhow::Error
);
#[async_trait::async_trait] #[async_trait::async_trait]
#[cfg_attr(test, automock(type Data=Vec<u8>;))] #[cfg_attr(test, automock(type Data=Vec<u8>;))]
pub trait UpdateActorHandle { pub trait UpdateActorHandle {

View File

@ -9,7 +9,7 @@ use heed::{EnvOpenOptions, RoTxn};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use super::{State, UpdateStore}; use super::{Result, State, UpdateStore};
use crate::index_controller::{ use crate::index_controller::{
index_actor::IndexActorHandle, update_actor::store::update_uuid_to_file_path, Enqueued, index_actor::IndexActorHandle, update_actor::store::update_uuid_to_file_path, Enqueued,
UpdateStatus, UpdateStatus,
@ -27,7 +27,7 @@ impl UpdateStore {
uuids: &HashSet<Uuid>, uuids: &HashSet<Uuid>,
path: PathBuf, path: PathBuf,
handle: impl IndexActorHandle, handle: impl IndexActorHandle,
) -> anyhow::Result<()> { ) -> Result<()> {
let state_lock = self.state.write(); let state_lock = self.state.write();
state_lock.swap(State::Dumping); state_lock.swap(State::Dumping);
@ -52,7 +52,7 @@ impl UpdateStore {
txn: &RoTxn, txn: &RoTxn,
uuids: &HashSet<Uuid>, uuids: &HashSet<Uuid>,
path: impl AsRef<Path>, path: impl AsRef<Path>,
) -> anyhow::Result<()> { ) -> Result<()> {
let dump_data_path = path.as_ref().join("data.jsonl"); let dump_data_path = path.as_ref().join("data.jsonl");
let mut dump_data_file = File::create(dump_data_path)?; let mut dump_data_file = File::create(dump_data_path)?;
@ -71,7 +71,7 @@ impl UpdateStore {
uuids: &HashSet<Uuid>, uuids: &HashSet<Uuid>,
mut file: &mut File, mut file: &mut File,
dst_path: impl AsRef<Path>, dst_path: impl AsRef<Path>,
) -> anyhow::Result<()> { ) -> Result<()> {
let pendings = self.pending_queue.iter(txn)?.lazily_decode_data(); let pendings = self.pending_queue.iter(txn)?.lazily_decode_data();
for pending in pendings { for pending in pendings {
@ -103,7 +103,7 @@ impl UpdateStore {
txn: &RoTxn, txn: &RoTxn,
uuids: &HashSet<Uuid>, uuids: &HashSet<Uuid>,
mut file: &mut File, mut file: &mut File,
) -> anyhow::Result<()> { ) -> Result<()> {
let updates = self.updates.iter(txn)?.lazily_decode_data(); let updates = self.updates.iter(txn)?.lazily_decode_data();
for update in updates { for update in updates {
@ -175,7 +175,7 @@ async fn dump_indexes(
uuids: &HashSet<Uuid>, uuids: &HashSet<Uuid>,
handle: impl IndexActorHandle, handle: impl IndexActorHandle,
path: impl AsRef<Path>, path: impl AsRef<Path>,
) -> anyhow::Result<()> { ) -> Result<()> {
for uuid in uuids { for uuid in uuids {
handle.dump(*uuid, path.as_ref().to_owned()).await?; handle.dump(*uuid, path.as_ref().to_owned()).await?;
} }

View File

@ -23,9 +23,10 @@ use uuid::Uuid;
use codec::*; use codec::*;
use super::error::Result;
use super::UpdateMeta; use super::UpdateMeta;
use crate::helpers::EnvSizer;
use crate::index_controller::{index_actor::CONCURRENT_INDEX_MSG, updates::*, IndexActorHandle}; use crate::index_controller::{index_actor::CONCURRENT_INDEX_MSG, updates::*, IndexActorHandle};
use crate::{helpers::EnvSizer, index_controller::index_actor::IndexResult};
#[allow(clippy::upper_case_acronyms)] #[allow(clippy::upper_case_acronyms)]
type BEU64 = U64<heed::byteorder::BE>; type BEU64 = U64<heed::byteorder::BE>;
@ -269,11 +270,8 @@ impl UpdateStore {
} }
_ => { _ => {
let _update_id = self.next_update_id_raw(wtxn, index_uuid)?; let _update_id = self.next_update_id_raw(wtxn, index_uuid)?;
self.updates.put( self.updates
wtxn, .put(wtxn, &(index_uuid, update.id()), &update)?;
&(index_uuid, update.id()),
&update,
)?;
} }
} }
Ok(()) Ok(())
@ -282,10 +280,7 @@ impl UpdateStore {
/// Executes the user provided function on the next pending update (the one with the lowest id). /// Executes the user provided function on the next pending update (the one with the lowest id).
/// This is asynchronous as it let the user process the update with a read-only txn and /// This is asynchronous as it let the user process the update with a read-only txn and
/// only writing the result meta to the processed-meta store *after* it has been processed. /// only writing the result meta to the processed-meta store *after* it has been processed.
fn process_pending_update( fn process_pending_update(&self, index_handle: impl IndexActorHandle) -> Result<Option<()>> {
&self,
index_handle: impl IndexActorHandle,
) -> anyhow::Result<Option<()>> {
// Create a read transaction to be able to retrieve the pending update in order. // Create a read transaction to be able to retrieve the pending update in order.
let rtxn = self.env.read_txn()?; let rtxn = self.env.read_txn()?;
let first_meta = self.pending_queue.first(&rtxn)?; let first_meta = self.pending_queue.first(&rtxn)?;
@ -320,7 +315,7 @@ impl UpdateStore {
index_handle: impl IndexActorHandle, index_handle: impl IndexActorHandle,
index_uuid: Uuid, index_uuid: Uuid,
global_id: u64, global_id: u64,
) -> anyhow::Result<Option<()>> { ) -> Result<Option<()>> {
let content_path = content.map(|uuid| update_uuid_to_file_path(&self.path, uuid)); let content_path = content.map(|uuid| update_uuid_to_file_path(&self.path, uuid));
let update_id = processing.id(); let update_id = processing.id();
@ -337,7 +332,7 @@ impl UpdateStore {
let result = let result =
match handle.block_on(index_handle.update(index_uuid, processing.clone(), file)) { match handle.block_on(index_handle.update(index_uuid, processing.clone(), file)) {
Ok(result) => result, Ok(result) => result,
Err(e) => Err(processing.fail(e.to_string())), Err(e) => Err(processing.fail(e.into())),
}; };
// Once the pending update have been successfully processed // Once the pending update have been successfully processed
@ -352,11 +347,8 @@ impl UpdateStore {
Err(res) => res.into(), Err(res) => res.into(),
}; };
self.updates.put( self.updates
&mut wtxn, .put(&mut wtxn, &(index_uuid, update_id), &result)?;
&(index_uuid, update_id),
&result,
)?;
wtxn.commit()?; wtxn.commit()?;
@ -368,7 +360,7 @@ impl UpdateStore {
} }
/// List the updates for `index_uuid`. /// List the updates for `index_uuid`.
pub fn list(&self, index_uuid: Uuid) -> anyhow::Result<Vec<UpdateStatus>> { pub fn list(&self, index_uuid: Uuid) -> Result<Vec<UpdateStatus>> {
let mut update_list = BTreeMap::<u64, UpdateStatus>::new(); let mut update_list = BTreeMap::<u64, UpdateStatus>::new();
let txn = self.env.read_txn()?; let txn = self.env.read_txn()?;
@ -437,7 +429,7 @@ impl UpdateStore {
} }
/// Delete all updates for an index from the update store. /// Delete all updates for an index from the update store.
pub fn delete_all(&self, index_uuid: Uuid) -> anyhow::Result<()> { pub fn delete_all(&self, index_uuid: Uuid) -> Result<()> {
let mut txn = self.env.write_txn()?; let mut txn = self.env.write_txn()?;
// Contains all the content file paths that we need to be removed if the deletion was successful. // Contains all the content file paths that we need to be removed if the deletion was successful.
let mut uuids_to_remove = Vec::new(); let mut uuids_to_remove = Vec::new();
@ -488,7 +480,7 @@ impl UpdateStore {
uuids: &HashSet<Uuid>, uuids: &HashSet<Uuid>,
path: impl AsRef<Path>, path: impl AsRef<Path>,
handle: impl IndexActorHandle + Clone, handle: impl IndexActorHandle + Clone,
) -> anyhow::Result<()> { ) -> Result<()> {
let state_lock = self.state.write(); let state_lock = self.state.write();
state_lock.swap(State::Snapshoting); state_lock.swap(State::Snapshoting);
@ -535,13 +527,13 @@ impl UpdateStore {
while let Some(res) = stream.next().await { while let Some(res) = stream.next().await {
res?; res?;
} }
Ok(()) as IndexResult<()> Ok(()) as Result<()>
})?; })?;
Ok(()) Ok(())
} }
pub fn get_info(&self) -> anyhow::Result<UpdateStoreInfo> { pub fn get_info(&self) -> Result<UpdateStoreInfo> {
let mut size = self.env.size(); let mut size = self.env.size();
let txn = self.env.read_txn()?; let txn = self.env.read_txn()?;
for entry in self.pending_queue.iter(&txn)? { for entry in self.pending_queue.iter(&txn)? {
@ -573,7 +565,7 @@ fn update_uuid_to_file_path(root: impl AsRef<Path>, uuid: Uuid) -> PathBuf {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use crate::index_controller::{index_actor::MockIndexActorHandle, UpdateResult}; use crate::index_controller::{UpdateResult, index_actor::{MockIndexActorHandle, error::IndexActorError}};
use futures::future::ok; use futures::future::ok;
@ -652,7 +644,7 @@ mod test {
if processing.id() == 0 { if processing.id() == 0 {
Box::pin(ok(Ok(processing.process(UpdateResult::Other)))) Box::pin(ok(Ok(processing.process(UpdateResult::Other))))
} else { } else {
Box::pin(ok(Err(processing.fail(String::from("err"))))) Box::pin(ok(Err(processing.fail(IndexActorError::ExistingPrimaryKey.into()))))
} }
}); });
@ -703,18 +695,10 @@ mod test {
let txn = store.env.read_txn().unwrap(); let txn = store.env.read_txn().unwrap();
assert!(store.pending_queue.first(&txn).unwrap().is_none()); assert!(store.pending_queue.first(&txn).unwrap().is_none());
let update = store let update = store.updates.get(&txn, &(uuid, 0)).unwrap().unwrap();
.updates
.get(&txn, &(uuid, 0))
.unwrap()
.unwrap();
assert!(matches!(update, UpdateStatus::Processed(_))); assert!(matches!(update, UpdateStatus::Processed(_)));
let update = store let update = store.updates.get(&txn, &(uuid, 1)).unwrap().unwrap();
.updates
.get(&txn, &(uuid, 1))
.unwrap()
.unwrap();
assert!(matches!(update, UpdateStatus::Failed(_))); assert!(matches!(update, UpdateStatus::Failed(_)));
} }

View File

@ -3,9 +3,7 @@ use milli::update::{DocumentAdditionResult, IndexDocumentsMethod, UpdateFormat};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::index::{Settings, Unchecked}; use crate::{error::ResponseError, index::{Settings, Unchecked}};
pub type UpdateError = String;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateResult { pub enum UpdateResult {
@ -25,7 +23,7 @@ pub enum UpdateMeta {
}, },
ClearDocuments, ClearDocuments,
DeleteDocuments { DeleteDocuments {
ids: Vec<String> ids: Vec<String>,
}, },
Settings(Settings<Unchecked>), Settings(Settings<Unchecked>),
} }
@ -116,7 +114,7 @@ impl Processing {
} }
} }
pub fn fail(self, error: UpdateError) -> Failed { pub fn fail(self, error: ResponseError) -> Failed {
Failed { Failed {
from: self, from: self,
error, error,
@ -143,12 +141,12 @@ impl Aborted {
} }
} }
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Failed { pub struct Failed {
#[serde(flatten)] #[serde(flatten)]
pub from: Processing, pub from: Processing,
pub error: UpdateError, pub error: ResponseError,
pub failed_at: DateTime<Utc>, pub failed_at: DateTime<Utc>,
} }
@ -162,7 +160,7 @@ impl Failed {
} }
} }
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "status", rename_all = "camelCase")] #[serde(tag = "status", rename_all = "camelCase")]
pub enum UpdateStatus { pub enum UpdateStatus {
Processing(Processing), Processing(Processing),

View File

@ -4,7 +4,7 @@ use log::{info, warn};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use uuid::Uuid; use uuid::Uuid;
use super::{Result, UuidResolveMsg, UuidResolverError, UuidStore}; use super::{error::UuidResolverError, Result, UuidResolveMsg, UuidStore};
pub struct UuidResolverActor<S> { pub struct UuidResolverActor<S> {
inbox: mpsc::Receiver<UuidResolveMsg>, inbox: mpsc::Receiver<UuidResolveMsg>,

View File

@ -0,0 +1,34 @@
use meilisearch_error::{Code, ErrorCode};
pub type Result<T> = std::result::Result<T, UuidResolverError>;
#[derive(Debug, thiserror::Error)]
pub enum UuidResolverError {
#[error("name already exist.")]
NameAlreadyExist,
#[error("index \"{0}\" doesn't exist.")]
UnexistingIndex(String),
#[error("badly formatted index uid: {0}")]
BadlyFormatted(String),
#[error("internal error resolving index uid: {0}")]
Internal(Box<dyn std::error::Error + Sync + Send + 'static>),
}
// Blanket-convert low-level library errors into the `Internal` variant;
// these surface with the `internal-error` code until dedicated error codes
// are specified for them.
internal_error!(
UuidResolverError: heed::Error,
uuid::Error,
std::io::Error,
tokio::task::JoinError,
serde_json::Error
);
// Map each uuid resolver error to the public error code exposed to clients.
impl ErrorCode for UuidResolverError {
fn error_code(&self) -> Code {
match self {
UuidResolverError::NameAlreadyExist => Code::IndexAlreadyExists,
UuidResolverError::UnexistingIndex(_) => Code::IndexNotFound,
UuidResolverError::BadlyFormatted(_) => Code::InvalidIndexUid,
UuidResolverError::Internal(_) => Code::Internal,
}
}
}

View File

@ -12,7 +12,7 @@ pub struct UuidResolverHandleImpl {
} }
impl UuidResolverHandleImpl { impl UuidResolverHandleImpl {
pub fn new(path: impl AsRef<Path>) -> anyhow::Result<Self> { pub fn new(path: impl AsRef<Path>) -> Result<Self> {
let (sender, reveiver) = mpsc::channel(100); let (sender, reveiver) = mpsc::channel(100);
let store = HeedUuidStore::new(path)?; let store = HeedUuidStore::new(path)?;
let actor = UuidResolverActor::new(reveiver, store); let actor = UuidResolverActor::new(reveiver, store);
@ -32,7 +32,7 @@ impl UuidResolverHandle for UuidResolverHandleImpl {
.expect("Uuid resolver actor has been killed")?) .expect("Uuid resolver actor has been killed")?)
} }
async fn delete(&self, name: String) -> anyhow::Result<Uuid> { async fn delete(&self, name: String) -> Result<Uuid> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UuidResolveMsg::Delete { uid: name, ret }; let msg = UuidResolveMsg::Delete { uid: name, ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
@ -41,7 +41,7 @@ impl UuidResolverHandle for UuidResolverHandleImpl {
.expect("Uuid resolver actor has been killed")?) .expect("Uuid resolver actor has been killed")?)
} }
async fn list(&self) -> anyhow::Result<Vec<(String, Uuid)>> { async fn list(&self) -> Result<Vec<(String, Uuid)>> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UuidResolveMsg::List { ret }; let msg = UuidResolveMsg::List { ret };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;
@ -50,7 +50,7 @@ impl UuidResolverHandle for UuidResolverHandleImpl {
.expect("Uuid resolver actor has been killed")?) .expect("Uuid resolver actor has been killed")?)
} }
async fn insert(&self, name: String, uuid: Uuid) -> anyhow::Result<()> { async fn insert(&self, name: String, uuid: Uuid) -> Result<()> {
let (ret, receiver) = oneshot::channel(); let (ret, receiver) = oneshot::channel();
let msg = UuidResolveMsg::Insert { ret, name, uuid }; let msg = UuidResolveMsg::Insert { ret, name, uuid };
let _ = self.sender.send(msg).await; let _ = self.sender.send(msg).await;

View File

@ -1,4 +1,5 @@
mod actor; mod actor;
pub mod error;
mod handle_impl; mod handle_impl;
mod message; mod message;
pub mod store; pub mod store;
@ -6,10 +7,10 @@ pub mod store;
use std::collections::HashSet; use std::collections::HashSet;
use std::path::PathBuf; use std::path::PathBuf;
use thiserror::Error;
use uuid::Uuid; use uuid::Uuid;
use actor::UuidResolverActor; use actor::UuidResolverActor;
use error::Result;
use message::UuidResolveMsg; use message::UuidResolveMsg;
use store::UuidStore; use store::UuidStore;
@ -21,48 +22,14 @@ pub use store::HeedUuidStore;
const UUID_STORE_SIZE: usize = 1_073_741_824; //1GiB const UUID_STORE_SIZE: usize = 1_073_741_824; //1GiB
pub type Result<T> = std::result::Result<T, UuidResolverError>;
#[async_trait::async_trait] #[async_trait::async_trait]
#[cfg_attr(test, automock)] #[cfg_attr(test, automock)]
pub trait UuidResolverHandle { pub trait UuidResolverHandle {
async fn get(&self, name: String) -> Result<Uuid>; async fn get(&self, name: String) -> Result<Uuid>;
async fn insert(&self, name: String, uuid: Uuid) -> anyhow::Result<()>; async fn insert(&self, name: String, uuid: Uuid) -> Result<()>;
async fn delete(&self, name: String) -> anyhow::Result<Uuid>; async fn delete(&self, name: String) -> Result<Uuid>;
async fn list(&self) -> anyhow::Result<Vec<(String, Uuid)>>; async fn list(&self) -> Result<Vec<(String, Uuid)>>;
async fn snapshot(&self, path: PathBuf) -> Result<HashSet<Uuid>>; async fn snapshot(&self, path: PathBuf) -> Result<HashSet<Uuid>>;
async fn get_size(&self) -> Result<u64>; async fn get_size(&self) -> Result<u64>;
async fn dump(&self, path: PathBuf) -> Result<HashSet<Uuid>>; async fn dump(&self, path: PathBuf) -> Result<HashSet<Uuid>>;
} }
#[derive(Debug, Error)]
pub enum UuidResolverError {
#[error("Name already exist.")]
NameAlreadyExist,
#[error("Index \"{0}\" doesn't exist.")]
UnexistingIndex(String),
#[error("Badly formatted index uid: {0}")]
BadlyFormatted(String),
#[error("Internal error resolving index uid: {0}")]
Internal(String),
}
macro_rules! internal_error {
($($other:path), *) => {
$(
impl From<$other> for UuidResolverError {
fn from(other: $other) -> Self {
Self::Internal(other.to_string())
}
}
)*
}
}
internal_error!(
heed::Error,
uuid::Error,
std::io::Error,
tokio::task::JoinError,
serde_json::Error
);

View File

@ -8,7 +8,7 @@ use heed::{CompactionOption, Database, Env, EnvOpenOptions};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use super::{Result, UuidResolverError, UUID_STORE_SIZE}; use super::{error::UuidResolverError, Result, UUID_STORE_SIZE};
use crate::helpers::EnvSizer; use crate::helpers::EnvSizer;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -39,7 +39,7 @@ pub struct HeedUuidStore {
} }
impl HeedUuidStore { impl HeedUuidStore {
pub fn new(path: impl AsRef<Path>) -> anyhow::Result<Self> { pub fn new(path: impl AsRef<Path>) -> Result<Self> {
let path = path.as_ref().join(UUIDS_DB_PATH); let path = path.as_ref().join(UUIDS_DB_PATH);
create_dir_all(&path)?; create_dir_all(&path)?;
let mut options = EnvOpenOptions::new(); let mut options = EnvOpenOptions::new();
@ -153,7 +153,7 @@ impl HeedUuidStore {
Ok(uuids) Ok(uuids)
} }
pub fn load_dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> { pub fn load_dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
let uuid_resolver_path = dst.as_ref().join(UUIDS_DB_PATH); let uuid_resolver_path = dst.as_ref().join(UUIDS_DB_PATH);
std::fs::create_dir_all(&uuid_resolver_path)?; std::fs::create_dir_all(&uuid_resolver_path)?;

View File

@ -1,4 +1,5 @@
pub mod data; pub mod data;
#[macro_use]
pub mod error; pub mod error;
pub mod helpers; pub mod helpers;
mod index; mod index;

View File

@ -30,6 +30,7 @@ async fn main() -> Result<(), MainError> {
.into(), .into(),
); );
} }
#[cfg(all(not(debug_assertions), feature = "analytics"))] #[cfg(all(not(debug_assertions), feature = "analytics"))]
if !opt.no_analytics { if !opt.no_analytics {
let logger = let logger =

View File

@ -61,15 +61,10 @@ async fn get_document(
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let index = path.index_uid.clone(); let index = path.index_uid.clone();
let id = path.document_id.clone(); let id = path.document_id.clone();
match data let document = data
.retrieve_document(index, id, None as Option<Vec<String>>) .retrieve_document(index, id, None as Option<Vec<String>>)
.await .await?;
{ Ok(HttpResponse::Ok().json(document))
Ok(document) => Ok(HttpResponse::Ok().json(document)),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[delete( #[delete(
@ -80,17 +75,10 @@ async fn delete_document(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<DocumentParam>, path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
match data let update_status = data
.delete_documents(path.index_uid.clone(), vec![path.document_id.clone()]) .delete_documents(path.index_uid.clone(), vec![path.document_id.clone()])
.await .await?;
{ Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
Ok(update_status) => Ok(
HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() }))
),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -118,20 +106,15 @@ async fn get_all_documents(
Some(names) Some(names)
}); });
match data let documents = data
.retrieve_documents( .retrieve_documents(
path.index_uid.clone(), path.index_uid.clone(),
params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET), params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT), params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
attributes_to_retrieve, attributes_to_retrieve,
) )
.await .await?;
{ Ok(HttpResponse::Ok().json(documents))
Ok(documents) => Ok(HttpResponse::Ok().json(documents)),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -149,7 +132,7 @@ async fn add_documents(
params: web::Query<UpdateDocumentsQuery>, params: web::Query<UpdateDocumentsQuery>,
body: Payload, body: Payload,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let addition_result = data let update_status = data
.add_documents( .add_documents(
path.into_inner().index_uid, path.into_inner().index_uid,
IndexDocumentsMethod::ReplaceDocuments, IndexDocumentsMethod::ReplaceDocuments,
@ -157,16 +140,9 @@ async fn add_documents(
body, body,
params.primary_key.clone(), params.primary_key.clone(),
) )
.await; .await?;
match addition_result { Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
Ok(update_status) => Ok(
HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() }))
),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
/// Default route for adding documents, this should return an error and redirect to the documentation /// Default route for adding documents, this should return an error and redirect to the documentation
@ -200,7 +176,7 @@ async fn update_documents(
params: web::Query<UpdateDocumentsQuery>, params: web::Query<UpdateDocumentsQuery>,
body: web::Payload, body: web::Payload,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let addition_result = data let update = data
.add_documents( .add_documents(
path.into_inner().index_uid, path.into_inner().index_uid,
IndexDocumentsMethod::UpdateDocuments, IndexDocumentsMethod::UpdateDocuments,
@ -208,16 +184,9 @@ async fn update_documents(
body, body,
params.primary_key.clone(), params.primary_key.clone(),
) )
.await; .await?;
match addition_result { Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update.id() })))
Ok(update) => {
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update.id() })))
}
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[post( #[post(
@ -238,14 +207,8 @@ async fn delete_documents(
}) })
.collect(); .collect();
match data.delete_documents(path.index_uid.clone(), ids).await { let update_status = data.delete_documents(path.index_uid.clone(), ids).await?;
Ok(update_status) => Ok( Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() }))
),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
/// delete all documents /// delete all documents
@ -254,12 +217,6 @@ async fn clear_all_documents(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
match data.clear_documents(path.index_uid.clone()).await { let update_status = data.clear_documents(path.index_uid.clone()).await?;
Ok(update_status) => Ok( Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() }))
),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }

View File

@ -3,9 +3,9 @@ use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{IndexParam, UpdateStatusResponse};
use crate::error::ResponseError; use crate::error::ResponseError;
use crate::helpers::Authentication; use crate::helpers::Authentication;
use super::{UpdateStatusResponse, IndexParam};
use crate::Data; use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) { pub fn services(cfg: &mut web::ServiceConfig) {
@ -20,12 +20,8 @@ pub fn services(cfg: &mut web::ServiceConfig) {
#[get("/indexes", wrap = "Authentication::Private")] #[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> { async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
match data.list_indexes().await { let indexes = data.list_indexes().await?;
Ok(indexes) => Ok(HttpResponse::Ok().json(indexes)), Ok(HttpResponse::Ok().json(indexes))
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[get("/indexes/{index_uid}", wrap = "Authentication::Private")] #[get("/indexes/{index_uid}", wrap = "Authentication::Private")]
@ -33,12 +29,8 @@ async fn get_index(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
match data.index(path.index_uid.clone()).await { let meta = data.index(path.index_uid.clone()).await?;
Ok(meta) => Ok(HttpResponse::Ok().json(meta)), Ok(HttpResponse::Ok().json(meta))
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
@ -54,12 +46,8 @@ async fn create_index(
body: web::Json<IndexCreateRequest>, body: web::Json<IndexCreateRequest>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let body = body.into_inner(); let body = body.into_inner();
match data.create_index(body.uid, body.primary_key).await { let meta = data.create_index(body.uid, body.primary_key).await?;
Ok(meta) => Ok(HttpResponse::Ok().json(meta)), Ok(HttpResponse::Ok().json(meta))
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
@ -86,15 +74,10 @@ async fn update_index(
body: web::Json<UpdateIndexRequest>, body: web::Json<UpdateIndexRequest>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let body = body.into_inner(); let body = body.into_inner();
match data let meta = data
.update_index(path.into_inner().index_uid, body.primary_key, body.uid) .update_index(path.into_inner().index_uid, body.primary_key, body.uid)
.await .await?;
{ Ok(HttpResponse::Ok().json(meta))
Ok(meta) => Ok(HttpResponse::Ok().json(meta)),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[delete("/indexes/{index_uid}", wrap = "Authentication::Private")] #[delete("/indexes/{index_uid}", wrap = "Authentication::Private")]
@ -102,12 +85,8 @@ async fn delete_index(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
match data.delete_index(path.index_uid.clone()).await { data.delete_index(path.index_uid.clone()).await?;
Ok(_) => Ok(HttpResponse::NoContent().finish()), Ok(HttpResponse::NoContent().finish())
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -125,18 +104,11 @@ async fn get_update_status(
path: web::Path<UpdateParam>, path: web::Path<UpdateParam>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let params = path.into_inner(); let params = path.into_inner();
let result = data let meta = data
.get_update_status(params.index_uid, params.update_id) .get_update_status(params.index_uid, params.update_id)
.await; .await?;
match result { let meta = UpdateStatusResponse::from(meta);
Ok(meta) => { Ok(HttpResponse::Ok().json(meta))
let meta = UpdateStatusResponse::from(meta);
Ok(HttpResponse::Ok().json(meta))
},
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")] #[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")]
@ -144,18 +116,11 @@ async fn get_all_updates_status(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let result = data.get_updates_status(path.into_inner().index_uid).await; let metas = data.get_updates_status(path.into_inner().index_uid).await?;
match result { let metas = metas
Ok(metas) => { .into_iter()
let metas = metas .map(UpdateStatusResponse::from)
.into_iter() .collect::<Vec<_>>();
.map(UpdateStatusResponse::from)
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(metas)) Ok(HttpResponse::Ok().json(metas))
},
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }

View File

@ -4,6 +4,7 @@ use actix_web::{get, HttpResponse};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::error::ResponseError;
use crate::index::{Settings, Unchecked}; use crate::index::{Settings, Unchecked};
use crate::index_controller::{UpdateMeta, UpdateResult, UpdateStatus}; use crate::index_controller::{UpdateMeta, UpdateResult, UpdateStatus};
@ -24,17 +25,19 @@ pub enum UpdateType {
Customs, Customs,
DocumentsAddition { DocumentsAddition {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
number: Option<usize> number: Option<usize>,
}, },
DocumentsPartial { DocumentsPartial {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
number: Option<usize> number: Option<usize>,
}, },
DocumentsDeletion { DocumentsDeletion {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
number: Option<usize> number: Option<usize>,
},
Settings {
settings: Settings<Unchecked>,
}, },
Settings { settings: Settings<Unchecked> },
} }
impl From<&UpdateStatus> for UpdateType { impl From<&UpdateStatus> for UpdateType {
@ -60,9 +63,9 @@ impl From<&UpdateStatus> for UpdateType {
} }
} }
UpdateMeta::ClearDocuments => UpdateType::ClearAll, UpdateMeta::ClearDocuments => UpdateType::ClearAll,
UpdateMeta::DeleteDocuments { ids } => { UpdateMeta::DeleteDocuments { ids } => UpdateType::DocumentsDeletion {
UpdateType::DocumentsDeletion { number: Some(ids.len()) } number: Some(ids.len()),
} },
UpdateMeta::Settings(settings) => UpdateType::Settings { UpdateMeta::Settings(settings) => UpdateType::Settings {
settings: settings.clone(), settings: settings.clone(),
}, },
@ -87,10 +90,8 @@ pub struct FailedUpdateResult {
pub update_id: u64, pub update_id: u64,
#[serde(rename = "type")] #[serde(rename = "type")]
pub update_type: UpdateType, pub update_type: UpdateType,
pub error: String, #[serde(flatten)]
pub error_type: String, pub response: ResponseError,
pub error_code: String,
pub error_link: String,
pub duration: f64, // in seconds pub duration: f64, // in seconds
pub enqueued_at: DateTime<Utc>, pub enqueued_at: DateTime<Utc>,
pub processed_at: DateTime<Utc>, pub processed_at: DateTime<Utc>,
@ -179,13 +180,13 @@ impl From<UpdateStatus> for UpdateStatusResponse {
// necessary since chrono::duration don't expose a f64 secs method. // necessary since chrono::duration don't expose a f64 secs method.
let duration = Duration::from_millis(duration as u64).as_secs_f64(); let duration = Duration::from_millis(duration as u64).as_secs_f64();
let update_id = failed.id();
let response = failed.error;
let content = FailedUpdateResult { let content = FailedUpdateResult {
update_id: failed.id(), update_id,
update_type, update_type,
error: failed.error, response,
error_type: String::from("todo"),
error_code: String::from("todo"),
error_link: String::from("todo"),
duration, duration,
enqueued_at: failed.from.from.enqueued_at, enqueued_at: failed.from.from.enqueued_at,
processed_at: failed.failed_at, processed_at: failed.failed_at,

View File

@ -1,9 +1,8 @@
use std::collections::{BTreeSet, HashSet}; use std::collections::{BTreeSet, HashSet};
use std::convert::{TryFrom, TryInto};
use actix_web::{get, post, web, HttpResponse}; use actix_web::{get, post, web, HttpResponse};
use serde_json::Value;
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value;
use crate::error::ResponseError; use crate::error::ResponseError;
use crate::helpers::Authentication; use crate::helpers::Authentication;
@ -30,10 +29,8 @@ pub struct SearchQueryGet {
facet_distributions: Option<String>, facet_distributions: Option<String>,
} }
impl TryFrom<SearchQueryGet> for SearchQuery { impl From<SearchQueryGet> for SearchQuery {
type Error = anyhow::Error; fn from(other: SearchQueryGet) -> Self {
fn try_from(other: SearchQueryGet) -> anyhow::Result<Self> {
let attributes_to_retrieve = other let attributes_to_retrieve = other
.attributes_to_retrieve .attributes_to_retrieve
.map(|attrs| attrs.split(',').map(String::from).collect::<BTreeSet<_>>()); .map(|attrs| attrs.split(',').map(String::from).collect::<BTreeSet<_>>());
@ -51,16 +48,14 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
.map(|attrs| attrs.split(',').map(String::from).collect::<Vec<_>>()); .map(|attrs| attrs.split(',').map(String::from).collect::<Vec<_>>());
let filter = match other.filter { let filter = match other.filter {
Some(f) => { Some(f) => match serde_json::from_str(&f) {
match serde_json::from_str(&f) { Ok(v) => Some(v),
Ok(v) => Some(v), _ => Some(Value::String(f)),
_ => Some(Value::String(f)),
}
}, },
None => None, None => None,
}; };
Ok(Self { Self {
q: other.q, q: other.q,
offset: other.offset, offset: other.offset,
limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT), limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
@ -71,7 +66,7 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
filter, filter,
matches: other.matches, matches: other.matches,
facet_distributions, facet_distributions,
}) }
} }
} }
@ -81,21 +76,9 @@ async fn search_with_url_query(
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
params: web::Query<SearchQueryGet>, params: web::Query<SearchQueryGet>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let query: SearchQuery = match params.into_inner().try_into() { let query = params.into_inner().into();
Ok(q) => q, let search_result = data.search(path.into_inner().index_uid, query).await?;
Err(e) => { Ok(HttpResponse::Ok().json(search_result))
return Ok(
HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() }))
)
}
};
let search_result = data.search(path.into_inner().index_uid, query).await;
match search_result {
Ok(docs) => Ok(HttpResponse::Ok().json(docs)),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[post("/indexes/{index_uid}/search", wrap = "Authentication::Public")] #[post("/indexes/{index_uid}/search", wrap = "Authentication::Public")]
@ -106,11 +89,6 @@ async fn search_with_post(
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let search_result = data let search_result = data
.search(path.into_inner().index_uid, params.into_inner()) .search(path.into_inner().index_uid, params.into_inner())
.await; .await?;
match search_result { Ok(HttpResponse::Ok().json(search_result))
Ok(docs) => Ok(HttpResponse::Ok().json(docs)),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }

View File

@ -26,14 +26,8 @@ macro_rules! make_setting_route {
$attr: Some(None), $attr: Some(None),
..Default::default() ..Default::default()
}; };
match data.update_settings(index_uid.into_inner(), settings, false).await { let update_status = data.update_settings(index_uid.into_inner(), settings, false).await?;
Ok(update_status) => { Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
}
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[actix_web::post($route, wrap = "Authentication::Private")] #[actix_web::post($route, wrap = "Authentication::Private")]
@ -47,14 +41,8 @@ macro_rules! make_setting_route {
..Default::default() ..Default::default()
}; };
match data.update_settings(index_uid.into_inner(), settings, true).await { let update_status = data.update_settings(index_uid.into_inner(), settings, true).await?;
Ok(update_status) => { Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
}
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[actix_web::get($route, wrap = "Authentication::Private")] #[actix_web::get($route, wrap = "Authentication::Private")]
@ -62,12 +50,8 @@ macro_rules! make_setting_route {
data: actix_web::web::Data<data::Data>, data: actix_web::web::Data<data::Data>,
index_uid: actix_web::web::Path<String>, index_uid: actix_web::web::Path<String>,
) -> std::result::Result<HttpResponse, ResponseError> { ) -> std::result::Result<HttpResponse, ResponseError> {
match data.settings(index_uid.into_inner()).await { let settings = data.settings(index_uid.into_inner()).await?;
Ok(settings) => Ok(HttpResponse::Ok().json(settings.$attr)), Ok(HttpResponse::Ok().json(settings.$attr))
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
} }
}; };
@ -148,17 +132,11 @@ async fn update_all(
body: web::Json<Settings<Unchecked>>, body: web::Json<Settings<Unchecked>>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let settings = body.into_inner().check(); let settings = body.into_inner().check();
match data let update_result = data
.update_settings(index_uid.into_inner(), settings, true) .update_settings(index_uid.into_inner(), settings, true)
.await .await?;
{ let json = serde_json::json!({ "updateId": update_result.id() });
Ok(update_result) => Ok( Ok(HttpResponse::Accepted().json(json))
HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_result.id() }))
),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")] #[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
@ -166,12 +144,8 @@ async fn get_all(
data: web::Data<Data>, data: web::Data<Data>,
index_uid: web::Path<String>, index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
match data.settings(index_uid.into_inner()).await { let settings = data.settings(index_uid.into_inner()).await?;
Ok(settings) => Ok(HttpResponse::Ok().json(settings)), Ok(HttpResponse::Ok().json(settings))
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }
#[delete("/indexes/{index_uid}/settings", wrap = "Authentication::Private")] #[delete("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
@ -180,15 +154,9 @@ async fn delete_all(
index_uid: web::Path<String>, index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let settings = Settings::cleared(); let settings = Settings::cleared();
match data let update_result = data
.update_settings(index_uid.into_inner(), settings, false) .update_settings(index_uid.into_inner(), settings, false)
.await .await?;
{ let json = serde_json::json!({ "updateId": update_result.id() });
Ok(update_result) => Ok( Ok(HttpResponse::Accepted().json(json))
HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_result.id() }))
),
Err(e) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": e.to_string() })))
}
}
} }

View File

@ -1,36 +0,0 @@
// Legacy settings route module: GET handler for an index's
// `distinct-attribute` setting, plus the generated update/delete routes.
use crate::make_update_delete_routes;
use actix_web::{web, HttpResponse, get};

use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::Data;

/// Returns the distinct attribute configured for `index_uid`, or JSON `null`
/// when none is set (or when the schema is absent).
///
/// Errors: missing index yields `Error::index_not_found` (converted into a
/// `ResponseError`); storage/transaction failures propagate via `?`.
#[get(
    "/indexes/{index_uid}/settings/distinct-attribute",
    wrap = "Authentication::Private"
)]
async fn get(
    data: web::Data<Data>,
    index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    // Open the index by uid; absence is a user-facing error, not a panic.
    let index = data
        .db
        .load()
        .open_index(&index_uid.as_ref())
        .ok_or(Error::index_not_found(&index_uid.as_ref()))?;
    let reader = data.db.load().main_read_txn()?;

    // The stored value is a field id; translate it back to the schema's
    // human-readable attribute name. If either the schema or the id is
    // missing, the setting is reported as null.
    let distinct_attribute_id = index.main.distinct_attribute(&reader)?;
    let schema = index.main.schema(&reader)?;
    let distinct_attribute = match (schema, distinct_attribute_id) {
        (Some(schema), Some(id)) => schema.name(id).map(str::to_string),
        _ => None,
    };

    Ok(HttpResponse::Ok().json(distinct_attribute))
}

// Generates the matching update (POST) and delete routes for this setting,
// typed as a `String` payload on the `distinct_attribute` field.
make_update_delete_routes!(
    "/indexes/{index_uid}/settings/distinct-attribute",
    String,
    distinct_attribute
);

View File

@ -1,23 +0,0 @@
// Legacy settings route module: GET handler for an index's ranking rules,
// plus the generated update/delete routes.
use crate::make_update_delete_routes;
use actix_web::{web, HttpResponse, get};

use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::Data;

/// GET handler for `/indexes/{index_uid}/settings/ranking-rules`.
///
/// NOTE(review): unimplemented stub — calling this route panics at runtime
/// via `todo!()` until a real implementation lands.
#[get(
    "/indexes/{index_uid}/settings/ranking-rules",
    wrap = "Authentication::Private"
)]
async fn get(
    data: web::Data<Data>,
    index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    todo!()
}

// Generates the matching update (POST) and delete routes for this setting,
// typed as a `Vec<String>` payload on the `ranking_rules` field.
make_update_delete_routes!(
    "/indexes/{index_uid}/settings/ranking-rules",
    Vec<String>,
    ranking_rules
);

View File

@ -1,43 +0,0 @@
// Legacy settings route module: GET handler for an index's synonyms,
// plus the generated update/delete routes.
use std::collections::BTreeMap;

use actix_web::{web, HttpResponse, get};
use indexmap::IndexMap;

use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::make_update_delete_routes;
use crate::Data;

/// Returns every synonym group defined on `index_uid` as a JSON object
/// mapping each source word to its list of alternatives.
///
/// Errors: missing index yields `Error::index_not_found`; store/transaction
/// failures propagate via `?` into a `ResponseError`.
#[get(
    "/indexes/{index_uid}/settings/synonyms",
    wrap = "Authentication::Private"
)]
async fn get(
    data: web::Data<Data>,
    index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    // Open the index by uid; absence is a user-facing error, not a panic.
    let index = data
        .db
        .load()
        .open_index(&index_uid.as_ref())
        .ok_or(Error::index_not_found(&index_uid.as_ref()))?;

    let reader = data.db.load().main_read_txn()?;

    // `synonyms_list` holds the source words; each word's alternatives are
    // fetched individually and assembled into an insertion-ordered map so
    // the serialized JSON preserves the store's iteration order.
    let synonyms_list = index.main.synonyms(&reader)?;

    let mut synonyms = IndexMap::new();
    let index_synonyms = &index.synonyms;

    for synonym in synonyms_list {
        let list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
        synonyms.insert(synonym, list);
    }

    Ok(HttpResponse::Ok().json(synonyms))
}

// Generates the matching update (POST) and delete routes for this setting,
// typed as a `BTreeMap<String, Vec<String>>` payload on the `synonyms` field.
make_update_delete_routes!(
    "/indexes/{index_uid}/settings/synonyms",
    BTreeMap<String, Vec<String>>,
    synonyms
);

View File

@ -6,7 +6,7 @@ use crate::common::{GetAllDocumentsOptions, Server};
async fn delete_one_document_unexisting_index() { async fn delete_one_document_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").delete_document(0).await; let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -33,14 +33,14 @@ async fn delete_one_document() {
index.wait_update_id(1).await; index.wait_update_id(1).await;
let (_response, code) = index.get_document(0, None).await; let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
async fn clear_all_documents_unexisting_index() { async fn clear_all_documents_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").clear_all_documents().await; let (_response, code) = server.index("test").clear_all_documents().await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -86,7 +86,7 @@ async fn clear_all_documents_empty_index() {
async fn delete_batch_unexisting_index() { async fn delete_batch_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").delete_batch(vec![]).await; let (_response, code) = server.index("test").delete_batch(vec![]).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]

View File

@ -9,7 +9,7 @@ use serde_json::json;
async fn get_unexisting_index_single_document() { async fn get_unexisting_index_single_document() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").get_document(1, None).await; let (_response, code) = server.index("test").get_document(1, None).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -18,7 +18,7 @@ async fn get_unexisting_document() {
let index = server.index("test"); let index = server.index("test");
index.create(None).await; index.create(None).await;
let (_response, code) = index.get_document(1, None).await; let (_response, code) = index.get_document(1, None).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -53,7 +53,7 @@ async fn get_unexisting_index_all_documents() {
.index("test") .index("test")
.get_all_documents(GetAllDocumentsOptions::default()) .get_all_documents(GetAllDocumentsOptions::default())
.await; .await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]

View File

@ -70,5 +70,5 @@ async fn test_create_multiple_indexes() {
assert_eq!(index1.get().await.1, 200); assert_eq!(index1.get().await.1, 200);
assert_eq!(index2.get().await.1, 200); assert_eq!(index2.get().await.1, 200);
assert_eq!(index3.get().await.1, 200); assert_eq!(index3.get().await.1, 200);
assert_eq!(index4.get().await.1, 400); assert_eq!(index4.get().await.1, 404);
} }

View File

@ -12,7 +12,7 @@ async fn create_and_delete_index() {
assert_eq!(code, 204); assert_eq!(code, 204);
assert_eq!(index.get().await.1, 400); assert_eq!(index.get().await.1, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -21,5 +21,5 @@ async fn delete_unexisting_index() {
let index = server.index("test"); let index = server.index("test");
let (_response, code) = index.delete().await; let (_response, code) = index.delete().await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }

View File

@ -21,7 +21,7 @@ async fn create_and_get_index() {
assert_eq!(response.as_object().unwrap().len(), 5); assert_eq!(response.as_object().unwrap().len(), 5);
} }
// TODO: partial test since we are testing error, amd error is not yet fully implemented in // TODO: partial test since we are testing error, and error is not yet fully implemented in
// transplant // transplant
#[actix_rt::test] #[actix_rt::test]
async fn get_unexisting_index() { async fn get_unexisting_index() {
@ -30,7 +30,7 @@ async fn get_unexisting_index() {
let (_response, code) = index.get().await; let (_response, code) = index.get().await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]

View File

@ -60,5 +60,5 @@ async fn update_existing_primary_key() {
async fn test_unexisting_index() { async fn test_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").update(None).await; let (_response, code) = server.index("test").update(None).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }

View File

@ -5,7 +5,7 @@ use serde_json::json;
async fn get_settings_unexisting_index() { async fn get_settings_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").settings().await; let (_response, code) = server.index("test").settings().await;
assert_eq!(code, 400) assert_eq!(code, 404)
} }
#[actix_rt::test] #[actix_rt::test]
@ -65,7 +65,7 @@ async fn delete_settings_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let index = server.index("test"); let index = server.index("test");
let (_response, code) = index.delete_settings().await; let (_response, code) = index.delete_settings().await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -152,7 +152,7 @@ macro_rules! test_setting_routes {
.map(|c| if c == '_' { '-' } else { c }) .map(|c| if c == '_' { '-' } else { c })
.collect::<String>()); .collect::<String>());
let (_response, code) = server.service.get(url).await; let (_response, code) = server.service.get(url).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -178,7 +178,7 @@ macro_rules! test_setting_routes {
.map(|c| if c == '_' { '-' } else { c }) .map(|c| if c == '_' { '-' } else { c })
.collect::<String>()); .collect::<String>());
let (_response, code) = server.service.delete(url).await; let (_response, code) = server.service.delete(url).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
} }
)* )*

View File

@ -39,8 +39,6 @@ async fn stats() {
assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 0); assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 0);
assert!(response["indexes"]["test"]["isIndexing"] == false); assert!(response["indexes"]["test"]["isIndexing"] == false);
let last_update = response["lastUpdate"].as_str().unwrap();
let documents = json!([ let documents = json!([
{ {
"id": 1, "id": 1,
@ -66,7 +64,7 @@ async fn stats() {
assert_eq!(code, 200); assert_eq!(code, 200);
assert!(response["databaseSize"].as_u64().unwrap() > 0); assert!(response["databaseSize"].as_u64().unwrap() > 0);
assert!(response["lastUpdate"].as_str().unwrap() > last_update); assert!(response.get("lastUpdate").is_some());
assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 2); assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 2);
assert!(response["indexes"]["test"]["isIndexing"] == false); assert!(response["indexes"]["test"]["isIndexing"] == false);
assert_eq!(response["indexes"]["test"]["fieldsDistribution"]["id"], 2); assert_eq!(response["indexes"]["test"]["fieldsDistribution"]["id"], 2);

View File

@ -4,7 +4,7 @@ use crate::common::Server;
async fn get_update_unexisting_index() { async fn get_update_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").get_update(0).await; let (_response, code) = server.index("test").get_update(0).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -13,7 +13,7 @@ async fn get_unexisting_update_status() {
let index = server.index("test"); let index = server.index("test");
index.create(None).await; index.create(None).await;
let (_response, code) = index.get_update(0).await; let (_response, code) = index.get_update(0).await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]
@ -39,7 +39,7 @@ async fn get_update_status() {
async fn list_updates_unexisting_index() { async fn list_updates_unexisting_index() {
let server = Server::new().await; let server = Server::new().await;
let (_response, code) = server.index("test").list_updates().await; let (_response, code) = server.index("test").list_updates().await;
assert_eq!(code, 400); assert_eq!(code, 404);
} }
#[actix_rt::test] #[actix_rt::test]