diff --git a/Cargo.lock b/Cargo.lock
index b55c1c8a3..19ef76d70 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1898,7 +1898,8 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
 [[package]]
 name = "pest"
 version = "2.1.3"
-source = "git+https://github.com/pest-parser/pest.git?rev=51fd1d49f1041f7839975664ef71fe15c7dcaf67#51fd1d49f1041f7839975664ef71fe15c7dcaf67"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
 dependencies = [
  "ucd-trie",
 ]
@@ -1906,8 +1907,7 @@ dependencies = [
 [[package]]
 name = "pest"
 version = "2.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
+source = "git+https://github.com/pest-parser/pest.git?rev=51fd1d49f1041f7839975664ef71fe15c7dcaf67#51fd1d49f1041f7839975664ef71fe15c7dcaf67"
 dependencies = [
  "ucd-trie",
 ]
diff --git a/meilisearch-http/src/dump.rs b/meilisearch-http/src/dump.rs
index 5c0557e6b..87ed869f1 100644
--- a/meilisearch-http/src/dump.rs
+++ b/meilisearch-http/src/dump.rs
@@ -13,10 +13,11 @@ use meilisearch_core::settings::Settings;
 use meilisearch_core::update::{apply_settings_update, apply_documents_addition};
 use once_cell::sync::Lazy;
 use serde::{Deserialize, Serialize};
+use serde_json::json;
 use tempfile::TempDir;
 
 use crate::Data;
-use crate::error::Error;
+use crate::error::{Error, ResponseError};
 use crate::helpers::compression;
 use crate::routes::index;
 use crate::routes::index::IndexResponse;
@@ -112,7 +113,7 @@ fn import_index_v1(
 
     // extract `settings.json` file and import content
     let settings = settings_from_path(&index_path)?;
-    let settings = settings.to_update().map_err(|_e| Error::dump_failed())?;
+    let settings = settings.to_update().map_err(|e| Error::dump_failed(format!("importing settings for index {}; {}", index_uid, e)))?;
     apply_settings_update(write_txn, &index, settings)?;
 
     // create iterator over documents in `documents.jsonl` to make batch importation
@@ -199,17 +200,17 @@ pub fn import_dump(
 #[serde(rename_all = "snake_case")]
 pub enum DumpStatus {
     Done,
-    Processing,
-    DumpProcessFailed,
+    InProgress,
+    Failed,
 }
 
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct DumpInfo {
     pub uid: String,
     pub status: DumpStatus,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub error: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none", flatten)]
+    pub error: Option<serde_json::Value>,
 }
 
 impl DumpInfo {
@@ -217,15 +218,15 @@
         Self { uid, status, error: None }
     }
 
-    pub fn with_error(mut self, error: String) -> Self {
-        self.status = DumpStatus::DumpProcessFailed;
-        self.error = Some(error);
+    pub fn with_error(mut self, error: ResponseError) -> Self {
+        self.status = DumpStatus::Failed;
+        self.error = Some(json!(error));
 
         self
     }
 
     pub fn dump_already_in_progress(&self) -> bool {
-        self.status == DumpStatus::Processing
+        self.status == DumpStatus::InProgress
     }
 
     pub fn get_current() -> Option<Self> {
@@ -299,10 +300,10 @@ fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, folder_path
 
 /// Write error with a context.
 fn fail_dump_process<E: std::error::Error>(dump_info: DumpInfo, context: &str, error: E) {
-    let error = format!("Something went wrong during dump process: {}; {}", context, error);
+    let error_message = format!("{}; {}", context, error);
 
-    error!("{}", &error);
-    dump_info.with_error(error).set_current();
+    error!("Something went wrong during dump process: {}", &error_message);
+    dump_info.with_error(Error::dump_failed(error_message).into()).set_current();
 }
 
 /// Main function of dump.
@@ -395,7 +396,7 @@ fn dump_process(data: web::Data<Data>, dumps_folder: PathBuf, dump_info: DumpInf
 }
 
 pub fn init_dump_process(data: &web::Data<Data>, dumps_folder: &Path) -> Result<DumpInfo, Error> {
-    create_dir_all(dumps_folder).or(Err(Error::dump_failed()))?;
+    create_dir_all(dumps_folder).map_err(|e| Error::dump_failed(format!("creating temporary directory {}", e)))?;
 
     // check if a dump is already in progress
     if let Some(resume) = DumpInfo::get_current() {
@@ -407,7 +408,7 @@ pub fn init_dump_process(data: &web::Data<Data>, dumps_folder: &Path) -> Result<
 
     // generate a new dump info
     let info = DumpInfo::new(
         generate_uid(),
-        DumpStatus::Processing
+        DumpStatus::InProgress
     );
 
     info.set_current();
diff --git a/meilisearch-http/src/error.rs b/meilisearch-http/src/error.rs
index e3488df8a..4c6834e87 100644
--- a/meilisearch-http/src/error.rs
+++ b/meilisearch-http/src/error.rs
@@ -5,7 +5,7 @@ use actix_http::ResponseBuilder;
 use actix_web as aweb;
 use actix_web::error::{JsonPayloadError, QueryPayloadError};
 use actix_web::http::StatusCode;
-use serde_json::json;
+use serde::ser::{Serialize, Serializer, SerializeStruct};
 
 use meilisearch_error::{ErrorCode, Code};
 
@@ -54,7 +54,7 @@ pub enum Error {
     PayloadTooLarge,
     UnsupportedMediaType,
     DumpAlreadyInProgress,
-    DumpProcessFailed,
+    DumpProcessFailed(String),
 }
 
 impl error::Error for Error {}
@@ -81,7 +81,7 @@ impl ErrorCode for Error {
             PayloadTooLarge => Code::PayloadTooLarge,
             UnsupportedMediaType => Code::UnsupportedMediaType,
             DumpAlreadyInProgress => Code::DumpAlreadyInProgress,
-            DumpProcessFailed => Code::DumpProcessFailed,
+            DumpProcessFailed(_) => Code::DumpProcessFailed,
         }
     }
 }
@@ -189,8 +189,8 @@ impl Error {
         Error::DumpAlreadyInProgress
     }
 
-    pub fn dump_failed() -> Error {
-        Error::DumpProcessFailed
+    pub fn dump_failed(message: String) -> Error {
+        Error::DumpProcessFailed(message)
     }
 }
 
@@ -215,19 +215,31 @@ impl fmt::Display for Error {
             Self::PayloadTooLarge => f.write_str("Payload too large"),
             Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
             Self::DumpAlreadyInProgress => f.write_str("Another dump is already in progress"),
-            Self::DumpProcessFailed => f.write_str("Dump process failed"),
+            Self::DumpProcessFailed(message) => write!(f, "Dump process failed: {}", message),
         }
     }
 }
 
+impl Serialize for ResponseError {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let struct_name = "ResponseError";
+        let field_count = 4;
+
+        let mut state = serializer.serialize_struct(struct_name, field_count)?;
+        state.serialize_field("message", &self.to_string())?;
+        state.serialize_field("errorCode", &self.error_name())?;
+        state.serialize_field("errorType", &self.error_type())?;
+        state.serialize_field("errorLink", &self.error_url())?;
+        state.end()
+    }
+}
+
 impl aweb::error::ResponseError for ResponseError {
     fn error_response(&self) -> aweb::HttpResponse {
-        ResponseBuilder::new(self.status_code()).json(json!({
-            "message": self.to_string(),
-            "errorCode": self.error_name(),
-            "errorType": self.error_type(),
-            "errorLink": self.error_url(),
-        }))
+        ResponseBuilder::new(self.status_code()).json(&self)
     }
 
     fn status_code(&self) -> StatusCode {
diff --git a/meilisearch-http/tests/dump.rs b/meilisearch-http/tests/dump.rs
index 75803c7cd..5da91e095 100644
--- a/meilisearch-http/tests/dump.rs
+++ b/meilisearch-http/tests/dump.rs
@@ -101,7 +101,7 @@ async fn trigger_dump_concurently_should_return_conflict() {
 
 #[actix_rt::test]
 #[ignore]
-async fn get_dump_status_early_should_return_processing() {
+async fn get_dump_status_early_should_return_in_progress() {
     let mut server = common::Server::test_server().await;
 
 
@@ -116,7 +116,7 @@ async fn get_dump_status_early_should_return_processing() {
 
     let expected = json!({
         "uid": dump_uid,
-        "status": "processing"
+        "status": "in_progress"
     });
 
     assert_eq!(status_code, 200);
@@ -150,6 +150,39 @@ async fn get_dump_status_should_return_done() {
     assert_json_eq!(expected.clone(), value.clone(), ordered: false);
 }
 
+#[actix_rt::test]
+#[ignore]
+async fn get_dump_status_should_return_error_provoking_it() {
+    let mut server = common::Server::test_server().await;
+
+
+    let (value, status_code) = server.trigger_dump().await;
+
+    // removing destination directory provoking `No such file or directory` error
+    std::fs::remove_dir(server.data().dumps_folder.clone()).unwrap();
+
+    assert_eq!(status_code, 202);
+
+    let dump_uid = value["uid"].as_str().unwrap().to_string();
+
+    let expected = json!({
+        "uid": dump_uid.clone(),
+        "status": "failed",
+        "message": "Dump process failed: compressing dump; No such file or directory (os error 2)",
+        "errorCode": "dump_process_failed",
+        "errorType": "internal_error",
+        "errorLink": "https://docs.meilisearch.com/errors#dump_process_failed"
+    });
+
+    thread::sleep(Duration::from_secs(1)); // wait dump until process end
+
+    let (value, status_code) = server.get_dump_status(&dump_uid).await;
+
+    assert_eq!(status_code, 200);
+
+    assert_json_eq!(expected.clone(), value.clone(), ordered: false);
+}
+
 #[actix_rt::test]
 #[ignore]
 async fn dump_metadata_should_be_valid() {
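Reviewer note, not part of the patch: the combination introduced above, a manual Serialize impl for ResponseError plus a flattened Option<serde_json::Value> error field on DumpInfo, is what makes a failed dump status carry message, errorCode, errorType and errorLink at the top level of the JSON payload, which is the shape the new test asserts. The following is a minimal standalone sketch of that pattern using only serde (with the "derive" feature) and serde_json; SketchError and SketchDumpInfo are hypothetical stand-ins, not names from the codebase.

// Standalone sketch, not part of the diff: illustrates the serialization
// pattern the patch introduces. `SketchError` / `SketchDumpInfo` are
// hypothetical stand-ins for the crate's `ResponseError` / `DumpInfo`.
use serde::ser::{Serialize, SerializeStruct, Serializer};
use serde_json::json;

struct SketchError {
    message: String,
}

impl Serialize for SketchError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Same four camelCase fields the patch emits for ResponseError.
        let mut state = serializer.serialize_struct("SketchError", 4)?;
        state.serialize_field("message", &self.message)?;
        state.serialize_field("errorCode", "dump_process_failed")?;
        state.serialize_field("errorType", "internal_error")?;
        state.serialize_field(
            "errorLink",
            "https://docs.meilisearch.com/errors#dump_process_failed",
        )?;
        state.end()
    }
}

#[derive(serde::Serialize)]
#[serde(rename_all = "camelCase")]
struct SketchDumpInfo {
    uid: String,
    status: String, // stands in for DumpStatus, e.g. "failed"
    // `flatten` hoists the error's fields to the top level of the payload,
    // which is the shape the new test asserts against.
    #[serde(skip_serializing_if = "Option::is_none", flatten)]
    error: Option<serde_json::Value>,
}

fn main() {
    let err = SketchError {
        message: "Dump process failed: compressing dump; No such file or directory (os error 2)"
            .to_string(),
    };
    let info = SketchDumpInfo {
        uid: "20200910-120000000".to_string(), // illustrative uid, not a real dump uid
        status: "failed".to_string(),
        error: Some(json!(err)), // same json!(error) conversion used in DumpInfo::with_error
    };
    // Prints one flat object: uid, status, message, errorCode, errorType, errorLink.
    println!("{}", serde_json::to_string_pretty(&info).unwrap());
}

Running the sketch prints a single flat JSON object with uid, status and the four error fields, mirroring the expected value in get_dump_status_should_return_error_provoking_it.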