From 47e0288747894e117838d0465cffe46cee4e05bd Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 6 Oct 2022 19:44:50 +0200 Subject: [PATCH] rewrite the compat API to something more generic --- Cargo.lock | 1 + dump/Cargo.toml | 1 + dump/src/reader/compat/mod.rs | 144 +++++++++- dump/src/reader/compat/v4_to_v5.rs | 444 +++++++++++++++++++++++++++++ dump/src/reader/compat/v5_to_v6.rs | 135 ++++++--- dump/src/reader/mod.rs | 8 +- dump/src/reader/v4/errors.rs | 320 +++++++++++++++++++++ dump/src/reader/v4/mod.rs | 27 +- dump/src/reader/v4/tasks.rs | 14 +- dump/src/reader/v5/errors.rs | 285 ++++++++++++++++++ dump/src/reader/v5/mod.rs | 23 +- dump/src/reader/v5/tasks.rs | 14 +- dump/src/reader/v6.rs | 71 ++--- 13 files changed, 1355 insertions(+), 132 deletions(-) create mode 100644 dump/src/reader/compat/v4_to_v5.rs create mode 100644 dump/src/reader/v4/errors.rs create mode 100644 dump/src/reader/v5/errors.rs diff --git a/Cargo.lock b/Cargo.lock index 4dec9a239..94e59f487 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1147,6 +1147,7 @@ dependencies = [ "anyhow", "big_s", "flate2", + "http", "index", "index-scheduler", "insta", diff --git a/dump/Cargo.toml b/dump/Cargo.toml index 8cb8b028d..199bc1c79 100644 --- a/dump/Cargo.toml +++ b/dump/Cargo.toml @@ -20,6 +20,7 @@ log = "0.4.17" index-scheduler = { path = "../index-scheduler" } meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } +http = "0.2.8" [dev-dependencies] big_s = "1.0.2" diff --git a/dump/src/reader/compat/mod.rs b/dump/src/reader/compat/mod.rs index 291035d0e..361bbb90d 100644 --- a/dump/src/reader/compat/mod.rs +++ b/dump/src/reader/compat/mod.rs @@ -1,12 +1,150 @@ // pub mod v2; // pub mod v3; // pub mod v4; +use crate::Result; -// pub mod v4_to_v5; +use self::{ + v4_to_v5::CompatV4ToV5, + v5_to_v6::{CompatIndexV5ToV6, CompatV5ToV6}, +}; + +use super::{ + v5::V5Reader, + v6::{self, V6IndexReader, V6Reader}, +}; + +pub mod v4_to_v5; pub mod v5_to_v6; -pub struct Compat { - from: Box, +pub enum Compat { + Current(V6Reader), + Compat(CompatV5ToV6), +} + +impl Compat { + pub fn version(&self) -> crate::Version { + match self { + Compat::Current(current) => current.version(), + Compat::Compat(compat) => compat.version(), + } + } + + pub fn date(&self) -> Option { + match self { + Compat::Current(current) => current.date(), + Compat::Compat(compat) => compat.date(), + } + } + + pub fn instance_uid(&self) -> Result> { + match self { + Compat::Current(current) => current.instance_uid(), + Compat::Compat(compat) => compat.instance_uid(), + } + } + + pub fn indexes(&self) -> Result> + '_>> { + match self { + Compat::Current(current) => { + let indexes = Box::new(current.indexes()?.map(|res| res.map(CompatIndex::from))) + as Box> + '_>; + Ok(indexes) + } + Compat::Compat(compat) => { + let indexes = Box::new(compat.indexes()?.map(|res| res.map(CompatIndex::from))) + as Box> + '_>; + Ok(indexes) + } + } + } + + pub fn tasks( + &mut self, + ) -> Box)>> + '_> { + match self { + Compat::Current(current) => current.tasks(), + Compat::Compat(compat) => compat.tasks(), + } + } + + pub fn keys(&mut self) -> Box> + '_> { + match self { + Compat::Current(current) => current.keys(), + Compat::Compat(compat) => compat.keys(), + } + } +} + +impl From for Compat { + fn from(value: V6Reader) -> Self { + Compat::Current(value) + } +} + +impl From for Compat { + fn from(value: CompatV5ToV6) -> Self { + Compat::Compat(value) + } +} + +impl From for Compat { + fn from(value: V5Reader) -> Self { + 
Compat::Compat(value.to_v6()) + } +} + +impl From for Compat { + fn from(value: CompatV4ToV5) -> Self { + Compat::Compat(value.to_v6()) + } +} + +pub enum CompatIndex { + Current(v6::V6IndexReader), + Compat(CompatIndexV5ToV6), +} + +impl CompatIndex { + pub fn new_v6(v6: v6::V6IndexReader) -> CompatIndex { + CompatIndex::Current(v6) + } + + pub fn metadata(&self) -> &crate::IndexMetadata { + match self { + CompatIndex::Current(v6) => v6.metadata(), + CompatIndex::Compat(compat) => compat.metadata(), + } + } + + pub fn documents(&mut self) -> Result> + '_>> { + match self { + CompatIndex::Current(v6) => v6 + .documents() + .map(|iter| Box::new(iter) as Box> + '_>), + CompatIndex::Compat(compat) => compat + .documents() + .map(|iter| Box::new(iter) as Box> + '_>), + } + } + + pub fn settings(&mut self) -> Result> { + match self { + CompatIndex::Current(v6) => v6.settings(), + CompatIndex::Compat(compat) => compat.settings(), + } + } +} + +impl From for CompatIndex { + fn from(value: V6IndexReader) -> Self { + CompatIndex::Current(value) + } +} + +impl From for CompatIndex { + fn from(value: CompatIndexV5ToV6) -> Self { + CompatIndex::Compat(value) + } } /// Parses the v1 version of the Asc ranking rules `asc(price)`and returns the field name. diff --git a/dump/src/reader/compat/v4_to_v5.rs b/dump/src/reader/compat/v4_to_v5.rs new file mode 100644 index 000000000..8e1c01093 --- /dev/null +++ b/dump/src/reader/compat/v4_to_v5.rs @@ -0,0 +1,444 @@ +use std::fs::File; + +use crate::reader::{v4, v5, DumpReader, IndexReader}; +use crate::Result; + +use super::v5_to_v6::CompatV5ToV6; + +pub struct CompatV4ToV5 { + from: v4::V4Reader, +} + +impl CompatV4ToV5 { + pub fn new(v4: v4::V4Reader) -> CompatV4ToV5 { + CompatV4ToV5 { from: v4 } + } + + pub fn to_v6(self) -> CompatV5ToV6 { + CompatV5ToV6::Compat(self) + } + + pub fn version(&self) -> crate::Version { + self.from.version() + } + + pub fn date(&self) -> Option { + self.from.date() + } + + pub fn instance_uid(&self) -> Result> { + self.from.instance_uid() + } + + pub fn indexes(&self) -> Result> + '_> { + Ok(self.from.indexes()?.map(|index_reader| -> Result<_> { + let compat = CompatIndexV4ToV5::new(index_reader?); + Ok(compat) + })) + } + + pub fn tasks( + &mut self, + ) -> Box)>> + '_> { + Box::new(self.from.tasks().map(|task| { + task.map(|(task, content_file)| { + // let task_view: v4::tasks::TaskView = task.into(); + + let task = v5::Task { + id: task.id, + content: match task.content { + v4::tasks::TaskContent::DocumentAddition { + content_uuid, + merge_strategy, + primary_key, + documents_count, + allow_index_creation, + } => v5::tasks::TaskContent::DocumentAddition { + index_uid: v5::meta::IndexUid(task.index_uid.0), + content_uuid, + merge_strategy: match merge_strategy { + v4::tasks::IndexDocumentsMethod::ReplaceDocuments => { + v5::tasks::IndexDocumentsMethod::ReplaceDocuments + } + v4::tasks::IndexDocumentsMethod::UpdateDocuments => { + v5::tasks::IndexDocumentsMethod::UpdateDocuments + } + }, + primary_key, + documents_count, + allow_index_creation, + }, + v4::tasks::TaskContent::DocumentDeletion(deletion) => { + v5::tasks::TaskContent::DocumentDeletion { + index_uid: v5::meta::IndexUid(task.index_uid.0), + deletion: match deletion { + v4::tasks::DocumentDeletion::Clear => { + v5::tasks::DocumentDeletion::Clear + } + v4::tasks::DocumentDeletion::Ids(ids) => { + v5::tasks::DocumentDeletion::Ids(ids) + } + }, + } + } + v4::tasks::TaskContent::SettingsUpdate { + settings, + is_deletion, + allow_index_creation, + } => 
v5::tasks::TaskContent::SettingsUpdate { + index_uid: v5::meta::IndexUid(task.index_uid.0), + settings: settings.into(), + is_deletion, + allow_index_creation, + }, + v4::tasks::TaskContent::IndexDeletion => { + v5::tasks::TaskContent::IndexDeletion { + index_uid: v5::meta::IndexUid(task.index_uid.0), + } + } + v4::tasks::TaskContent::IndexCreation { primary_key } => { + v5::tasks::TaskContent::IndexCreation { + index_uid: v5::meta::IndexUid(task.index_uid.0), + primary_key, + } + } + v4::tasks::TaskContent::IndexUpdate { primary_key } => { + v5::tasks::TaskContent::IndexUpdate { + index_uid: v5::meta::IndexUid(task.index_uid.0), + primary_key, + } + } + }, + events: task + .events + .into_iter() + .map(|event| match event { + v4::tasks::TaskEvent::Created(date) => { + v5::tasks::TaskEvent::Created(date) + } + v4::tasks::TaskEvent::Batched { + timestamp, + batch_id, + } => v5::tasks::TaskEvent::Batched { + timestamp, + batch_id, + }, + v4::tasks::TaskEvent::Processing(date) => { + v5::tasks::TaskEvent::Processing(date) + } + v4::tasks::TaskEvent::Succeded { result, timestamp } => { + v5::tasks::TaskEvent::Succeeded { + result: match result { + v4::tasks::TaskResult::DocumentAddition { + indexed_documents, + } => v5::tasks::TaskResult::DocumentAddition { + indexed_documents, + }, + v4::tasks::TaskResult::DocumentDeletion { + deleted_documents, + } => v5::tasks::TaskResult::DocumentDeletion { + deleted_documents, + }, + v4::tasks::TaskResult::ClearAll { deleted_documents } => { + v5::tasks::TaskResult::ClearAll { deleted_documents } + } + v4::tasks::TaskResult::Other => { + v5::tasks::TaskResult::Other + } + }, + timestamp, + } + } + v4::tasks::TaskEvent::Failed { error, timestamp } => { + v5::tasks::TaskEvent::Failed { + error: v5::ResponseError::from(error), + timestamp, + } + } + }) + .collect(), + }; + + (task, content_file) + }) + })) + } + + pub fn keys(&mut self) -> Box> + '_> { + Box::new(self.from.keys().map(|key| { + key.map(|key| v5::Key { + description: key.description, + name: None, + uid: v5::keys::KeyId::new_v4(), + actions: key + .actions + .into_iter() + .filter_map(|action| action.into()) + .collect(), + indexes: key + .indexes + .into_iter() + .map(|index| match index.as_str() { + "*" => v5::StarOr::Star, + _ => v5::StarOr::Other(v5::meta::IndexUid(index)), + }) + .collect(), + expires_at: key.expires_at, + created_at: key.created_at, + updated_at: key.updated_at, + }) + })) + } +} + +pub struct CompatIndexV4ToV5 { + from: v4::V4IndexReader, +} + +impl CompatIndexV4ToV5 { + pub fn new(v4: v4::V4IndexReader) -> CompatIndexV4ToV5 { + CompatIndexV4ToV5 { from: v4 } + } + + pub fn metadata(&self) -> &crate::IndexMetadata { + self.from.metadata() + } + + pub fn documents(&mut self) -> Result> + '_>> { + self.from + .documents() + .map(|iter| Box::new(iter) as Box> + '_>) + } + + pub fn settings(&mut self) -> Result> { + Ok(v5::Settings::::from(self.from.settings()?).check()) + } +} + +impl From> for v5::Setting { + fn from(setting: v4::Setting) -> Self { + match setting { + v4::Setting::Set(t) => v5::Setting::Set(t), + v4::Setting::Reset => v5::Setting::Reset, + v4::Setting::NotSet => v5::Setting::NotSet, + } + } +} + +impl From for v5::ResponseError { + fn from(error: v4::ResponseError) -> Self { + let code = match error.error_code.as_ref() { + "CreateIndex" => v5::Code::CreateIndex, + "IndexAlreadyExists" => v5::Code::IndexAlreadyExists, + "IndexNotFound" => v5::Code::IndexNotFound, + "InvalidIndexUid" => v5::Code::InvalidIndexUid, + "InvalidMinWordLengthForTypo" => 
v5::Code::InvalidMinWordLengthForTypo, + "InvalidState" => v5::Code::InvalidState, + "MissingPrimaryKey" => v5::Code::MissingPrimaryKey, + "PrimaryKeyAlreadyPresent" => v5::Code::PrimaryKeyAlreadyPresent, + "MaxFieldsLimitExceeded" => v5::Code::MaxFieldsLimitExceeded, + "MissingDocumentId" => v5::Code::MissingDocumentId, + "InvalidDocumentId" => v5::Code::InvalidDocumentId, + "Filter" => v5::Code::Filter, + "Sort" => v5::Code::Sort, + "BadParameter" => v5::Code::BadParameter, + "BadRequest" => v5::Code::BadRequest, + "DatabaseSizeLimitReached" => v5::Code::DatabaseSizeLimitReached, + "DocumentNotFound" => v5::Code::DocumentNotFound, + "Internal" => v5::Code::Internal, + "InvalidGeoField" => v5::Code::InvalidGeoField, + "InvalidRankingRule" => v5::Code::InvalidRankingRule, + "InvalidStore" => v5::Code::InvalidStore, + "InvalidToken" => v5::Code::InvalidToken, + "MissingAuthorizationHeader" => v5::Code::MissingAuthorizationHeader, + "NoSpaceLeftOnDevice" => v5::Code::NoSpaceLeftOnDevice, + "DumpNotFound" => v5::Code::DumpNotFound, + "TaskNotFound" => v5::Code::TaskNotFound, + "PayloadTooLarge" => v5::Code::PayloadTooLarge, + "RetrieveDocument" => v5::Code::RetrieveDocument, + "SearchDocuments" => v5::Code::SearchDocuments, + "UnsupportedMediaType" => v5::Code::UnsupportedMediaType, + "DumpAlreadyInProgress" => v5::Code::DumpAlreadyInProgress, + "DumpProcessFailed" => v5::Code::DumpProcessFailed, + "InvalidContentType" => v5::Code::InvalidContentType, + "MissingContentType" => v5::Code::MissingContentType, + "MalformedPayload" => v5::Code::MalformedPayload, + "MissingPayload" => v5::Code::MissingPayload, + "ApiKeyNotFound" => v5::Code::ApiKeyNotFound, + "MissingParameter" => v5::Code::MissingParameter, + "InvalidApiKeyActions" => v5::Code::InvalidApiKeyActions, + "InvalidApiKeyIndexes" => v5::Code::InvalidApiKeyIndexes, + "InvalidApiKeyExpiresAt" => v5::Code::InvalidApiKeyExpiresAt, + "InvalidApiKeyDescription" => v5::Code::InvalidApiKeyDescription, + other => { + log::warn!("Unknown error code {}", other); + v5::Code::UnretrievableErrorCode + } + }; + v5::ResponseError::from_msg(error.message, code) + } +} + +impl From> for v5::Settings { + fn from(settings: v4::Settings) -> Self { + v5::Settings { + displayed_attributes: settings.displayed_attributes.into(), + searchable_attributes: settings.searchable_attributes.into(), + filterable_attributes: settings.filterable_attributes.into(), + sortable_attributes: settings.sortable_attributes.into(), + ranking_rules: settings.ranking_rules.into(), + stop_words: settings.stop_words.into(), + synonyms: settings.synonyms.into(), + distinct_attribute: settings.distinct_attribute.into(), + typo_tolerance: match settings.typo_tolerance { + v4::Setting::Set(typo) => v5::Setting::Set(v5::TypoTolerance { + enabled: typo.enabled.into(), + min_word_size_for_typos: match typo.min_word_size_for_typos { + v4::Setting::Set(t) => v5::Setting::Set(v5::MinWordSizeForTypos { + one_typo: t.one_typo.into(), + two_typos: t.two_typos.into(), + }), + v4::Setting::Reset => v5::Setting::Reset, + v4::Setting::NotSet => v5::Setting::NotSet, + }, + disable_on_words: typo.disable_on_words.into(), + disable_on_attributes: typo.disable_on_attributes.into(), + }), + v4::Setting::Reset => v5::Setting::Reset, + v4::Setting::NotSet => v5::Setting::NotSet, + }, + faceting: v5::Setting::NotSet, + pagination: v5::Setting::NotSet, + _kind: std::marker::PhantomData, + } + } +} + +impl From for Option { + fn from(key: v4::Action) -> Self { + match key { + v4::Action::All => 
Some(v5::Action::All), + v4::Action::Search => Some(v5::Action::Search), + v4::Action::DocumentsAdd => Some(v5::Action::DocumentsAdd), + v4::Action::DocumentsGet => Some(v5::Action::DocumentsGet), + v4::Action::DocumentsDelete => Some(v5::Action::DocumentsDelete), + v4::Action::IndexesAdd => Some(v5::Action::IndexesAdd), + v4::Action::IndexesGet => Some(v5::Action::IndexesGet), + v4::Action::IndexesUpdate => Some(v5::Action::IndexesUpdate), + v4::Action::IndexesDelete => Some(v5::Action::IndexesDelete), + v4::Action::TasksGet => Some(v5::Action::TasksGet), + v4::Action::SettingsGet => Some(v5::Action::SettingsGet), + v4::Action::SettingsUpdate => Some(v5::Action::SettingsUpdate), + v4::Action::StatsGet => Some(v5::Action::StatsGet), + v4::Action::DumpsCreate => Some(v5::Action::DumpsCreate), + v4::Action::DumpsGet => None, + v4::Action::Version => Some(v5::Action::Version), + } + } +} + +#[cfg(test)] +pub(crate) mod test { + use std::{fs::File, io::BufReader}; + + use flate2::bufread::GzDecoder; + use tempfile::TempDir; + + use super::*; + + #[test] + fn compat_v4_v5() { + let dump = File::open("tests/assets/v4.dump").unwrap(); + let dir = TempDir::new().unwrap(); + let mut dump = BufReader::new(dump); + let gz = GzDecoder::new(&mut dump); + let mut archive = tar::Archive::new(gz); + archive.unpack(dir.path()).unwrap(); + + let mut dump = v4::V4Reader::open(dir).unwrap().to_v5(); + + // top level infos + insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00"); + insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d"); + + // tasks + let tasks = dump.tasks().collect::>>().unwrap(); + let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); + insta::assert_json_snapshot!(tasks); + assert_eq!(update_files.len(), 22); + assert!(update_files[0].is_none()); // the dump creation + assert!(update_files[1].is_some()); // the enqueued document addition + assert!(update_files[2..].iter().all(|u| u.is_none())); // everything already processed + + // keys + let keys = dump.keys().collect::>>().unwrap(); + insta::assert_json_snapshot!(keys); + + // indexes + let mut indexes = dump.indexes().unwrap().collect::>>().unwrap(); + // the index are not ordered in any way by default + indexes.sort_by_key(|index| index.metadata().uid.to_string()); + + let mut products = indexes.pop().unwrap(); + let mut movies = indexes.pop().unwrap(); + let mut spells = indexes.pop().unwrap(); + assert!(indexes.is_empty()); + + // products + insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "products", + "primaryKey": "sku", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_debug_snapshot!(products.settings()); + let documents = products + .documents() + .unwrap() + .collect::>>() + .unwrap(); + assert_eq!(documents.len(), 10); + insta::assert_json_snapshot!(documents); + + // movies + insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "movies", + "primaryKey": "id", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_debug_snapshot!(movies.settings()); + let documents = movies + .documents() + .unwrap() + .collect::>>() + .unwrap(); + assert_eq!(documents.len(), 200); + insta::assert_debug_snapshot!(documents); + + // spells + insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => 
"[now]" }, @r###" + { + "uid": "dnd_spells", + "primaryKey": "index", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_debug_snapshot!(spells.settings()); + let documents = spells + .documents() + .unwrap() + .collect::>>() + .unwrap(); + assert_eq!(documents.len(), 10); + insta::assert_json_snapshot!(documents); + } +} diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 7c085a224..188910f48 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -1,46 +1,65 @@ use crate::reader::{v5, v6, DumpReader, IndexReader}; use crate::Result; -pub struct CompatV5ToV6 { - from: v5::V5Reader, +use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5}; + +pub enum CompatV5ToV6 { + V5(v5::V5Reader), + Compat(CompatV4ToV5), } impl CompatV5ToV6 { - pub fn new(v5: v5::V5Reader) -> CompatV5ToV6 { - CompatV5ToV6 { from: v5 } - } -} - -impl DumpReader for CompatV5ToV6 { - fn version(&self) -> crate::Version { - self.from.version() + pub fn new_v5(v5: v5::V5Reader) -> CompatV5ToV6 { + CompatV5ToV6::V5(v5) } - fn date(&self) -> Option { - self.from.date() + pub fn version(&self) -> crate::Version { + match self { + CompatV5ToV6::V5(v5) => v5.version(), + CompatV5ToV6::Compat(compat) => compat.version(), + } } - fn instance_uid(&self) -> Result> { - self.from.instance_uid() + pub fn date(&self) -> Option { + match self { + CompatV5ToV6::V5(v5) => v5.date(), + CompatV5ToV6::Compat(compat) => compat.date(), + } } - fn indexes( - &self, - ) -> Result>> + '_>> - { - Ok(Box::new(self.from.indexes()?.map( - |index_reader| -> Result<_> { - let compat = Box::new(CompatIndexV5ToV6::new(index_reader?)) - as Box; - Ok(compat) - }, - ))) + pub fn instance_uid(&self) -> Result> { + match self { + CompatV5ToV6::V5(v5) => v5.instance_uid(), + CompatV5ToV6::Compat(compat) => compat.instance_uid(), + } } - fn tasks( + pub fn indexes(&self) -> Result> + '_>> { + let indexes = match self { + CompatV5ToV6::V5(v5) => Box::new( + v5.indexes()? + .map(|index| index.map(CompatIndexV5ToV6::from)), + ) + as Box> + '_>, + + CompatV5ToV6::Compat(compat) => Box::new( + compat + .indexes()? 
+ .map(|index| index.map(CompatIndexV5ToV6::from)), + ) + as Box> + '_>, + }; + Ok(indexes) + } + + pub fn tasks( &mut self, ) -> Box)>> + '_> { - Box::new(self.from.tasks().map(|task| { + let tasks = match self { + CompatV5ToV6::V5(v5) => v5.tasks(), + CompatV5ToV6::Compat(compat) => compat.tasks(), + }; + Box::new(tasks.map(|task| { task.map(|(task, content_file)| { let task_view: v5::tasks::TaskView = task.into(); @@ -101,8 +120,12 @@ impl DumpReader for CompatV5ToV6 { })) } - fn keys(&mut self) -> Box> + '_> { - Box::new(self.from.keys().map(|key| { + pub fn keys(&mut self) -> Box> + '_> { + let keys = match self { + CompatV5ToV6::V5(v5) => v5.keys(), + CompatV5ToV6::Compat(compat) => compat.keys(), + }; + Box::new(keys.map(|key| { key.map(|key| v6::Key { description: key.description, name: key.name, @@ -130,29 +153,51 @@ impl DumpReader for CompatV5ToV6 { } } -pub struct CompatIndexV5ToV6 { - from: v5::V5IndexReader, +pub enum CompatIndexV5ToV6 { + V5(v5::V5IndexReader), + Compat(CompatIndexV4ToV5), +} + +impl From for CompatIndexV5ToV6 { + fn from(index_reader: v5::V5IndexReader) -> Self { + Self::V5(index_reader) + } +} + +impl From for CompatIndexV5ToV6 { + fn from(index_reader: CompatIndexV4ToV5) -> Self { + Self::Compat(index_reader) + } } impl CompatIndexV5ToV6 { - pub fn new(v5: v5::V5IndexReader) -> CompatIndexV5ToV6 { - CompatIndexV5ToV6 { from: v5 } - } -} - -impl IndexReader for CompatIndexV5ToV6 { - fn metadata(&self) -> &crate::IndexMetadata { - self.from.metadata() + pub fn new_v5(v5: v5::V5IndexReader) -> CompatIndexV5ToV6 { + CompatIndexV5ToV6::V5(v5) } - fn documents(&mut self) -> Result> + '_>> { - self.from - .documents() - .map(|iter| Box::new(iter) as Box> + '_>) + pub fn metadata(&self) -> &crate::IndexMetadata { + match self { + CompatIndexV5ToV6::V5(v5) => v5.metadata(), + CompatIndexV5ToV6::Compat(compat) => compat.metadata(), + } } - fn settings(&mut self) -> Result> { - Ok(v6::Settings::::from(self.from.settings()?).check()) + pub fn documents(&mut self) -> Result> + '_>> { + match self { + CompatIndexV5ToV6::V5(v5) => v5 + .documents() + .map(|iter| Box::new(iter) as Box> + '_>), + CompatIndexV5ToV6::Compat(compat) => compat + .documents() + .map(|iter| Box::new(iter) as Box> + '_>), + } + } + + pub fn settings(&mut self) -> Result> { + match self { + CompatIndexV5ToV6::V5(v5) => Ok(v6::Settings::from(v5.settings()?).check()), + CompatIndexV5ToV6::Compat(compat) => Ok(v6::Settings::from(compat.settings()?).check()), + } } } diff --git a/dump/src/reader/mod.rs b/dump/src/reader/mod.rs index b0f1ad74a..df6f93cd7 100644 --- a/dump/src/reader/mod.rs +++ b/dump/src/reader/mod.rs @@ -13,6 +13,8 @@ use uuid::Uuid; // use crate::reader::compat::Compat; use crate::{IndexMetadata, Result, Version}; +use self::compat::Compat; + // use self::loaders::{v2, v3, v4, v5}; // pub mod error; @@ -23,7 +25,7 @@ pub(self) mod v4; pub(self) mod v5; pub(self) mod v6; -pub fn open(dump: impl Read) -> Result> { +pub fn open(dump: impl Read) -> Result { let path = TempDir::new()?; let mut dump = BufReader::new(dump); let gz = GzDecoder::new(&mut dump); @@ -44,8 +46,8 @@ pub fn open(dump: impl Read) -> Result> { Version::V2 => todo!(), Version::V3 => todo!(), Version::V4 => todo!(), - Version::V5 => Ok(Box::new(v5::V5Reader::open(path)?.to_v6())), - Version::V6 => Ok(Box::new(v6::V6Reader::open(path)?)), + Version::V5 => Ok(v5::V5Reader::open(path)?.to_v6().into()), + Version::V6 => Ok(v6::V6Reader::open(path)?.into()), } } diff --git a/dump/src/reader/v4/errors.rs 
b/dump/src/reader/v4/errors.rs new file mode 100644 index 000000000..56a91aca9 --- /dev/null +++ b/dump/src/reader/v4/errors.rs @@ -0,0 +1,320 @@ +use std::fmt; + +use http::StatusCode; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] +pub struct ResponseError { + #[serde(skip)] + #[cfg_attr( + feature = "test-traits", + proptest(strategy = "strategy::status_code_strategy()") + )] + pub code: StatusCode, + pub message: String, + #[serde(rename = "code")] + pub error_code: String, + #[serde(rename = "type")] + pub error_type: String, + #[serde(rename = "link")] + pub error_link: String, +} + +impl ResponseError { + pub fn from_msg(message: String, code: Code) -> Self { + Self { + code: code.http(), + message, + error_code: code.err_code().error_name.to_string(), + error_type: code.type_(), + error_link: code.url(), + } + } +} + +impl fmt::Display for ResponseError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.message.fmt(f) + } +} + +impl std::error::Error for ResponseError {} + +impl From for ResponseError +where + T: ErrorCode, +{ + fn from(other: T) -> Self { + Self { + code: other.http_status(), + message: other.to_string(), + error_code: other.error_name(), + error_type: other.error_type(), + error_link: other.error_url(), + } + } +} + +pub trait ErrorCode: std::error::Error { + fn error_code(&self) -> Code; + + /// returns the HTTP status code ascociated with the error + fn http_status(&self) -> StatusCode { + self.error_code().http() + } + + /// returns the doc url ascociated with the error + fn error_url(&self) -> String { + self.error_code().url() + } + + /// returns error name, used as error code + fn error_name(&self) -> String { + self.error_code().name() + } + + /// return the error type + fn error_type(&self) -> String { + self.error_code().type_() + } +} + +#[allow(clippy::enum_variant_names)] +enum ErrorType { + InternalError, + InvalidRequestError, + AuthenticationError, +} + +impl fmt::Display for ErrorType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use ErrorType::*; + + match self { + InternalError => write!(f, "internal"), + InvalidRequestError => write!(f, "invalid_request"), + AuthenticationError => write!(f, "auth"), + } + } +} + +#[derive(Serialize, Deserialize, Debug, Clone, Copy)] +pub enum Code { + // index related error + CreateIndex, + IndexAlreadyExists, + IndexNotFound, + InvalidIndexUid, + InvalidMinWordLengthForTypo, + + // invalid state error + InvalidState, + MissingPrimaryKey, + PrimaryKeyAlreadyPresent, + + MaxFieldsLimitExceeded, + MissingDocumentId, + InvalidDocumentId, + + Filter, + Sort, + + BadParameter, + BadRequest, + DatabaseSizeLimitReached, + DocumentNotFound, + Internal, + InvalidGeoField, + InvalidRankingRule, + InvalidStore, + InvalidToken, + MissingAuthorizationHeader, + NoSpaceLeftOnDevice, + DumpNotFound, + TaskNotFound, + PayloadTooLarge, + RetrieveDocument, + SearchDocuments, + UnsupportedMediaType, + + DumpAlreadyInProgress, + DumpProcessFailed, + + InvalidContentType, + MissingContentType, + MalformedPayload, + MissingPayload, + + ApiKeyNotFound, + MissingParameter, + InvalidApiKeyActions, + InvalidApiKeyIndexes, + InvalidApiKeyExpiresAt, + InvalidApiKeyDescription, +} + +impl Code { + /// ascociate a `Code` variant to the actual ErrCode + fn err_code(&self) -> ErrCode { + use Code::*; + + match self { + // index related errors 
+ // create index is thrown on internal error while creating an index. + CreateIndex => { + ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR) + } + IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT), + // thrown when requesting an unexisting index + IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND), + InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST), + + // invalid state error + InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR), + // thrown when no primary key has been set + MissingPrimaryKey => { + ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST) + } + // error thrown when trying to set an already existing primary key + PrimaryKeyAlreadyPresent => { + ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST) + } + // invalid ranking rule + InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST), + + // invalid database + InvalidStore => { + ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR) + } + + // invalid document + MaxFieldsLimitExceeded => { + ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST) + } + MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST), + InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST), + + // error related to filters + Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST), + // error related to sorts + Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST), + + BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST), + BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST), + DatabaseSizeLimitReached => ErrCode::internal( + "database_size_limit_reached", + StatusCode::INTERNAL_SERVER_ERROR, + ), + DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND), + Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR), + InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST), + InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN), + MissingAuthorizationHeader => { + ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED) + } + TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND), + DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND), + NoSpaceLeftOnDevice => { + ErrCode::internal("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR) + } + PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE), + RetrieveDocument => { + ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST) + } + SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST), + UnsupportedMediaType => { + ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE) + } + + // error related to dump + DumpAlreadyInProgress => { + ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT) + } + DumpProcessFailed => { + ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR) + } + MissingContentType => { + ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE) + } + MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST), + InvalidContentType => { + 
ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE) + } + MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST), + + // error related to keys + ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND), + MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST), + InvalidApiKeyActions => { + ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST) + } + InvalidApiKeyIndexes => { + ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST) + } + InvalidApiKeyExpiresAt => { + ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST) + } + InvalidApiKeyDescription => { + ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST) + } + InvalidMinWordLengthForTypo => { + ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST) + } + } + } + + /// return the HTTP status code ascociated with the `Code` + fn http(&self) -> StatusCode { + self.err_code().status_code + } + + /// return error name, used as error code + fn name(&self) -> String { + self.err_code().error_name.to_string() + } + + /// return the error type + fn type_(&self) -> String { + self.err_code().error_type.to_string() + } + + /// return the doc url ascociated with the error + fn url(&self) -> String { + format!("https://docs.meilisearch.com/errors#{}", self.name()) + } +} + +/// Internal structure providing a convenient way to create error codes +struct ErrCode { + status_code: StatusCode, + error_type: ErrorType, + error_name: &'static str, +} + +impl ErrCode { + fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::AuthenticationError, + } + } + + fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::InternalError, + } + } + + fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::InvalidRequestError, + } + } +} diff --git a/dump/src/reader/v4/mod.rs b/dump/src/reader/v4/mod.rs index e58b711f7..f66f2caad 100644 --- a/dump/src/reader/v4/mod.rs +++ b/dump/src/reader/v4/mod.rs @@ -9,10 +9,11 @@ use tempfile::TempDir; use time::OffsetDateTime; use uuid::Uuid; -mod keys; -mod meta; -mod settings; -mod tasks; +pub mod errors; +pub mod keys; +pub mod meta; +pub mod settings; +pub mod tasks; use crate::{IndexMetadata, Result, Version}; @@ -46,7 +47,7 @@ pub type StarOr = meta::StarOr; pub type IndexUid = meta::IndexUid; // everything related to the errors -pub type ResponseError = tasks::ResponseError; +pub type ResponseError = errors::ResponseError; pub type Code = meilisearch_types::error::Code; #[derive(Serialize, Deserialize, Debug)] @@ -119,8 +120,8 @@ impl V4Reader { })) } - pub fn tasks(&mut self) -> impl Iterator)>> + '_ { - (&mut self.tasks).lines().map(|line| -> Result<_> { + pub fn tasks(&mut self) -> Box)>> + '_> { + Box::new((&mut self.tasks).lines().map(|line| -> Result<_> { let task: Task = serde_json::from_str(&line?)?; if !task.is_finished() { if let Some(uuid) = task.get_content_uuid() { @@ -137,13 +138,15 @@ impl V4Reader { } else { Ok((task, None)) } - }) + })) } - pub fn keys(&mut self) -> impl Iterator> + '_ { - (&mut self.keys) - .lines() - .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) 
}) + pub fn keys(&mut self) -> Box> + '_> { + Box::new( + (&mut self.keys) + .lines() + .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }), + ) } } diff --git a/dump/src/reader/v4/tasks.rs b/dump/src/reader/v4/tasks.rs index 5d2e519c0..dbe4d225e 100644 --- a/dump/src/reader/v4/tasks.rs +++ b/dump/src/reader/v4/tasks.rs @@ -5,6 +5,7 @@ use time::{Duration, OffsetDateTime}; use uuid::Uuid; use super::{ + errors::ResponseError, meta::IndexUid, settings::{Settings, Unchecked}, }; @@ -148,19 +149,6 @@ pub enum TaskResult { Other, } -#[derive(Debug, Deserialize, Clone, PartialEq, Eq)] -#[cfg_attr(test, derive(serde::Serialize))] -#[serde(rename_all = "camelCase")] -pub struct ResponseError { - pub message: String, - #[serde(rename = "code")] - pub error_code: String, - #[serde(rename = "type")] - pub error_type: String, - #[serde(rename = "link")] - pub error_link: String, -} - impl Task { /// Return true when a task is finished. /// A task is finished when its last state is either `Succeeded` or `Failed`. diff --git a/dump/src/reader/v5/errors.rs b/dump/src/reader/v5/errors.rs new file mode 100644 index 000000000..74c3fb58d --- /dev/null +++ b/dump/src/reader/v5/errors.rs @@ -0,0 +1,285 @@ +use std::fmt; + +use http::StatusCode; +use serde::Deserialize; + +#[derive(Debug, Deserialize, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] +#[cfg_attr(test, derive(serde::Serialize))] +pub struct ResponseError { + #[serde(skip)] + code: StatusCode, + + pub message: String, + #[serde(rename = "code")] + pub error_code: String, + #[serde(rename = "type")] + pub error_type: String, + #[serde(rename = "link")] + pub error_link: String, +} + +impl ResponseError { + pub fn from_msg(message: String, code: Code) -> Self { + Self { + code: code.http(), + message, + error_code: code.err_code().error_name.to_string(), + error_type: code.type_(), + error_link: code.url(), + } + } +} + +#[derive(Deserialize, Debug, Clone, Copy)] +#[cfg_attr(test, derive(serde::Serialize))] +pub enum Code { + // index related error + CreateIndex, + IndexAlreadyExists, + IndexNotFound, + InvalidIndexUid, + InvalidMinWordLengthForTypo, + + // invalid state error + InvalidState, + MissingPrimaryKey, + PrimaryKeyAlreadyPresent, + + MaxFieldsLimitExceeded, + MissingDocumentId, + InvalidDocumentId, + + Filter, + Sort, + + BadParameter, + BadRequest, + DatabaseSizeLimitReached, + DocumentNotFound, + Internal, + InvalidGeoField, + InvalidRankingRule, + InvalidStore, + InvalidToken, + MissingAuthorizationHeader, + NoSpaceLeftOnDevice, + DumpNotFound, + TaskNotFound, + PayloadTooLarge, + RetrieveDocument, + SearchDocuments, + UnsupportedMediaType, + + DumpAlreadyInProgress, + DumpProcessFailed, + + InvalidContentType, + MissingContentType, + MalformedPayload, + MissingPayload, + + ApiKeyNotFound, + MissingParameter, + InvalidApiKeyActions, + InvalidApiKeyIndexes, + InvalidApiKeyExpiresAt, + InvalidApiKeyDescription, + InvalidApiKeyName, + InvalidApiKeyUid, + ImmutableField, + ApiKeyAlreadyExists, + + UnretrievableErrorCode, +} + +impl Code { + /// associate a `Code` variant to the actual ErrCode + fn err_code(&self) -> ErrCode { + use Code::*; + + match self { + // index related errors + // create index is thrown on internal error while creating an index. 
+ CreateIndex => { + ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR) + } + IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT), + // thrown when requesting an unexisting index + IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND), + InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST), + + // invalid state error + InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR), + // thrown when no primary key has been set + MissingPrimaryKey => { + ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST) + } + // error thrown when trying to set an already existing primary key + PrimaryKeyAlreadyPresent => { + ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST) + } + // invalid ranking rule + InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST), + + // invalid database + InvalidStore => { + ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR) + } + + // invalid document + MaxFieldsLimitExceeded => { + ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST) + } + MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST), + InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST), + + // error related to filters + Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST), + // error related to sorts + Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST), + + BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST), + BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST), + DatabaseSizeLimitReached => ErrCode::internal( + "database_size_limit_reached", + StatusCode::INTERNAL_SERVER_ERROR, + ), + DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND), + Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR), + InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST), + InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN), + MissingAuthorizationHeader => { + ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED) + } + TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND), + DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND), + NoSpaceLeftOnDevice => { + ErrCode::internal("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR) + } + PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE), + RetrieveDocument => { + ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST) + } + SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST), + UnsupportedMediaType => { + ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE) + } + + // error related to dump + DumpAlreadyInProgress => { + ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT) + } + DumpProcessFailed => { + ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR) + } + MissingContentType => { + ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE) + } + MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST), + InvalidContentType => { + ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE) + } + MissingPayload => 
ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST), + + // error related to keys + ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND), + MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST), + InvalidApiKeyActions => { + ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST) + } + InvalidApiKeyIndexes => { + ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST) + } + InvalidApiKeyExpiresAt => { + ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST) + } + InvalidApiKeyDescription => { + ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST) + } + InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST), + InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST), + ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT), + ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST), + InvalidMinWordLengthForTypo => { + ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST) + } + UnretrievableErrorCode => { + ErrCode::invalid("unretrievable_error_code", StatusCode::BAD_REQUEST) + } + } + } + + /// return the HTTP status code associated with the `Code` + fn http(&self) -> StatusCode { + self.err_code().status_code + } + + /// return error name, used as error code + fn name(&self) -> String { + self.err_code().error_name.to_string() + } + + /// return the error type + fn type_(&self) -> String { + self.err_code().error_type.to_string() + } + + /// return the doc url associated with the error + fn url(&self) -> String { + format!("https://docs.meilisearch.com/errors#{}", self.name()) + } +} + +/// Internal structure providing a convenient way to create error codes +struct ErrCode { + status_code: StatusCode, + error_type: ErrorType, + error_name: &'static str, +} + +impl ErrCode { + fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::AuthenticationError, + } + } + + fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::InternalError, + } + } + + fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode { + ErrCode { + status_code, + error_name, + error_type: ErrorType::InvalidRequestError, + } + } +} + +#[allow(clippy::enum_variant_names)] +enum ErrorType { + InternalError, + InvalidRequestError, + AuthenticationError, +} + +impl fmt::Display for ErrorType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use ErrorType::*; + + match self { + InternalError => write!(f, "internal"), + InvalidRequestError => write!(f, "invalid_request"), + AuthenticationError => write!(f, "auth"), + } + } +} diff --git a/dump/src/reader/v5/mod.rs b/dump/src/reader/v5/mod.rs index 252eda4a6..b95736070 100644 --- a/dump/src/reader/v5/mod.rs +++ b/dump/src/reader/v5/mod.rs @@ -47,6 +47,7 @@ use crate::{IndexMetadata, Result, Version}; use super::{compat::v5_to_v6::CompatV5ToV6, DumpReader, IndexReader}; +pub mod errors; pub mod keys; pub mod meta; pub mod settings; @@ -80,8 +81,8 @@ pub type StarOr = meta::StarOr; pub type IndexUid = meta::IndexUid; // everything related to the errors -pub type ResponseError = tasks::ResponseError; -pub type Code = meilisearch_types::error::Code; +pub type ResponseError = errors::ResponseError; +pub type Code = 
errors::Code; #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] @@ -124,7 +125,7 @@ impl V5Reader { } pub fn to_v6(self) -> CompatV5ToV6 { - CompatV5ToV6::new(self) + CompatV5ToV6::new_v5(self) } pub fn version(&self) -> Version { @@ -153,8 +154,8 @@ impl V5Reader { })) } - pub fn tasks(&mut self) -> impl Iterator)>> + '_ { - (&mut self.tasks).lines().map(|line| -> Result<_> { + pub fn tasks(&mut self) -> Box)>> + '_> { + Box::new((&mut self.tasks).lines().map(|line| -> Result<_> { let task: Task = serde_json::from_str(&line?)?; if !task.is_finished() { if let Some(uuid) = task.get_content_uuid() { @@ -171,13 +172,15 @@ impl V5Reader { } else { Ok((task, None)) } - }) + })) } - pub fn keys(&mut self) -> impl Iterator> + '_ { - (&mut self.keys) - .lines() - .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }) + pub fn keys(&mut self) -> Box> + '_> { + Box::new( + (&mut self.keys) + .lines() + .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }), + ) } } diff --git a/dump/src/reader/v5/tasks.rs b/dump/src/reader/v5/tasks.rs index ce245b536..835fbab5e 100644 --- a/dump/src/reader/v5/tasks.rs +++ b/dump/src/reader/v5/tasks.rs @@ -5,6 +5,7 @@ use time::{Duration, OffsetDateTime}; use uuid::Uuid; use super::{ + errors::ResponseError, meta::IndexUid, settings::{Settings, Unchecked}, }; @@ -113,19 +114,6 @@ pub enum TaskResult { Other, } -#[derive(Debug, Deserialize, Clone, PartialEq, Eq)] -#[cfg_attr(test, derive(serde::Serialize))] -#[serde(rename_all = "camelCase")] -pub struct ResponseError { - pub message: String, - #[serde(rename = "code")] - pub error_code: String, - #[serde(rename = "type")] - pub error_type: String, - #[serde(rename = "link")] - pub error_link: String, -} - impl Task { /// Return true when a task is finished. /// A task is finished when its last state is either `Succeeded` or `Failed`. diff --git a/dump/src/reader/v6.rs b/dump/src/reader/v6.rs index c3db72df1..f0a6bd543 100644 --- a/dump/src/reader/v6.rs +++ b/dump/src/reader/v6.rs @@ -68,42 +68,45 @@ impl V6Reader { dump, }) } - fn version(&self) -> Version { + + pub fn version(&self) -> Version { Version::V6 } - fn date(&self) -> Option { + pub fn date(&self) -> Option { Some(self.metadata.dump_date) } - fn instance_uid(&self) -> Result> { + pub fn instance_uid(&self) -> Result> { Ok(Some(self.instance_uid)) } - fn indexes(&self) -> Result> + '_> { + pub fn indexes(&self) -> Result> + '_>> { let entries = fs::read_dir(self.dump.path().join("indexes"))?; - Ok(entries - .map(|entry| -> Result> { - let entry = entry?; - if entry.file_type()?.is_dir() { - let index = V6IndexReader::new( - entry - .file_name() - .to_str() - .ok_or(Error::BadIndexName)? - .to_string(), - &entry.path(), - )?; - Ok(Some(index)) - } else { - Ok(None) - } - }) - .filter_map(|entry| entry.transpose())) + Ok(Box::new( + entries + .map(|entry| -> Result> { + let entry = entry?; + if entry.file_type()?.is_dir() { + let index = V6IndexReader::new( + entry + .file_name() + .to_str() + .ok_or(Error::BadIndexName)? 
+                            .to_string(),
+                        &entry.path(),
+                    )?;
+                    Ok(Some(index))
+                } else {
+                    Ok(None)
+                }
+            })
+            .filter_map(|entry| entry.transpose()),
+        ))
     }
 
-    fn tasks(&mut self) -> impl Iterator)>> + '_ {
-        (&mut self.tasks).lines().map(|line| -> Result<_> {
+    pub fn tasks(&mut self) -> Box)>> + '_> {
+        Box::new((&mut self.tasks).lines().map(|line| -> Result<_> {
             let mut task: index_scheduler::TaskView = serde_json::from_str(&line?)?;
             // TODO: this can be removed once we can `Deserialize` the duration from the `TaskView`.
             if let Some((started_at, finished_at)) = task.started_at.zip(task.finished_at) {
@@ -121,13 +124,15 @@ impl V6Reader {
             } else {
                 Ok((task, None))
             }
-        })
+        }))
     }
 
-    fn keys(&mut self) -> impl Iterator> + '_ {
-        (&mut self.keys)
-            .lines()
-            .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) })
+    pub fn keys(&mut self) -> Box> + '_> {
+        Box::new(
+            (&mut self.keys)
+                .lines()
+                .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+        )
     }
 }
 
@@ -165,7 +170,7 @@ impl DumpReader for V6Reader {
     }
 }
 
-struct V6IndexReader {
+pub struct V6IndexReader {
     metadata: IndexMetadata,
     documents: BufReader,
     settings: BufReader,
@@ -184,17 +189,17 @@ impl V6IndexReader {
         Ok(ret)
     }
 
-    fn metadata(&self) -> &IndexMetadata {
+    pub fn metadata(&self) -> &IndexMetadata {
         &self.metadata
     }
 
-    fn documents(&mut self) -> Result> + '_> {
+    pub fn documents(&mut self) -> Result> + '_> {
         Ok((&mut self.documents)
             .lines()
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
-    fn settings(&mut self) -> Result> {
+    pub fn settings(&mut self) -> Result> {
        let settings: Settings = serde_json::from_reader(&mut self.settings)?;
        Ok(settings.check())
    }
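
Usage note (illustrative sketch, not part of the patch): the code below shows how the version-agnostic `Compat` reader introduced above is meant to be consumed. It assumes the `dump` crate publicly exposes `reader::open`, `Result`, and the `Compat`/`CompatIndex` types, and that `Version` derives `Debug`; everything else follows the signatures added in `dump/src/reader/`.

use std::fs::File;

// A minimal sketch: callers never match on the dump version themselves.
// `open` sniffs the version and, for v5 dumps (and v4 dumps via the
// v4_to_v5 layer), wraps the reader in the compat chain so the caller
// always sees the v6-shaped API of `Compat` and `CompatIndex`.
fn summarize_dump(path: &str) -> dump::Result<()> {
    let file = File::open(path)?;
    let mut reader = dump::reader::open(file)?; // assumed public path to `open`

    println!("dump version: {:?}", reader.version());
    if let Some(uid) = reader.instance_uid()? {
        println!("instance uid: {uid}");
    }

    // Indexes are yielded as `CompatIndex`; settings and documents are
    // already converted to their v6 representation.
    for index in reader.indexes()? {
        let mut index = index?;
        let uid = index.metadata().uid.to_string();
        let documents = index.documents()?.count();
        println!("index `{uid}`: {documents} documents");
    }

    // Tasks and keys are likewise exposed through the v6 types.
    println!("tasks: {}", reader.tasks().count());
    println!("keys: {}", reader.keys().count());
    Ok(())
}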