rewrite the compat API to something more generic

Tamo 2022-10-06 19:44:50 +02:00 committed by Clément Renault
parent 2f47443458
commit 47e0288747
No known key found for this signature in database
GPG Key ID: 92ADA4E935E71FA4
13 changed files with 1355 additions and 132 deletions

Cargo.lock generated

@@ -1147,6 +1147,7 @@ dependencies = [
  "anyhow",
  "big_s",
  "flate2",
+ "http",
  "index",
  "index-scheduler",
  "insta",


@@ -20,6 +20,7 @@ log = "0.4.17"
 index-scheduler = { path = "../index-scheduler" }
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
+http = "0.2.8"
 
 [dev-dependencies]
 big_s = "1.0.2"


@@ -1,12 +1,150 @@
 // pub mod v2;
 // pub mod v3;
 // pub mod v4;
-// pub mod v4_to_v5;
+use crate::Result;
+
+use self::{
+    v4_to_v5::CompatV4ToV5,
+    v5_to_v6::{CompatIndexV5ToV6, CompatV5ToV6},
+};
+use super::{
+    v5::V5Reader,
+    v6::{self, V6IndexReader, V6Reader},
+};
+
+pub mod v4_to_v5;
 pub mod v5_to_v6;
 
-pub struct Compat<From: ?Sized> {
-    from: Box<From>,
+pub enum Compat {
+    Current(V6Reader),
+    Compat(CompatV5ToV6),
+}
+
+impl Compat {
+    pub fn version(&self) -> crate::Version {
+        match self {
+            Compat::Current(current) => current.version(),
+            Compat::Compat(compat) => compat.version(),
+        }
+    }
+
+    pub fn date(&self) -> Option<time::OffsetDateTime> {
+        match self {
+            Compat::Current(current) => current.date(),
+            Compat::Compat(compat) => compat.date(),
+        }
+    }
+
+    pub fn instance_uid(&self) -> Result<Option<uuid::Uuid>> {
+        match self {
+            Compat::Current(current) => current.instance_uid(),
+            Compat::Compat(compat) => compat.instance_uid(),
+        }
+    }
+
+    pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<CompatIndex>> + '_>> {
+        match self {
+            Compat::Current(current) => {
+                let indexes = Box::new(current.indexes()?.map(|res| res.map(CompatIndex::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndex>> + '_>;
+                Ok(indexes)
+            }
+            Compat::Compat(compat) => {
+                let indexes = Box::new(compat.indexes()?.map(|res| res.map(CompatIndex::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndex>> + '_>;
+                Ok(indexes)
+            }
+        }
+    }
+
+    pub fn tasks(
+        &mut self,
+    ) -> Box<dyn Iterator<Item = Result<(v6::Task, Option<v6::UpdateFile>)>> + '_> {
+        match self {
+            Compat::Current(current) => current.tasks(),
+            Compat::Compat(compat) => compat.tasks(),
+        }
+    }
+
+    pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<v6::Key>> + '_> {
+        match self {
+            Compat::Current(current) => current.keys(),
+            Compat::Compat(compat) => compat.keys(),
+        }
+    }
+}
+
+impl From<V6Reader> for Compat {
+    fn from(value: V6Reader) -> Self {
+        Compat::Current(value)
+    }
+}
+
+impl From<CompatV5ToV6> for Compat {
+    fn from(value: CompatV5ToV6) -> Self {
+        Compat::Compat(value)
+    }
+}
+
+impl From<V5Reader> for Compat {
+    fn from(value: V5Reader) -> Self {
+        Compat::Compat(value.to_v6())
+    }
+}
+
+impl From<CompatV4ToV5> for Compat {
+    fn from(value: CompatV4ToV5) -> Self {
+        Compat::Compat(value.to_v6())
+    }
+}
+
+pub enum CompatIndex {
+    Current(v6::V6IndexReader),
+    Compat(CompatIndexV5ToV6),
+}
+
+impl CompatIndex {
+    pub fn new_v6(v6: v6::V6IndexReader) -> CompatIndex {
+        CompatIndex::Current(v6)
+    }
+
+    pub fn metadata(&self) -> &crate::IndexMetadata {
+        match self {
+            CompatIndex::Current(v6) => v6.metadata(),
+            CompatIndex::Compat(compat) => compat.metadata(),
+        }
+    }
+
+    pub fn documents(&mut self) -> Result<Box<dyn Iterator<Item = Result<v6::Document>> + '_>> {
+        match self {
+            CompatIndex::Current(v6) => v6
+                .documents()
+                .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<v6::Document>> + '_>),
+            CompatIndex::Compat(compat) => compat
+                .documents()
+                .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<v6::Document>> + '_>),
+        }
+    }
+
+    pub fn settings(&mut self) -> Result<v6::Settings<v6::Checked>> {
+        match self {
+            CompatIndex::Current(v6) => v6.settings(),
+            CompatIndex::Compat(compat) => compat.settings(),
+        }
+    }
+}
+
+impl From<V6IndexReader> for CompatIndex {
+    fn from(value: V6IndexReader) -> Self {
+        CompatIndex::Current(value)
+    }
+}
+
+impl From<CompatIndexV5ToV6> for CompatIndex {
+    fn from(value: CompatIndexV5ToV6) -> Self {
+        CompatIndex::Compat(value)
+    }
 }
 
 /// Parses the v1 version of the Asc ranking rules `asc(price)`and returns the field name.
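The net effect of the hunk above is that the former `Compat<From: ?Sized>` wrapper around a boxed trait object becomes a pair of concrete enums, `Compat` and `CompatIndex`, whose methods dispatch to either the native `V6Reader` or a chained version-to-version compat layer, always yielding v6-shaped data. A minimal sketch of how a caller might drive this API; the `dump::reader::open` path, the `anyhow` error type and the printed field are assumptions for illustration, not part of this diff:

use std::fs::File;

// Illustrative consumer of the new generic reader API (hypothetical paths).
fn inspect_dump(path: &str) -> anyhow::Result<()> {
    // `open` sniffs the dump version and returns the generic `Compat` reader.
    let mut dump = dump::reader::open(File::open(path)?)?;

    let _instance_uid = dump.instance_uid()?;
    let _dump_date = dump.date();

    // Indexes, tasks and keys come back already converted to the v6 types,
    // whatever version the dump was written with.
    for index in dump.indexes()? {
        let mut index = index?;
        println!("index: {}", index.metadata().uid);
        let _settings = index.settings()?;
        for document in index.documents()? {
            let _document = document?;
        }
    }
    for task in dump.tasks() {
        let (_task, _update_file) = task?;
    }
    for key in dump.keys() {
        let _key = key?;
    }
    Ok(())
}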


@@ -0,0 +1,444 @@
use std::fs::File;
use crate::reader::{v4, v5, DumpReader, IndexReader};
use crate::Result;
use super::v5_to_v6::CompatV5ToV6;
pub struct CompatV4ToV5 {
from: v4::V4Reader,
}
impl CompatV4ToV5 {
pub fn new(v4: v4::V4Reader) -> CompatV4ToV5 {
CompatV4ToV5 { from: v4 }
}
pub fn to_v6(self) -> CompatV5ToV6 {
CompatV5ToV6::Compat(self)
}
pub fn version(&self) -> crate::Version {
self.from.version()
}
pub fn date(&self) -> Option<time::OffsetDateTime> {
self.from.date()
}
pub fn instance_uid(&self) -> Result<Option<uuid::Uuid>> {
self.from.instance_uid()
}
pub fn indexes(&self) -> Result<impl Iterator<Item = Result<CompatIndexV4ToV5>> + '_> {
Ok(self.from.indexes()?.map(|index_reader| -> Result<_> {
let compat = CompatIndexV4ToV5::new(index_reader?);
Ok(compat)
}))
}
pub fn tasks(
&mut self,
) -> Box<dyn Iterator<Item = Result<(v5::Task, Option<v5::UpdateFile>)>> + '_> {
Box::new(self.from.tasks().map(|task| {
task.map(|(task, content_file)| {
// let task_view: v4::tasks::TaskView = task.into();
let task = v5::Task {
id: task.id,
content: match task.content {
v4::tasks::TaskContent::DocumentAddition {
content_uuid,
merge_strategy,
primary_key,
documents_count,
allow_index_creation,
} => v5::tasks::TaskContent::DocumentAddition {
index_uid: v5::meta::IndexUid(task.index_uid.0),
content_uuid,
merge_strategy: match merge_strategy {
v4::tasks::IndexDocumentsMethod::ReplaceDocuments => {
v5::tasks::IndexDocumentsMethod::ReplaceDocuments
}
v4::tasks::IndexDocumentsMethod::UpdateDocuments => {
v5::tasks::IndexDocumentsMethod::UpdateDocuments
}
},
primary_key,
documents_count,
allow_index_creation,
},
v4::tasks::TaskContent::DocumentDeletion(deletion) => {
v5::tasks::TaskContent::DocumentDeletion {
index_uid: v5::meta::IndexUid(task.index_uid.0),
deletion: match deletion {
v4::tasks::DocumentDeletion::Clear => {
v5::tasks::DocumentDeletion::Clear
}
v4::tasks::DocumentDeletion::Ids(ids) => {
v5::tasks::DocumentDeletion::Ids(ids)
}
},
}
}
v4::tasks::TaskContent::SettingsUpdate {
settings,
is_deletion,
allow_index_creation,
} => v5::tasks::TaskContent::SettingsUpdate {
index_uid: v5::meta::IndexUid(task.index_uid.0),
settings: settings.into(),
is_deletion,
allow_index_creation,
},
v4::tasks::TaskContent::IndexDeletion => {
v5::tasks::TaskContent::IndexDeletion {
index_uid: v5::meta::IndexUid(task.index_uid.0),
}
}
v4::tasks::TaskContent::IndexCreation { primary_key } => {
v5::tasks::TaskContent::IndexCreation {
index_uid: v5::meta::IndexUid(task.index_uid.0),
primary_key,
}
}
v4::tasks::TaskContent::IndexUpdate { primary_key } => {
v5::tasks::TaskContent::IndexUpdate {
index_uid: v5::meta::IndexUid(task.index_uid.0),
primary_key,
}
}
},
events: task
.events
.into_iter()
.map(|event| match event {
v4::tasks::TaskEvent::Created(date) => {
v5::tasks::TaskEvent::Created(date)
}
v4::tasks::TaskEvent::Batched {
timestamp,
batch_id,
} => v5::tasks::TaskEvent::Batched {
timestamp,
batch_id,
},
v4::tasks::TaskEvent::Processing(date) => {
v5::tasks::TaskEvent::Processing(date)
}
v4::tasks::TaskEvent::Succeded { result, timestamp } => {
v5::tasks::TaskEvent::Succeeded {
result: match result {
v4::tasks::TaskResult::DocumentAddition {
indexed_documents,
} => v5::tasks::TaskResult::DocumentAddition {
indexed_documents,
},
v4::tasks::TaskResult::DocumentDeletion {
deleted_documents,
} => v5::tasks::TaskResult::DocumentDeletion {
deleted_documents,
},
v4::tasks::TaskResult::ClearAll { deleted_documents } => {
v5::tasks::TaskResult::ClearAll { deleted_documents }
}
v4::tasks::TaskResult::Other => {
v5::tasks::TaskResult::Other
}
},
timestamp,
}
}
v4::tasks::TaskEvent::Failed { error, timestamp } => {
v5::tasks::TaskEvent::Failed {
error: v5::ResponseError::from(error),
timestamp,
}
}
})
.collect(),
};
(task, content_file)
})
}))
}
pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<v5::Key>> + '_> {
Box::new(self.from.keys().map(|key| {
key.map(|key| v5::Key {
description: key.description,
name: None,
uid: v5::keys::KeyId::new_v4(),
actions: key
.actions
.into_iter()
.filter_map(|action| action.into())
.collect(),
indexes: key
.indexes
.into_iter()
.map(|index| match index.as_str() {
"*" => v5::StarOr::Star,
_ => v5::StarOr::Other(v5::meta::IndexUid(index)),
})
.collect(),
expires_at: key.expires_at,
created_at: key.created_at,
updated_at: key.updated_at,
})
}))
}
}
pub struct CompatIndexV4ToV5 {
from: v4::V4IndexReader,
}
impl CompatIndexV4ToV5 {
pub fn new(v4: v4::V4IndexReader) -> CompatIndexV4ToV5 {
CompatIndexV4ToV5 { from: v4 }
}
pub fn metadata(&self) -> &crate::IndexMetadata {
self.from.metadata()
}
pub fn documents(&mut self) -> Result<Box<dyn Iterator<Item = Result<v5::Document>> + '_>> {
self.from
.documents()
.map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<v5::Document>> + '_>)
}
pub fn settings(&mut self) -> Result<v5::Settings<v5::Checked>> {
Ok(v5::Settings::<v5::Unchecked>::from(self.from.settings()?).check())
}
}
impl<T> From<v4::Setting<T>> for v5::Setting<T> {
fn from(setting: v4::Setting<T>) -> Self {
match setting {
v4::Setting::Set(t) => v5::Setting::Set(t),
v4::Setting::Reset => v5::Setting::Reset,
v4::Setting::NotSet => v5::Setting::NotSet,
}
}
}
impl From<v4::ResponseError> for v5::ResponseError {
fn from(error: v4::ResponseError) -> Self {
let code = match error.error_code.as_ref() {
"CreateIndex" => v5::Code::CreateIndex,
"IndexAlreadyExists" => v5::Code::IndexAlreadyExists,
"IndexNotFound" => v5::Code::IndexNotFound,
"InvalidIndexUid" => v5::Code::InvalidIndexUid,
"InvalidMinWordLengthForTypo" => v5::Code::InvalidMinWordLengthForTypo,
"InvalidState" => v5::Code::InvalidState,
"MissingPrimaryKey" => v5::Code::MissingPrimaryKey,
"PrimaryKeyAlreadyPresent" => v5::Code::PrimaryKeyAlreadyPresent,
"MaxFieldsLimitExceeded" => v5::Code::MaxFieldsLimitExceeded,
"MissingDocumentId" => v5::Code::MissingDocumentId,
"InvalidDocumentId" => v5::Code::InvalidDocumentId,
"Filter" => v5::Code::Filter,
"Sort" => v5::Code::Sort,
"BadParameter" => v5::Code::BadParameter,
"BadRequest" => v5::Code::BadRequest,
"DatabaseSizeLimitReached" => v5::Code::DatabaseSizeLimitReached,
"DocumentNotFound" => v5::Code::DocumentNotFound,
"Internal" => v5::Code::Internal,
"InvalidGeoField" => v5::Code::InvalidGeoField,
"InvalidRankingRule" => v5::Code::InvalidRankingRule,
"InvalidStore" => v5::Code::InvalidStore,
"InvalidToken" => v5::Code::InvalidToken,
"MissingAuthorizationHeader" => v5::Code::MissingAuthorizationHeader,
"NoSpaceLeftOnDevice" => v5::Code::NoSpaceLeftOnDevice,
"DumpNotFound" => v5::Code::DumpNotFound,
"TaskNotFound" => v5::Code::TaskNotFound,
"PayloadTooLarge" => v5::Code::PayloadTooLarge,
"RetrieveDocument" => v5::Code::RetrieveDocument,
"SearchDocuments" => v5::Code::SearchDocuments,
"UnsupportedMediaType" => v5::Code::UnsupportedMediaType,
"DumpAlreadyInProgress" => v5::Code::DumpAlreadyInProgress,
"DumpProcessFailed" => v5::Code::DumpProcessFailed,
"InvalidContentType" => v5::Code::InvalidContentType,
"MissingContentType" => v5::Code::MissingContentType,
"MalformedPayload" => v5::Code::MalformedPayload,
"MissingPayload" => v5::Code::MissingPayload,
"ApiKeyNotFound" => v5::Code::ApiKeyNotFound,
"MissingParameter" => v5::Code::MissingParameter,
"InvalidApiKeyActions" => v5::Code::InvalidApiKeyActions,
"InvalidApiKeyIndexes" => v5::Code::InvalidApiKeyIndexes,
"InvalidApiKeyExpiresAt" => v5::Code::InvalidApiKeyExpiresAt,
"InvalidApiKeyDescription" => v5::Code::InvalidApiKeyDescription,
other => {
log::warn!("Unknown error code {}", other);
v5::Code::UnretrievableErrorCode
}
};
v5::ResponseError::from_msg(error.message, code)
}
}
impl<T> From<v4::Settings<T>> for v5::Settings<v5::Unchecked> {
fn from(settings: v4::Settings<T>) -> Self {
v5::Settings {
displayed_attributes: settings.displayed_attributes.into(),
searchable_attributes: settings.searchable_attributes.into(),
filterable_attributes: settings.filterable_attributes.into(),
sortable_attributes: settings.sortable_attributes.into(),
ranking_rules: settings.ranking_rules.into(),
stop_words: settings.stop_words.into(),
synonyms: settings.synonyms.into(),
distinct_attribute: settings.distinct_attribute.into(),
typo_tolerance: match settings.typo_tolerance {
v4::Setting::Set(typo) => v5::Setting::Set(v5::TypoTolerance {
enabled: typo.enabled.into(),
min_word_size_for_typos: match typo.min_word_size_for_typos {
v4::Setting::Set(t) => v5::Setting::Set(v5::MinWordSizeForTypos {
one_typo: t.one_typo.into(),
two_typos: t.two_typos.into(),
}),
v4::Setting::Reset => v5::Setting::Reset,
v4::Setting::NotSet => v5::Setting::NotSet,
},
disable_on_words: typo.disable_on_words.into(),
disable_on_attributes: typo.disable_on_attributes.into(),
}),
v4::Setting::Reset => v5::Setting::Reset,
v4::Setting::NotSet => v5::Setting::NotSet,
},
faceting: v5::Setting::NotSet,
pagination: v5::Setting::NotSet,
_kind: std::marker::PhantomData,
}
}
}
impl From<v4::Action> for Option<v5::Action> {
fn from(key: v4::Action) -> Self {
match key {
v4::Action::All => Some(v5::Action::All),
v4::Action::Search => Some(v5::Action::Search),
v4::Action::DocumentsAdd => Some(v5::Action::DocumentsAdd),
v4::Action::DocumentsGet => Some(v5::Action::DocumentsGet),
v4::Action::DocumentsDelete => Some(v5::Action::DocumentsDelete),
v4::Action::IndexesAdd => Some(v5::Action::IndexesAdd),
v4::Action::IndexesGet => Some(v5::Action::IndexesGet),
v4::Action::IndexesUpdate => Some(v5::Action::IndexesUpdate),
v4::Action::IndexesDelete => Some(v5::Action::IndexesDelete),
v4::Action::TasksGet => Some(v5::Action::TasksGet),
v4::Action::SettingsGet => Some(v5::Action::SettingsGet),
v4::Action::SettingsUpdate => Some(v5::Action::SettingsUpdate),
v4::Action::StatsGet => Some(v5::Action::StatsGet),
v4::Action::DumpsCreate => Some(v5::Action::DumpsCreate),
v4::Action::DumpsGet => None,
v4::Action::Version => Some(v5::Action::Version),
}
}
}
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
use super::*;
#[test]
fn compat_v4_v5() {
let dump = File::open("tests/assets/v4.dump").unwrap();
let dir = TempDir::new().unwrap();
let mut dump = BufReader::new(dump);
let gz = GzDecoder::new(&mut dump);
let mut archive = tar::Archive::new(gz);
archive.unpack(dir.path()).unwrap();
let mut dump = v4::V4Reader::open(dir).unwrap().to_v5();
// top level infos
insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
// tasks
let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
insta::assert_json_snapshot!(tasks);
assert_eq!(update_files.len(), 22);
assert!(update_files[0].is_none()); // the dump creation
assert!(update_files[1].is_some()); // the enqueued document addition
assert!(update_files[2..].iter().all(|u| u.is_none())); // everything already processed
// keys
let keys = dump.keys().collect::<Result<Vec<_>>>().unwrap();
insta::assert_json_snapshot!(keys);
// indexes
let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
// the indexes are not ordered in any way by default
indexes.sort_by_key(|index| index.metadata().uid.to_string());
let mut products = indexes.pop().unwrap();
let mut movies = indexes.pop().unwrap();
let mut spells = indexes.pop().unwrap();
assert!(indexes.is_empty());
// products
insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "products",
"primaryKey": "sku",
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
insta::assert_debug_snapshot!(products.settings());
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
assert_eq!(documents.len(), 10);
insta::assert_json_snapshot!(documents);
// movies
insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "movies",
"primaryKey": "id",
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
insta::assert_debug_snapshot!(movies.settings());
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
assert_eq!(documents.len(), 200);
insta::assert_debug_snapshot!(documents);
// spells
insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "dnd_spells",
"primaryKey": "index",
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
insta::assert_debug_snapshot!(spells.settings());
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
assert_eq!(documents.len(), 10);
insta::assert_json_snapshot!(documents);
}
}


@@ -1,46 +1,65 @@
 use crate::reader::{v5, v6, DumpReader, IndexReader};
 use crate::Result;
 
-pub struct CompatV5ToV6 {
-    from: v5::V5Reader,
+use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
+
+pub enum CompatV5ToV6 {
+    V5(v5::V5Reader),
+    Compat(CompatV4ToV5),
 }
 
 impl CompatV5ToV6 {
-    pub fn new(v5: v5::V5Reader) -> CompatV5ToV6 {
-        CompatV5ToV6 { from: v5 }
+    pub fn new_v5(v5: v5::V5Reader) -> CompatV5ToV6 {
+        CompatV5ToV6::V5(v5)
     }
-}
-
-impl DumpReader for CompatV5ToV6 {
-    fn version(&self) -> crate::Version {
-        self.from.version()
+
+    pub fn version(&self) -> crate::Version {
+        match self {
+            CompatV5ToV6::V5(v5) => v5.version(),
+            CompatV5ToV6::Compat(compat) => compat.version(),
+        }
     }
 
-    fn date(&self) -> Option<time::OffsetDateTime> {
-        self.from.date()
+    pub fn date(&self) -> Option<time::OffsetDateTime> {
+        match self {
+            CompatV5ToV6::V5(v5) => v5.date(),
+            CompatV5ToV6::Compat(compat) => compat.date(),
+        }
     }
 
-    fn instance_uid(&self) -> Result<Option<uuid::Uuid>> {
-        self.from.instance_uid()
+    pub fn instance_uid(&self) -> Result<Option<uuid::Uuid>> {
+        match self {
+            CompatV5ToV6::V5(v5) => v5.instance_uid(),
+            CompatV5ToV6::Compat(compat) => compat.instance_uid(),
+        }
     }
 
-    fn indexes(
-        &self,
-    ) -> Result<Box<dyn Iterator<Item = Result<Box<dyn crate::reader::IndexReader + '_>>> + '_>>
-    {
-        Ok(Box::new(self.from.indexes()?.map(
-            |index_reader| -> Result<_> {
-                let compat = Box::new(CompatIndexV5ToV6::new(index_reader?))
-                    as Box<dyn crate::reader::IndexReader + '_>;
-                Ok(compat)
-            },
-        )))
+    pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>> {
+        let indexes = match self {
+            CompatV5ToV6::V5(v5) => Box::new(
+                v5.indexes()?
+                    .map(|index| index.map(CompatIndexV5ToV6::from)),
+            )
+                as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>,
+            CompatV5ToV6::Compat(compat) => Box::new(
+                compat
+                    .indexes()?
+                    .map(|index| index.map(CompatIndexV5ToV6::from)),
+            )
+                as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>,
+        };
+        Ok(indexes)
     }
 
-    fn tasks(
+    pub fn tasks(
         &mut self,
     ) -> Box<dyn Iterator<Item = Result<(v6::Task, Option<v6::UpdateFile>)>> + '_> {
-        Box::new(self.from.tasks().map(|task| {
+        let tasks = match self {
+            CompatV5ToV6::V5(v5) => v5.tasks(),
+            CompatV5ToV6::Compat(compat) => compat.tasks(),
+        };
+        Box::new(tasks.map(|task| {
             task.map(|(task, content_file)| {
                 let task_view: v5::tasks::TaskView = task.into();
@@ -101,8 +120,12 @@ impl DumpReader for CompatV5ToV6 {
         }))
     }
 
-    fn keys(&mut self) -> Box<dyn Iterator<Item = Result<v6::Key>> + '_> {
-        Box::new(self.from.keys().map(|key| {
+    pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<v6::Key>> + '_> {
+        let keys = match self {
+            CompatV5ToV6::V5(v5) => v5.keys(),
+            CompatV5ToV6::Compat(compat) => compat.keys(),
+        };
+        Box::new(keys.map(|key| {
             key.map(|key| v6::Key {
                 description: key.description,
                 name: key.name,
@@ -130,29 +153,51 @@ impl DumpReader for CompatV5ToV6 {
     }
 }
 
-pub struct CompatIndexV5ToV6 {
-    from: v5::V5IndexReader,
+pub enum CompatIndexV5ToV6 {
+    V5(v5::V5IndexReader),
+    Compat(CompatIndexV4ToV5),
+}
+
+impl From<v5::V5IndexReader> for CompatIndexV5ToV6 {
+    fn from(index_reader: v5::V5IndexReader) -> Self {
+        Self::V5(index_reader)
+    }
+}
+
+impl From<CompatIndexV4ToV5> for CompatIndexV5ToV6 {
+    fn from(index_reader: CompatIndexV4ToV5) -> Self {
+        Self::Compat(index_reader)
+    }
 }
 
 impl CompatIndexV5ToV6 {
-    pub fn new(v5: v5::V5IndexReader) -> CompatIndexV5ToV6 {
-        CompatIndexV5ToV6 { from: v5 }
+    pub fn new_v5(v5: v5::V5IndexReader) -> CompatIndexV5ToV6 {
+        CompatIndexV5ToV6::V5(v5)
     }
-}
-
-impl IndexReader for CompatIndexV5ToV6 {
-    fn metadata(&self) -> &crate::IndexMetadata {
-        self.from.metadata()
+
+    pub fn metadata(&self) -> &crate::IndexMetadata {
+        match self {
+            CompatIndexV5ToV6::V5(v5) => v5.metadata(),
+            CompatIndexV5ToV6::Compat(compat) => compat.metadata(),
+        }
     }
 
-    fn documents(&mut self) -> Result<Box<dyn Iterator<Item = Result<v6::Document>> + '_>> {
-        self.from
-            .documents()
-            .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<v6::Document>> + '_>)
+    pub fn documents(&mut self) -> Result<Box<dyn Iterator<Item = Result<v6::Document>> + '_>> {
+        match self {
+            CompatIndexV5ToV6::V5(v5) => v5
+                .documents()
+                .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<v6::Document>> + '_>),
+            CompatIndexV5ToV6::Compat(compat) => compat
+                .documents()
+                .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<v6::Document>> + '_>),
+        }
     }
 
-    fn settings(&mut self) -> Result<v6::Settings<v6::Checked>> {
-        Ok(v6::Settings::<v6::Unchecked>::from(self.from.settings()?).check())
+    pub fn settings(&mut self) -> Result<v6::Settings<v6::Checked>> {
+        match self {
+            CompatIndexV5ToV6::V5(v5) => Ok(v6::Settings::from(v5.settings()?).check()),
+            CompatIndexV5ToV6::Compat(compat) => Ok(v6::Settings::from(compat.settings()?).check()),
+        }
     }
 }


@@ -13,6 +13,8 @@ use uuid::Uuid;
 // use crate::reader::compat::Compat;
 use crate::{IndexMetadata, Result, Version};
 
+use self::compat::Compat;
+
 // use self::loaders::{v2, v3, v4, v5};
 
 // pub mod error;
@@ -23,7 +25,7 @@ pub(self) mod v4;
 pub(self) mod v5;
 pub(self) mod v6;
 
-pub fn open(dump: impl Read) -> Result<Box<dyn DumpReader>> {
+pub fn open(dump: impl Read) -> Result<Compat> {
     let path = TempDir::new()?;
     let mut dump = BufReader::new(dump);
     let gz = GzDecoder::new(&mut dump);
@@ -44,8 +46,8 @@ pub fn open(dump: impl Read) -> Result<Box<dyn DumpReader>> {
         Version::V2 => todo!(),
         Version::V3 => todo!(),
         Version::V4 => todo!(),
-        Version::V5 => Ok(Box::new(v5::V5Reader::open(path)?.to_v6())),
-        Version::V6 => Ok(Box::new(v6::V6Reader::open(path)?)),
+        Version::V5 => Ok(v5::V5Reader::open(path)?.to_v6().into()),
+        Version::V6 => Ok(v6::V6Reader::open(path)?.into()),
     }
 }
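With `open` now returning the concrete `Compat` enum, supporting the older formats is mostly a matter of filling the remaining `todo!()` arms with the same `.into()` pattern. A hypothetical sketch of the v4 arm as a helper inside this module (not part of this commit; it assumes the module's existing imports and relies on `V4Reader::open(...).to_v5()` and the `From<CompatV4ToV5> for Compat` impl shown elsewhere in the diff):

// Hypothetical helper, not in this commit.
fn open_v4(path: TempDir) -> Result<Compat> {
    // `to_v5()` yields a CompatV4ToV5; the `From<CompatV4ToV5> for Compat`
    // impl then chains `to_v6()`, so callers still see v6-shaped data.
    Ok(v4::V4Reader::open(path)?.to_v5().into())
}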


@@ -0,0 +1,320 @@
use std::fmt;
use http::StatusCode;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct ResponseError {
#[serde(skip)]
#[cfg_attr(
feature = "test-traits",
proptest(strategy = "strategy::status_code_strategy()")
)]
pub code: StatusCode,
pub message: String,
#[serde(rename = "code")]
pub error_code: String,
#[serde(rename = "type")]
pub error_type: String,
#[serde(rename = "link")]
pub error_link: String,
}
impl ResponseError {
pub fn from_msg(message: String, code: Code) -> Self {
Self {
code: code.http(),
message,
error_code: code.err_code().error_name.to_string(),
error_type: code.type_(),
error_link: code.url(),
}
}
}
impl fmt::Display for ResponseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.message.fmt(f)
}
}
impl std::error::Error for ResponseError {}
impl<T> From<T> for ResponseError
where
T: ErrorCode,
{
fn from(other: T) -> Self {
Self {
code: other.http_status(),
message: other.to_string(),
error_code: other.error_name(),
error_type: other.error_type(),
error_link: other.error_url(),
}
}
}
pub trait ErrorCode: std::error::Error {
fn error_code(&self) -> Code;
/// returns the HTTP status code associated with the error
fn http_status(&self) -> StatusCode {
self.error_code().http()
}
/// returns the doc url associated with the error
fn error_url(&self) -> String {
self.error_code().url()
}
/// returns error name, used as error code
fn error_name(&self) -> String {
self.error_code().name()
}
/// return the error type
fn error_type(&self) -> String {
self.error_code().type_()
}
}
#[allow(clippy::enum_variant_names)]
enum ErrorType {
InternalError,
InvalidRequestError,
AuthenticationError,
}
impl fmt::Display for ErrorType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use ErrorType::*;
match self {
InternalError => write!(f, "internal"),
InvalidRequestError => write!(f, "invalid_request"),
AuthenticationError => write!(f, "auth"),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub enum Code {
// index related error
CreateIndex,
IndexAlreadyExists,
IndexNotFound,
InvalidIndexUid,
InvalidMinWordLengthForTypo,
// invalid state error
InvalidState,
MissingPrimaryKey,
PrimaryKeyAlreadyPresent,
MaxFieldsLimitExceeded,
MissingDocumentId,
InvalidDocumentId,
Filter,
Sort,
BadParameter,
BadRequest,
DatabaseSizeLimitReached,
DocumentNotFound,
Internal,
InvalidGeoField,
InvalidRankingRule,
InvalidStore,
InvalidToken,
MissingAuthorizationHeader,
NoSpaceLeftOnDevice,
DumpNotFound,
TaskNotFound,
PayloadTooLarge,
RetrieveDocument,
SearchDocuments,
UnsupportedMediaType,
DumpAlreadyInProgress,
DumpProcessFailed,
InvalidContentType,
MissingContentType,
MalformedPayload,
MissingPayload,
ApiKeyNotFound,
MissingParameter,
InvalidApiKeyActions,
InvalidApiKeyIndexes,
InvalidApiKeyExpiresAt,
InvalidApiKeyDescription,
}
impl Code {
/// associate a `Code` variant to the actual ErrCode
fn err_code(&self) -> ErrCode {
use Code::*;
match self {
// index related errors
// create index is thrown on internal error while creating an index.
CreateIndex => {
ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
// thrown when requesting an unexisting index
IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
// invalid state error
InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
// thrown when no primary key has been set
MissingPrimaryKey => {
ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST)
}
// error thrown when trying to set an already existing primary key
PrimaryKeyAlreadyPresent => {
ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
}
// invalid ranking rule
InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),
// invalid database
InvalidStore => {
ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
}
// invalid document
MaxFieldsLimitExceeded => {
ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
}
MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),
// error related to filters
Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
// error related to sorts
Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST),
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
DatabaseSizeLimitReached => ErrCode::internal(
"database_size_limit_reached",
StatusCode::INTERNAL_SERVER_ERROR,
),
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
MissingAuthorizationHeader => {
ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
}
TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
NoSpaceLeftOnDevice => {
ErrCode::internal("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR)
}
PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
RetrieveDocument => {
ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
}
SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
UnsupportedMediaType => {
ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
// error related to dump
DumpAlreadyInProgress => {
ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
}
DumpProcessFailed => {
ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
MissingContentType => {
ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
InvalidContentType => {
ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
// error related to keys
ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST),
InvalidApiKeyActions => {
ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
}
InvalidApiKeyIndexes => {
ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
}
InvalidApiKeyExpiresAt => {
ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
}
InvalidApiKeyDescription => {
ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
}
InvalidMinWordLengthForTypo => {
ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
}
}
}
/// return the HTTP status code associated with the `Code`
fn http(&self) -> StatusCode {
self.err_code().status_code
}
/// return error name, used as error code
fn name(&self) -> String {
self.err_code().error_name.to_string()
}
/// return the error type
fn type_(&self) -> String {
self.err_code().error_type.to_string()
}
/// return the doc url associated with the error
fn url(&self) -> String {
format!("https://docs.meilisearch.com/errors#{}", self.name())
}
}
/// Internal structure providing a convenient way to create error codes
struct ErrCode {
status_code: StatusCode,
error_type: ErrorType,
error_name: &'static str,
}
impl ErrCode {
fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::AuthenticationError,
}
}
fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InternalError,
}
}
fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InvalidRequestError,
}
}
}


@@ -9,10 +9,11 @@ use tempfile::TempDir;
 use time::OffsetDateTime;
 use uuid::Uuid;
 
-mod keys;
-mod meta;
-mod settings;
-mod tasks;
+pub mod errors;
+pub mod keys;
+pub mod meta;
+pub mod settings;
+pub mod tasks;
 
 use crate::{IndexMetadata, Result, Version};
@@ -46,7 +47,7 @@ pub type StarOr<T> = meta::StarOr<T>;
 pub type IndexUid = meta::IndexUid;
 
 // everything related to the errors
-pub type ResponseError = tasks::ResponseError;
+pub type ResponseError = errors::ResponseError;
 pub type Code = meilisearch_types::error::Code;
 
 #[derive(Serialize, Deserialize, Debug)]
@@ -119,8 +120,8 @@ impl V4Reader {
         }))
     }
 
-    pub fn tasks(&mut self) -> impl Iterator<Item = Result<(Task, Option<UpdateFile>)>> + '_ {
-        (&mut self.tasks).lines().map(|line| -> Result<_> {
+    pub fn tasks(&mut self) -> Box<dyn Iterator<Item = Result<(Task, Option<UpdateFile>)>> + '_> {
+        Box::new((&mut self.tasks).lines().map(|line| -> Result<_> {
             let task: Task = serde_json::from_str(&line?)?;
             if !task.is_finished() {
                 if let Some(uuid) = task.get_content_uuid() {
@@ -137,13 +138,15 @@ impl V4Reader {
             } else {
                 Ok((task, None))
             }
-        })
+        }))
     }
 
-    pub fn keys(&mut self) -> impl Iterator<Item = Result<Key>> + '_ {
-        (&mut self.keys)
-            .lines()
-            .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) })
+    pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
+        Box::new(
+            (&mut self.keys)
+                .lines()
+                .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+        )
     }
 }


@@ -5,6 +5,7 @@ use time::{Duration, OffsetDateTime};
 use uuid::Uuid;
 
 use super::{
+    errors::ResponseError,
     meta::IndexUid,
     settings::{Settings, Unchecked},
 };
@@ -148,19 +149,6 @@ pub enum TaskResult {
     Other,
 }
 
-#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
-#[cfg_attr(test, derive(serde::Serialize))]
-#[serde(rename_all = "camelCase")]
-pub struct ResponseError {
-    pub message: String,
-    #[serde(rename = "code")]
-    pub error_code: String,
-    #[serde(rename = "type")]
-    pub error_type: String,
-    #[serde(rename = "link")]
-    pub error_link: String,
-}
-
 impl Task {
     /// Return true when a task is finished.
     /// A task is finished when its last state is either `Succeeded` or `Failed`.


@@ -0,0 +1,285 @@
use std::fmt;
use http::StatusCode;
use serde::Deserialize;
#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct ResponseError {
#[serde(skip)]
code: StatusCode,
pub message: String,
#[serde(rename = "code")]
pub error_code: String,
#[serde(rename = "type")]
pub error_type: String,
#[serde(rename = "link")]
pub error_link: String,
}
impl ResponseError {
pub fn from_msg(message: String, code: Code) -> Self {
Self {
code: code.http(),
message,
error_code: code.err_code().error_name.to_string(),
error_type: code.type_(),
error_link: code.url(),
}
}
}
#[derive(Deserialize, Debug, Clone, Copy)]
#[cfg_attr(test, derive(serde::Serialize))]
pub enum Code {
// index related error
CreateIndex,
IndexAlreadyExists,
IndexNotFound,
InvalidIndexUid,
InvalidMinWordLengthForTypo,
// invalid state error
InvalidState,
MissingPrimaryKey,
PrimaryKeyAlreadyPresent,
MaxFieldsLimitExceeded,
MissingDocumentId,
InvalidDocumentId,
Filter,
Sort,
BadParameter,
BadRequest,
DatabaseSizeLimitReached,
DocumentNotFound,
Internal,
InvalidGeoField,
InvalidRankingRule,
InvalidStore,
InvalidToken,
MissingAuthorizationHeader,
NoSpaceLeftOnDevice,
DumpNotFound,
TaskNotFound,
PayloadTooLarge,
RetrieveDocument,
SearchDocuments,
UnsupportedMediaType,
DumpAlreadyInProgress,
DumpProcessFailed,
InvalidContentType,
MissingContentType,
MalformedPayload,
MissingPayload,
ApiKeyNotFound,
MissingParameter,
InvalidApiKeyActions,
InvalidApiKeyIndexes,
InvalidApiKeyExpiresAt,
InvalidApiKeyDescription,
InvalidApiKeyName,
InvalidApiKeyUid,
ImmutableField,
ApiKeyAlreadyExists,
UnretrievableErrorCode,
}
impl Code {
/// associate a `Code` variant to the actual ErrCode
fn err_code(&self) -> ErrCode {
use Code::*;
match self {
// index related errors
// create index is thrown on internal error while creating an index.
CreateIndex => {
ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
// thrown when requesting an unexisting index
IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
// invalid state error
InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
// thrown when no primary key has been set
MissingPrimaryKey => {
ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST)
}
// error thrown when trying to set an already existing primary key
PrimaryKeyAlreadyPresent => {
ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
}
// invalid ranking rule
InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),
// invalid database
InvalidStore => {
ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
}
// invalid document
MaxFieldsLimitExceeded => {
ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
}
MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),
// error related to filters
Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
// error related to sorts
Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST),
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
DatabaseSizeLimitReached => ErrCode::internal(
"database_size_limit_reached",
StatusCode::INTERNAL_SERVER_ERROR,
),
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
MissingAuthorizationHeader => {
ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
}
TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
NoSpaceLeftOnDevice => {
ErrCode::internal("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR)
}
PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
RetrieveDocument => {
ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
}
SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
UnsupportedMediaType => {
ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
// error related to dump
DumpAlreadyInProgress => {
ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
}
DumpProcessFailed => {
ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
MissingContentType => {
ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
InvalidContentType => {
ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
// error related to keys
ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST),
InvalidApiKeyActions => {
ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
}
InvalidApiKeyIndexes => {
ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
}
InvalidApiKeyExpiresAt => {
ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
}
InvalidApiKeyDescription => {
ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
}
InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST),
InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST),
ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT),
ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST),
InvalidMinWordLengthForTypo => {
ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
}
UnretrievableErrorCode => {
ErrCode::invalid("unretrievable_error_code", StatusCode::BAD_REQUEST)
}
}
}
/// return the HTTP status code associated with the `Code`
fn http(&self) -> StatusCode {
self.err_code().status_code
}
/// return error name, used as error code
fn name(&self) -> String {
self.err_code().error_name.to_string()
}
/// return the error type
fn type_(&self) -> String {
self.err_code().error_type.to_string()
}
/// return the doc url associated with the error
fn url(&self) -> String {
format!("https://docs.meilisearch.com/errors#{}", self.name())
}
}
/// Internal structure providing a convenient way to create error codes
struct ErrCode {
status_code: StatusCode,
error_type: ErrorType,
error_name: &'static str,
}
impl ErrCode {
fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::AuthenticationError,
}
}
fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InternalError,
}
}
fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InvalidRequestError,
}
}
}
#[allow(clippy::enum_variant_names)]
enum ErrorType {
InternalError,
InvalidRequestError,
AuthenticationError,
}
impl fmt::Display for ErrorType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use ErrorType::*;
match self {
InternalError => write!(f, "internal"),
InvalidRequestError => write!(f, "invalid_request"),
AuthenticationError => write!(f, "auth"),
}
}
}


@@ -47,6 +47,7 @@ use crate::{IndexMetadata, Result, Version};
 use super::{compat::v5_to_v6::CompatV5ToV6, DumpReader, IndexReader};
 
+pub mod errors;
 pub mod keys;
 pub mod meta;
 pub mod settings;
@@ -80,8 +81,8 @@ pub type StarOr<T> = meta::StarOr<T>;
 pub type IndexUid = meta::IndexUid;
 
 // everything related to the errors
-pub type ResponseError = tasks::ResponseError;
-pub type Code = meilisearch_types::error::Code;
+pub type ResponseError = errors::ResponseError;
+pub type Code = errors::Code;
 
 #[derive(Serialize, Deserialize, Debug)]
 #[serde(rename_all = "camelCase")]
@@ -124,7 +125,7 @@ impl V5Reader {
     }
 
     pub fn to_v6(self) -> CompatV5ToV6 {
-        CompatV5ToV6::new(self)
+        CompatV5ToV6::new_v5(self)
     }
 
     pub fn version(&self) -> Version {
@@ -153,8 +154,8 @@ impl V5Reader {
         }))
     }
 
-    pub fn tasks(&mut self) -> impl Iterator<Item = Result<(Task, Option<UpdateFile>)>> + '_ {
-        (&mut self.tasks).lines().map(|line| -> Result<_> {
+    pub fn tasks(&mut self) -> Box<dyn Iterator<Item = Result<(Task, Option<UpdateFile>)>> + '_> {
+        Box::new((&mut self.tasks).lines().map(|line| -> Result<_> {
             let task: Task = serde_json::from_str(&line?)?;
             if !task.is_finished() {
                 if let Some(uuid) = task.get_content_uuid() {
@@ -171,13 +172,15 @@ impl V5Reader {
             } else {
                 Ok((task, None))
             }
-        })
+        }))
     }
 
-    pub fn keys(&mut self) -> impl Iterator<Item = Result<Key>> + '_ {
-        (&mut self.keys)
-            .lines()
-            .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) })
+    pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
+        Box::new(
+            (&mut self.keys)
+                .lines()
+                .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+        )
     }
 }


@@ -5,6 +5,7 @@ use time::{Duration, OffsetDateTime};
 use uuid::Uuid;
 
 use super::{
+    errors::ResponseError,
     meta::IndexUid,
     settings::{Settings, Unchecked},
 };
@@ -113,19 +114,6 @@ pub enum TaskResult {
     Other,
 }
 
-#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
-#[cfg_attr(test, derive(serde::Serialize))]
-#[serde(rename_all = "camelCase")]
-pub struct ResponseError {
-    pub message: String,
-    #[serde(rename = "code")]
-    pub error_code: String,
-    #[serde(rename = "type")]
-    pub error_type: String,
-    #[serde(rename = "link")]
-    pub error_link: String,
-}
-
 impl Task {
     /// Return true when a task is finished.
     /// A task is finished when its last state is either `Succeeded` or `Failed`.


@@ -68,21 +68,23 @@ impl V6Reader {
             dump,
         })
     }
 
-    fn version(&self) -> Version {
+    pub fn version(&self) -> Version {
         Version::V6
     }
 
-    fn date(&self) -> Option<OffsetDateTime> {
+    pub fn date(&self) -> Option<OffsetDateTime> {
         Some(self.metadata.dump_date)
    }
 
-    fn instance_uid(&self) -> Result<Option<Uuid>> {
+    pub fn instance_uid(&self) -> Result<Option<Uuid>> {
         Ok(Some(self.instance_uid))
     }
 
-    fn indexes(&self) -> Result<impl Iterator<Item = Result<V6IndexReader>> + '_> {
+    pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<V6IndexReader>> + '_>> {
         let entries = fs::read_dir(self.dump.path().join("indexes"))?;
-        Ok(entries
+        Ok(Box::new(
+            entries
                 .map(|entry| -> Result<Option<_>> {
                     let entry = entry?;
                     if entry.file_type()?.is_dir() {
@@ -99,11 +101,12 @@ impl V6Reader {
                         Ok(None)
                     }
                 })
-            .filter_map(|entry| entry.transpose()))
+                .filter_map(|entry| entry.transpose()),
+        ))
     }
 
-    fn tasks(&mut self) -> impl Iterator<Item = Result<(Task, Option<UpdateFile>)>> + '_ {
-        (&mut self.tasks).lines().map(|line| -> Result<_> {
+    pub fn tasks(&mut self) -> Box<dyn Iterator<Item = Result<(Task, Option<UpdateFile>)>> + '_> {
+        Box::new((&mut self.tasks).lines().map(|line| -> Result<_> {
             let mut task: index_scheduler::TaskView = serde_json::from_str(&line?)?;
             // TODO: this can be removed once we can `Deserialize` the duration from the `TaskView`.
             if let Some((started_at, finished_at)) = task.started_at.zip(task.finished_at) {
@@ -121,13 +124,15 @@ impl V6Reader {
             } else {
                 Ok((task, None))
             }
-        })
+        }))
     }
 
-    fn keys(&mut self) -> impl Iterator<Item = Result<Key>> + '_ {
-        (&mut self.keys)
-            .lines()
-            .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) })
+    pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
+        Box::new(
+            (&mut self.keys)
+                .lines()
+                .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+        )
     }
 }
@@ -165,7 +170,7 @@ impl DumpReader for V6Reader {
     }
 }
 
-struct V6IndexReader {
+pub struct V6IndexReader {
     metadata: IndexMetadata,
     documents: BufReader<File>,
     settings: BufReader<File>,
@@ -184,17 +189,17 @@ impl V6IndexReader {
         Ok(ret)
     }
 
-    fn metadata(&self) -> &IndexMetadata {
+    pub fn metadata(&self) -> &IndexMetadata {
         &self.metadata
     }
 
-    fn documents(&mut self) -> Result<impl Iterator<Item = Result<Document>> + '_> {
+    pub fn documents(&mut self) -> Result<impl Iterator<Item = Result<Document>> + '_> {
         Ok((&mut self.documents)
             .lines()
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
-    fn settings(&mut self) -> Result<Settings<Checked>> {
+    pub fn settings(&mut self) -> Result<Settings<Checked>> {
         let settings: Settings<Unchecked> = serde_json::from_reader(&mut self.settings)?;
         Ok(settings.check())
     }