diff --git a/Cargo.lock b/Cargo.lock index 85f1b45dd..8b420c459 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2368,6 +2368,7 @@ dependencies = [ "proptest-derive", "serde", "serde_json", + "time 0.3.14", "tokio", ] diff --git a/dump/src/lib.rs b/dump/src/lib.rs index 1b3a4c90d..31a7c96eb 100644 --- a/dump/src/lib.rs +++ b/dump/src/lib.rs @@ -52,14 +52,12 @@ pub(crate) mod test { }; use big_s::S; - use index_scheduler::{ - task::{Details, DetailsView}, - Kind, Status, TaskView, - }; + use index_scheduler::task::Details; use maplit::btreeset; use meilisearch_auth::{Action, Key}; use meilisearch_types::milli::{self, update::Setting}; use meilisearch_types::settings::{Checked, Settings}; + use meilisearch_types::tasks::{DetailsView, Kind, Status, TaskView}; use meilisearch_types::{index_uid::IndexUid, star_or::StarOr}; use serde_json::{json, Map, Value}; use time::{macros::datetime, Duration}; diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index bc04b7cfc..c145b223e 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -102,9 +102,7 @@ impl CompatV5ToV6 { }, v5::tasks::TaskContent::Dump { .. 
} => v6::Kind::DumpExport, }, - details: todo!(), - /* - task_view.details.map(|details| match details { + details: task_view.details.map(|details| match details { v5::Details::DocumentAddition { received_documents, indexed_documents, @@ -130,9 +128,7 @@ impl CompatV5ToV6 { } v5::Details::Dump { dump_uid } => v6::Details::Dump { dump_uid }, }), - */ error: task_view.error.map(|e| e.into()), - duration: task_view.duration, enqueued_at: task_view.enqueued_at, started_at: task_view.started_at, finished_at: task_view.finished_at, diff --git a/dump/src/reader/v6.rs b/dump/src/reader/v6.rs index 8a6b1ff53..7ac454ad5 100644 --- a/dump/src/reader/v6.rs +++ b/dump/src/reader/v6.rs @@ -21,14 +21,14 @@ pub type Settings = meilisearch_types::settings::Settings; pub type Checked = meilisearch_types::settings::Checked; pub type Unchecked = meilisearch_types::settings::Unchecked; -pub type Task = index_scheduler::TaskView; +pub type Task = meilisearch_types::tasks::TaskDump; pub type Key = meilisearch_auth::Key; // ===== Other types to clarify the code of the compat module // everything related to the tasks -pub type Status = index_scheduler::Status; -pub type Kind = index_scheduler::Kind; -pub type Details = index_scheduler::Details; +pub type Status = meilisearch_types::tasks::Status; +pub type Kind = meilisearch_types::tasks::Kind; +pub type Details = meilisearch_types::tasks::Details; // everything related to the settings pub type Setting = meilisearch_types::milli::update::Setting; @@ -109,11 +109,8 @@ impl V6Reader { &mut self, ) -> Box>)>> + '_> { Box::new((&mut self.tasks).lines().map(|line| -> Result<_> { - let mut task: index_scheduler::TaskView = todo!(); // serde_json::from_str(&line?)?; - // TODO: this can be removed once we can `Deserialize` the duration from the `TaskView`. 
- if let Some((started_at, finished_at)) = task.started_at.zip(task.finished_at) { - task.duration = Some(finished_at - started_at); - } + let task: Task = serde_json::from_str(&line?)?; + let update_file_path = self .dump .path() diff --git a/dump/src/writer.rs b/dump/src/writer.rs index f7b68b5ae..bc5752fee 100644 --- a/dump/src/writer.rs +++ b/dump/src/writer.rs @@ -5,9 +5,9 @@ use std::{ }; use flate2::{write::GzEncoder, Compression}; -use index_scheduler::TaskView; use meilisearch_auth::Key; use meilisearch_types::settings::{Checked, Settings}; +use meilisearch_types::tasks::TaskDump; use serde_json::{Map, Value}; use tempfile::TempDir; use time::OffsetDateTime; @@ -105,17 +105,12 @@ impl TaskWriter { /// Pushes tasks in the dump. /// If the tasks has an associated `update_file` it'll use the `task_id` as its name. - pub fn push_task(&mut self, task: &TaskView) -> Result { - // TODO: this could be removed the day we implements `Deserialize` on the Duration. - let mut task = task.clone(); - task.duration = None; - - self.queue.write_all(&serde_json::to_vec(&task)?)?; + pub fn push_task(&mut self, task: &TaskDump) -> Result { + self.queue.write_all(&serde_json::to_vec(task)?)?; self.queue.write_all(b"\n")?; Ok(UpdateFile::new( - self.update_files - .join(format!("{}.jsonl", task.uid.to_string())), + self.update_files.join(format!("{}.jsonl", task.uid)), )) } } diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs index 168e8e035..8a01ef493 100644 --- a/index-scheduler/src/autobatcher.rs +++ b/index-scheduler/src/autobatcher.rs @@ -1,10 +1,9 @@ use meilisearch_types::milli::update::IndexDocumentsMethod::{ self, ReplaceDocuments, UpdateDocuments, }; +use meilisearch_types::tasks::{Kind, TaskId}; use std::ops::ControlFlow::{self, Break, Continue}; -use crate::{task::Kind, TaskId}; - #[derive(Debug)] pub enum BatchKind { DocumentClear { diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index c3656a315..853bfd602 
100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -1,8 +1,11 @@ use crate::{ autobatcher::BatchKind, - task::{Details, Kind, KindWithContent, Status, Task}, + task::{KindWithContent, Task}, Error, IndexScheduler, Result, TaskId, }; + +use meilisearch_types::tasks::{Details, Kind, Status}; + use log::{debug, info}; use meilisearch_types::milli::update::IndexDocumentsConfig; use meilisearch_types::milli::update::{ diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 2430bb090..89f7aa07b 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -11,7 +11,8 @@ pub type Result = std::result::Result; pub type TaskId = u32; pub use error::Error; -pub use task::{Details, Kind, KindWithContent, Status, TaskView}; +use meilisearch_types::tasks::{Kind, Status, TaskView}; +pub use task::KindWithContent; use std::path::PathBuf; use std::sync::{Arc, RwLock}; @@ -34,7 +35,7 @@ use crate::task::Task; const DEFAULT_LIMIT: fn() -> u32 = || 20; -#[derive(derive_builder::Builder, Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct Query { #[serde(default = "DEFAULT_LIMIT")] diff --git a/index-scheduler/src/task.rs b/index-scheduler/src/task.rs index 1b2e167c8..471694151 100644 --- a/index-scheduler/src/task.rs +++ b/index-scheduler/src/task.rs @@ -3,54 +3,13 @@ use meilisearch_types::error::ResponseError; use meilisearch_types::milli::update::IndexDocumentsMethod; use meilisearch_types::settings::{Settings, Unchecked}; -use serde::{Deserialize, Serialize, Serializer}; -use std::{ - fmt::{Display, Write}, - path::PathBuf, - str::FromStr, -}; -use time::{Duration, OffsetDateTime}; +use meilisearch_types::tasks::{Details, Kind, Status, TaskView}; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use time::OffsetDateTime; use uuid::Uuid; -use crate::{Error, TaskId}; - -#[derive(Debug, Clone, 
PartialEq, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct TaskView { - pub uid: TaskId, - #[serde(default)] - pub index_uid: Option, - pub status: Status, - // TODO use our own Kind for the user - #[serde(rename = "type")] - pub kind: Kind, - - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub error: Option, - - #[serde( - serialize_with = "serialize_duration", - skip_serializing_if = "Option::is_none", - default - )] - pub duration: Option, - #[serde(with = "time::serde::rfc3339")] - pub enqueued_at: OffsetDateTime, - #[serde( - with = "time::serde::rfc3339::option", - skip_serializing_if = "Option::is_none", - default - )] - pub started_at: Option, - #[serde( - with = "time::serde::rfc3339::option", - skip_serializing_if = "Option::is_none", - default - )] - pub finished_at: Option, -} +use crate::TaskId; #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -109,38 +68,6 @@ impl Task { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum Status { - Enqueued, - Processing, - Succeeded, - Failed, -} -impl Display for Status { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Status::Enqueued => write!(f, "enqueued"), - Status::Processing => write!(f, "processing"), - Status::Succeeded => write!(f, "succeeded"), - Status::Failed => write!(f, "failed"), - } - } -} -impl FromStr for Status { - type Err = Error; - - fn from_str(s: &str) -> Result { - match s { - "enqueued" => Ok(Status::Enqueued), - "processing" => Ok(Status::Processing), - "succeeded" => Ok(Status::Succeeded), - "failed" => Ok(Status::Failed), - s => Err(Error::InvalidStatus(s.to_string())), - } - } -} - #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum KindWithContent { @@ -330,219 +257,6 @@ impl KindWithContent { } } 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum Kind { - DocumentImport { - method: IndexDocumentsMethod, - allow_index_creation: bool, - }, - DocumentDeletion, - DocumentClear, - Settings { - allow_index_creation: bool, - }, - IndexCreation, - IndexDeletion, - IndexUpdate, - IndexSwap, - CancelTask, - DeleteTasks, - DumpExport, - Snapshot, -} - -impl FromStr for Kind { - type Err = Error; - - fn from_str(s: &str) -> Result { - match s { - "document_addition" => Ok(Kind::DocumentImport { - method: IndexDocumentsMethod::ReplaceDocuments, - // TODO this doesn't make sense - allow_index_creation: false, - }), - "document_update" => Ok(Kind::DocumentImport { - method: IndexDocumentsMethod::UpdateDocuments, - // TODO this doesn't make sense - allow_index_creation: false, - }), - "document_deletion" => Ok(Kind::DocumentDeletion), - "document_clear" => Ok(Kind::DocumentClear), - "settings" => Ok(Kind::Settings { - // TODO this doesn't make sense - allow_index_creation: false, - }), - "index_creation" => Ok(Kind::IndexCreation), - "index_deletion" => Ok(Kind::IndexDeletion), - "index_update" => Ok(Kind::IndexUpdate), - "index_swap" => Ok(Kind::IndexSwap), - "cancel_task" => Ok(Kind::CancelTask), - "delete_tasks" => Ok(Kind::DeleteTasks), - "dump_export" => Ok(Kind::DumpExport), - "snapshot" => Ok(Kind::Snapshot), - s => Err(Error::InvalidKind(s.to_string())), - } - } -} - -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -#[allow(clippy::large_enum_variant)] -pub enum Details { - DocumentAddition { - received_documents: u64, - indexed_documents: u64, - }, - Settings { - settings: Settings, - }, - IndexInfo { - primary_key: Option, - }, - DocumentDeletion { - received_document_ids: usize, - // TODO why is this optional? 
- deleted_documents: Option, - }, - ClearAll { - deleted_documents: Option, - }, - DeleteTasks { - matched_tasks: usize, - deleted_tasks: Option, - original_query: String, - }, - Dump { - dump_uid: String, - }, -} - -#[derive(Default, Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct DetailsView { - #[serde(skip_serializing_if = "Option::is_none")] - received_documents: Option, - #[serde(skip_serializing_if = "Option::is_none")] - indexed_documents: Option, - #[serde(skip_serializing_if = "Option::is_none")] - primary_key: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - received_document_ids: Option, - #[serde(skip_serializing_if = "Option::is_none")] - deleted_documents: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - matched_tasks: Option, - #[serde(skip_serializing_if = "Option::is_none")] - deleted_tasks: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - original_query: Option, - #[serde(skip_serializing_if = "Option::is_none")] - dump_uid: Option, - #[serde(skip_serializing_if = "Option::is_none")] - #[serde(flatten)] - settings: Option>, -} - -impl Details { - fn as_details_view(&self) -> DetailsView { - match self.clone() { - Details::DocumentAddition { - received_documents, - indexed_documents, - } => DetailsView { - received_documents: Some(received_documents), - indexed_documents: Some(indexed_documents), - ..DetailsView::default() - }, - Details::Settings { settings } => DetailsView { - settings: Some(settings), - ..DetailsView::default() - }, - Details::IndexInfo { primary_key } => DetailsView { - primary_key: Some(primary_key), - ..DetailsView::default() - }, - Details::DocumentDeletion { - received_document_ids, - deleted_documents, - } => DetailsView { - received_document_ids: Some(received_document_ids), - deleted_documents: Some(deleted_documents), - ..DetailsView::default() - }, - Details::ClearAll { deleted_documents } => DetailsView { - deleted_documents: 
Some(deleted_documents), - ..DetailsView::default() - }, - Details::DeleteTasks { - matched_tasks, - deleted_tasks, - original_query, - } => DetailsView { - matched_tasks: Some(matched_tasks), - deleted_tasks: Some(deleted_tasks), - original_query: Some(original_query), - ..DetailsView::default() - }, - Details::Dump { dump_uid } => DetailsView { - dump_uid: Some(dump_uid), - ..DetailsView::default() - }, - } - } -} - -/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for -/// https://github.com/time-rs/time/issues/378. -/// This code is a port of the old code of time that was removed in 0.2. -fn serialize_duration( - duration: &Option, - serializer: S, -) -> Result { - match duration { - Some(duration) => { - // technically speaking, negative duration is not valid ISO 8601 - if duration.is_negative() { - return serializer.serialize_none(); - } - - const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds(); - let secs = duration.whole_seconds(); - let days = secs / SECS_PER_DAY; - let secs = secs - days * SECS_PER_DAY; - let hasdate = days != 0; - let nanos = duration.subsec_nanoseconds(); - let hastime = (secs != 0 || nanos != 0) || !hasdate; - - // all the following unwrap can't fail - let mut res = String::new(); - write!(&mut res, "P").unwrap(); - - if hasdate { - write!(&mut res, "{}D", days).unwrap(); - } - - const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds(); - const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds(); - - if hastime { - if nanos == 0 { - write!(&mut res, "T{}S", secs).unwrap(); - } else if nanos % NANOS_PER_MILLI == 0 { - write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap(); - } else if nanos % NANOS_PER_MICRO == 0 { - write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap(); - } else { - write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap(); - } - } - - serializer.serialize_str(&res) - } - None => serializer.serialize_none(), - } -} - #[cfg(test)] mod 
tests { use meilisearch_types::heed::{types::SerdeJson, BytesDecode, BytesEncode}; diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs index 8a35ee387..640d43df8 100644 --- a/index-scheduler/src/utils.rs +++ b/index-scheduler/src/utils.rs @@ -4,10 +4,8 @@ use meilisearch_types::heed::{types::DecodeIgnore, RoTxn, RwTxn}; use meilisearch_types::milli::BEU32; use roaring::RoaringBitmap; -use crate::{ - task::{Kind, Status, Task}, - Error, IndexScheduler, Result, TaskId, -}; +use crate::{Error, IndexScheduler, Result, Task, TaskId}; +use meilisearch_types::tasks::{Kind, Status}; impl IndexScheduler { pub(crate) fn last_task_id(&self, rtxn: &RoTxn) -> Result> { diff --git a/index/src/lib.rs b/index/src/lib.rs deleted file mode 100644 index 5909cf8d6..000000000 --- a/index/src/lib.rs +++ /dev/null @@ -1,16 +0,0 @@ -pub use milli; -pub use search::{ - all_documents, perform_search, retrieve_document, retrieve_documents, settings, - MatchingStrategy, SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, - DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, -}; -pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecked}; - -use serde_json::{Map, Value}; - -// mod dump; -pub mod error; -mod search; -pub mod updates; - -pub type Document = Map; diff --git a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index d036b719a..e1f493a47 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -6,13 +6,14 @@ use actix_web::HttpMessage; use actix_web::{web, HttpRequest, HttpResponse}; use bstr::ByteSlice; use futures::StreamExt; -use index_scheduler::{IndexScheduler, KindWithContent, TaskView}; +use index_scheduler::{IndexScheduler, KindWithContent}; use log::debug; use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType}; use 
meilisearch_types::error::ResponseError; use meilisearch_types::heed::RoTxn; use meilisearch_types::milli::update::IndexDocumentsMethod; use meilisearch_types::star_or::StarOr; +use meilisearch_types::tasks::TaskView; use meilisearch_types::{milli, Document, Index}; use mime::Mime; use once_cell::sync::Lazy; diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index 6fd2066cf..9b2ed0d2b 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -1,9 +1,10 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; -use index_scheduler::{IndexScheduler, KindWithContent, Query, Status}; +use index_scheduler::{IndexScheduler, KindWithContent, Query}; use log::debug; use meilisearch_types::error::ResponseError; use meilisearch_types::milli::{self, FieldDistribution, Index}; +use meilisearch_types::tasks::Status; use serde::{Deserialize, Serialize}; use serde_json::json; use time::OffsetDateTime; diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index b47c0f0cb..6ac3733f7 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -2,11 +2,12 @@ use std::collections::BTreeMap; use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; -use index_scheduler::{IndexScheduler, Query, Status}; +use index_scheduler::{IndexScheduler, Query}; use log::debug; use meilisearch_types::error::ResponseError; use meilisearch_types::settings::{Settings, Unchecked}; use meilisearch_types::star_or::StarOr; +use meilisearch_types::tasks::Status; use serde::{Deserialize, Serialize}; use serde_json::json; use time::OffsetDateTime; diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 7622173cc..25385e2bf 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -1,10 +1,10 @@ use actix_web::web::Data; use 
actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::{IndexScheduler, TaskId}; -use index_scheduler::{Kind, Status}; use meilisearch_types::error::ResponseError; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::star_or::StarOr; +use meilisearch_types::tasks::{Kind, Status}; use serde::Deserialize; use serde_cs::vec::CS; use serde_json::json; diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index ea6710c5e..dfbf9edf7 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -13,6 +13,7 @@ proptest = { version = "1.0.0", optional = true } proptest-derive = { version = "0.3.0", optional = true } serde = { version = "1.0.145", features = ["derive"] } serde_json = "1.0.85" +time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } tokio = "1.0" [dev-dependencies] diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs index 1d2ba0ffd..674bf24ac 100644 --- a/meilisearch-types/src/lib.rs +++ b/meilisearch-types/src/lib.rs @@ -3,6 +3,7 @@ pub mod error; pub mod index_uid; pub mod settings; pub mod star_or; +pub mod tasks; pub use milli; pub use milli::heed; diff --git a/meilisearch-types/src/tasks.rs b/meilisearch-types/src/tasks.rs new file mode 100644 index 000000000..2aa2dabb3 --- /dev/null +++ b/meilisearch-types/src/tasks.rs @@ -0,0 +1,378 @@ +use milli::update::IndexDocumentsMethod; +use serde::{Deserialize, Serialize, Serializer}; +use std::{ + fmt::{Display, Write}, + str::FromStr, +}; +use time::{Duration, OffsetDateTime}; + +use crate::{ + error::{Code, ResponseError}, + settings::{Settings, Unchecked}, +}; + +pub type TaskId = u32; + +#[derive(Debug, Clone, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct TaskView { + pub uid: TaskId, + #[serde(default)] + pub index_uid: Option, + pub status: Status, + // TODO use our own Kind for the user + #[serde(rename = "type")] + pub kind: Kind, + + 
#[serde(skip_serializing_if = "Option::is_none")] + pub details: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, + + #[serde( + serialize_with = "serialize_duration", + skip_serializing_if = "Option::is_none", + default + )] + pub duration: Option, + #[serde(with = "time::serde::rfc3339")] + pub enqueued_at: OffsetDateTime, + #[serde( + with = "time::serde::rfc3339::option", + skip_serializing_if = "Option::is_none", + default + )] + pub started_at: Option, + #[serde( + with = "time::serde::rfc3339::option", + skip_serializing_if = "Option::is_none", + default + )] + pub finished_at: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TaskDump { + pub uid: TaskId, + #[serde(default)] + pub index_uid: Option, + pub status: Status, + // TODO use our own Kind for the user + #[serde(rename = "type")] + pub kind: Kind, + + #[serde(skip_serializing_if = "Option::is_none")] + pub details: Option
, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, + + #[serde(with = "time::serde::rfc3339")] + pub enqueued_at: OffsetDateTime, + #[serde( + with = "time::serde::rfc3339::option", + skip_serializing_if = "Option::is_none", + default + )] + pub started_at: Option, + #[serde( + with = "time::serde::rfc3339::option", + skip_serializing_if = "Option::is_none", + default + )] + pub finished_at: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum Status { + Enqueued, + Processing, + Succeeded, + Failed, +} + +impl Display for Status { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Status::Enqueued => write!(f, "enqueued"), + Status::Processing => write!(f, "processing"), + Status::Succeeded => write!(f, "succeeded"), + Status::Failed => write!(f, "failed"), + } + } +} + +impl FromStr for Status { + type Err = ResponseError; + + fn from_str(s: &str) -> Result { + match s { + "enqueued" => Ok(Status::Enqueued), + "processing" => Ok(Status::Processing), + "succeeded" => Ok(Status::Succeeded), + "failed" => Ok(Status::Failed), + s => Err(ResponseError::from_msg( + format!("`{}` is not a status. 
Available statuses are `enqueued`, `processing`, `succeeded`, `failed`", s), + Code::BadRequest, + )), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum Kind { + DocumentImport { + method: IndexDocumentsMethod, + allow_index_creation: bool, + }, + DocumentDeletion, + DocumentClear, + Settings { + allow_index_creation: bool, + }, + IndexCreation, + IndexDeletion, + IndexUpdate, + IndexSwap, + CancelTask, + DeleteTasks, + DumpExport, + Snapshot, +} + +impl FromStr for Kind { + type Err = ResponseError; + + fn from_str(s: &str) -> Result { + match s { + "document_addition" => Ok(Kind::DocumentImport { + method: IndexDocumentsMethod::ReplaceDocuments, + // TODO this doesn't make sense + allow_index_creation: false, + }), + "document_update" => Ok(Kind::DocumentImport { + method: IndexDocumentsMethod::UpdateDocuments, + // TODO this doesn't make sense + allow_index_creation: false, + }), + "document_deletion" => Ok(Kind::DocumentDeletion), + "document_clear" => Ok(Kind::DocumentClear), + "settings" => Ok(Kind::Settings { + // TODO this doesn't make sense + allow_index_creation: false, + }), + "index_creation" => Ok(Kind::IndexCreation), + "index_deletion" => Ok(Kind::IndexDeletion), + "index_update" => Ok(Kind::IndexUpdate), + "index_swap" => Ok(Kind::IndexSwap), + "cancel_task" => Ok(Kind::CancelTask), + "delete_tasks" => Ok(Kind::DeleteTasks), + "dump_export" => Ok(Kind::DumpExport), + "snapshot" => Ok(Kind::Snapshot), + s => Err(ResponseError::from_msg( + format!("`{}` is not a type. 
Available types are `document_addition`, `document_update`, `document_deletion`, `document_clear`, `settings`, `index_creation`, `index_deletion`, `index_update`, `index_swap`, `cancel_task`, `delete_tasks`, `dump_export`, `snapshot`", s), + Code::BadRequest, + )), + } + } +} + +#[derive(Default, Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DetailsView { + #[serde(skip_serializing_if = "Option::is_none")] + pub received_documents: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub indexed_documents: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub primary_key: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub received_document_ids: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub deleted_documents: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub matched_tasks: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub deleted_tasks: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub original_query: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub dump_uid: Option, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(flatten)] + pub settings: Option>, +} + +// A `Kind` specific version made for the dump. If modified you may break the dump. 
+#[derive(Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum KindDump { + DocumentImport { + primary_key: Option, + method: IndexDocumentsMethod, + documents_count: u64, + allow_index_creation: bool, + }, + DocumentDeletion { + documents_ids: Vec, + }, + DocumentClear, + Settings { + new_settings: Settings, + is_deletion: bool, + allow_index_creation: bool, + }, + IndexDeletion, + IndexCreation { + primary_key: Option, + }, + IndexUpdate { + primary_key: Option, + }, + IndexSwap { + lhs: String, + rhs: String, + }, + CancelTask { + tasks: Vec, + }, + DeleteTasks { + query: String, + tasks: Vec, + }, + DumpExport, + Snapshot, +} + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +#[allow(clippy::large_enum_variant)] +pub enum Details { + DocumentAddition { + received_documents: u64, + indexed_documents: u64, + }, + Settings { + settings: Settings, + }, + IndexInfo { + primary_key: Option, + }, + DocumentDeletion { + received_document_ids: usize, + // TODO why is this optional? 
+ deleted_documents: Option, + }, + ClearAll { + deleted_documents: Option, + }, + DeleteTasks { + matched_tasks: usize, + deleted_tasks: Option, + original_query: String, + }, + Dump { + dump_uid: String, + }, +} + +impl Details { + pub fn as_details_view(&self) -> DetailsView { + match self.clone() { + Details::DocumentAddition { + received_documents, + indexed_documents, + } => DetailsView { + received_documents: Some(received_documents), + indexed_documents: Some(indexed_documents), + ..DetailsView::default() + }, + Details::Settings { settings } => DetailsView { + settings: Some(settings), + ..DetailsView::default() + }, + Details::IndexInfo { primary_key } => DetailsView { + primary_key: Some(primary_key), + ..DetailsView::default() + }, + Details::DocumentDeletion { + received_document_ids, + deleted_documents, + } => DetailsView { + received_document_ids: Some(received_document_ids), + deleted_documents: Some(deleted_documents), + ..DetailsView::default() + }, + Details::ClearAll { deleted_documents } => DetailsView { + deleted_documents: Some(deleted_documents), + ..DetailsView::default() + }, + Details::DeleteTasks { + matched_tasks, + deleted_tasks, + original_query, + } => DetailsView { + matched_tasks: Some(matched_tasks), + deleted_tasks: Some(deleted_tasks), + original_query: Some(original_query), + ..DetailsView::default() + }, + Details::Dump { dump_uid } => DetailsView { + dump_uid: Some(dump_uid), + ..DetailsView::default() + }, + } + } +} + +/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for +/// https://github.com/time-rs/time/issues/378. +/// This code is a port of the old code of time that was removed in 0.2. 
+fn serialize_duration( + duration: &Option, + serializer: S, +) -> Result { + match duration { + Some(duration) => { + // technically speaking, negative duration is not valid ISO 8601 + if duration.is_negative() { + return serializer.serialize_none(); + } + + const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds(); + let secs = duration.whole_seconds(); + let days = secs / SECS_PER_DAY; + let secs = secs - days * SECS_PER_DAY; + let hasdate = days != 0; + let nanos = duration.subsec_nanoseconds(); + let hastime = (secs != 0 || nanos != 0) || !hasdate; + + // all the following unwrap can't fail + let mut res = String::new(); + write!(&mut res, "P").unwrap(); + + if hasdate { + write!(&mut res, "{}D", days).unwrap(); + } + + const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds(); + const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds(); + + if hastime { + if nanos == 0 { + write!(&mut res, "T{}S", secs).unwrap(); + } else if nanos % NANOS_PER_MILLI == 0 { + write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap(); + } else if nanos % NANOS_PER_MICRO == 0 { + write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap(); + } else { + write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap(); + } + } + + serializer.serialize_str(&res) + } + None => serializer.serialize_none(), + } +}