start moving a lot of task types to meilisearch_types

Authored by Tamo on 2022-10-12 00:43:24 +02:00, committed by Clément Renault
parent fa4c1de019
commit 510ce9fc51
GPG key ID: 92ADA4E935E71FA4 (key not found in database)
18 changed files with 416 additions and 347 deletions

Cargo.lock (generated)

@@ -2368,6 +2368,7 @@ dependencies = [
  "proptest-derive",
  "serde",
  "serde_json",
+ "time 0.3.14",
  "tokio",
 ]

(next file; path not captured)

@@ -52,14 +52,12 @@ pub(crate) mod test {
     };
     use big_s::S;
-    use index_scheduler::{
-        task::{Details, DetailsView},
-        Kind, Status, TaskView,
-    };
+    use index_scheduler::task::Details;
     use maplit::btreeset;
     use meilisearch_auth::{Action, Key};
     use meilisearch_types::milli::{self, update::Setting};
     use meilisearch_types::settings::{Checked, Settings};
+    use meilisearch_types::tasks::{DetailsView, Kind, Status, TaskView};
     use meilisearch_types::{index_uid::IndexUid, star_or::StarOr};
     use serde_json::{json, Map, Value};
     use time::{macros::datetime, Duration};

(next file; path not captured)

@@ -102,9 +102,7 @@ impl CompatV5ToV6 {
                     },
                     v5::tasks::TaskContent::Dump { .. } => v6::Kind::DumpExport,
                 },
-                details: todo!(),
-                /*
-                task_view.details.map(|details| match details {
+                details: task_view.details.map(|details| match details {
                     v5::Details::DocumentAddition {
                         received_documents,
                         indexed_documents,
@@ -130,9 +128,7 @@ impl CompatV5ToV6 {
                     }
                     v5::Details::Dump { dump_uid } => v6::Details::Dump { dump_uid },
                 }),
-                */
                 error: task_view.error.map(|e| e.into()),
-                duration: task_view.duration,
                 enqueued_at: task_view.enqueued_at,
                 started_at: task_view.started_at,
                 finished_at: task_view.finished_at,

(next file; path not captured)

@@ -21,14 +21,14 @@ pub type Settings<T> = meilisearch_types::settings::Settings<T>;
 pub type Checked = meilisearch_types::settings::Checked;
 pub type Unchecked = meilisearch_types::settings::Unchecked;
-pub type Task = index_scheduler::TaskView;
+pub type Task = meilisearch_types::tasks::TaskDump;
 pub type Key = meilisearch_auth::Key;

 // ===== Other types to clarify the code of the compat module
 // everything related to the tasks
-pub type Status = index_scheduler::Status;
-pub type Kind = index_scheduler::Kind;
-pub type Details = index_scheduler::Details;
+pub type Status = meilisearch_types::tasks::Status;
+pub type Kind = meilisearch_types::tasks::Kind;
+pub type Details = meilisearch_types::tasks::Details;

 // everything related to the settings
 pub type Setting<T> = meilisearch_types::milli::update::Setting<T>;
@@ -109,11 +109,8 @@ impl V6Reader {
         &mut self,
     ) -> Box<dyn Iterator<Item = Result<(Task, Option<Box<super::UpdateFile>>)>> + '_> {
         Box::new((&mut self.tasks).lines().map(|line| -> Result<_> {
-            let mut task: index_scheduler::TaskView = todo!(); // serde_json::from_str(&line?)?;
-            // TODO: this can be removed once we can `Deserialize` the duration from the `TaskView`.
-            if let Some((started_at, finished_at)) = task.started_at.zip(task.finished_at) {
-                task.duration = Some(finished_at - started_at);
-            }
+            let task: Task = serde_json::from_str(&line?)?;

             let update_file_path = self
                 .dump
                 .path()
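A v6 dump now stores `TaskDump` values, which derive `Deserialize` and carry no `duration` field, so each line of the tasks queue parses directly. A minimal sketch of that read path (the JSON values below are illustrative, not taken from a real dump):

use meilisearch_types::tasks::TaskDump;

fn parse_line(line: &str) -> Result<TaskDump, serde_json::Error> {
    // One line of the dump's tasks queue is one JSON-encoded TaskDump;
    // the duration is gone and can be recomputed from startedAt/finishedAt.
    serde_json::from_str(line)
}

fn main() {
    let line = r#"{"uid":0,"indexUid":"movies","status":"succeeded","type":"documentClear","enqueuedAt":"2022-10-12T00:43:24Z"}"#;
    let task = parse_line(line).unwrap();
    assert_eq!(task.uid, 0);
    assert_eq!(task.index_uid.as_deref(), Some("movies"));
}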

(next file; path not captured)

@@ -5,9 +5,9 @@ use std::{
 };

 use flate2::{write::GzEncoder, Compression};
-use index_scheduler::TaskView;
 use meilisearch_auth::Key;
 use meilisearch_types::settings::{Checked, Settings};
+use meilisearch_types::tasks::TaskDump;
 use serde_json::{Map, Value};
 use tempfile::TempDir;
 use time::OffsetDateTime;
@@ -105,17 +105,12 @@ impl TaskWriter {
     /// Pushes tasks in the dump.
     /// If the tasks has an associated `update_file` it'll use the `task_id` as its name.
-    pub fn push_task(&mut self, task: &TaskView) -> Result<UpdateFile> {
-        // TODO: this could be removed the day we implements `Deserialize` on the Duration.
-        let mut task = task.clone();
-        task.duration = None;
-        self.queue.write_all(&serde_json::to_vec(&task)?)?;
+    pub fn push_task(&mut self, task: &TaskDump) -> Result<UpdateFile> {
+        self.queue.write_all(&serde_json::to_vec(task)?)?;
         self.queue.write_all(b"\n")?;
         Ok(UpdateFile::new(
-            self.update_files
-                .join(format!("{}.jsonl", task.uid.to_string())),
+            self.update_files.join(format!("{}.jsonl", task.uid)),
         ))
     }
 }
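On the write side, `push_task` now serializes the `TaskDump` as a single JSON line and names the returned update file after the task uid. A hedged usage sketch, assuming it lives inside the dump writer module so `TaskWriter` and its `Result` alias are in scope (`dump_tasks` is a hypothetical helper, not part of the commit):

use meilisearch_types::tasks::TaskDump;

// Hypothetical helper: write every task into the dump's task queue.
fn dump_tasks(writer: &mut TaskWriter, tasks: &[TaskDump]) -> Result<()> {
    for task in tasks {
        // Returns an UpdateFile handle named "<uid>.jsonl" (e.g. "0.jsonl"),
        // into which the task's associated documents can then be written.
        let _update_file = writer.push_task(task)?;
    }
    Ok(())
}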

(next file; path not captured)

@@ -1,10 +1,9 @@
 use meilisearch_types::milli::update::IndexDocumentsMethod::{
     self, ReplaceDocuments, UpdateDocuments,
 };
+use meilisearch_types::tasks::{Kind, TaskId};
 use std::ops::ControlFlow::{self, Break, Continue};

-use crate::{task::Kind, TaskId};
-
 #[derive(Debug)]
 pub enum BatchKind {
     DocumentClear {

(next file; path not captured)

@@ -1,8 +1,11 @@
 use crate::{
     autobatcher::BatchKind,
-    task::{Details, Kind, KindWithContent, Status, Task},
+    task::{KindWithContent, Task},
     Error, IndexScheduler, Result, TaskId,
 };
+use meilisearch_types::tasks::{Details, Kind, Status};
+
 use log::{debug, info};
 use meilisearch_types::milli::update::IndexDocumentsConfig;
 use meilisearch_types::milli::update::{

(next file; path not captured)

@@ -11,7 +11,8 @@ pub type Result<T> = std::result::Result<T, Error>;
 pub type TaskId = u32;

 pub use error::Error;
-pub use task::{Details, Kind, KindWithContent, Status, TaskView};
+use meilisearch_types::tasks::{Kind, Status, TaskView};
+pub use task::KindWithContent;

 use std::path::PathBuf;
 use std::sync::{Arc, RwLock};
@@ -34,7 +35,7 @@ use crate::task::Task;

 const DEFAULT_LIMIT: fn() -> u32 = || 20;

-#[derive(derive_builder::Builder, Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 pub struct Query {
     #[serde(default = "DEFAULT_LIMIT")]

(next file; path not captured)

@@ -3,54 +3,13 @@ use meilisearch_types::error::ResponseError;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
 use meilisearch_types::settings::{Settings, Unchecked};
-use serde::{Deserialize, Serialize, Serializer};
-use std::{
-    fmt::{Display, Write},
-    path::PathBuf,
-    str::FromStr,
-};
-use time::{Duration, OffsetDateTime};
+use meilisearch_types::tasks::{Details, Kind, Status, TaskView};
+use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
+use time::OffsetDateTime;
 use uuid::Uuid;

-use crate::{Error, TaskId};
+use crate::TaskId;

-#[derive(Debug, Clone, PartialEq, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TaskView {
-    pub uid: TaskId,
-    #[serde(default)]
-    pub index_uid: Option<String>,
-    pub status: Status,
-    // TODO use our own Kind for the user
-    #[serde(rename = "type")]
-    pub kind: Kind,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub details: Option<DetailsView>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub error: Option<ResponseError>,
-    #[serde(
-        serialize_with = "serialize_duration",
-        skip_serializing_if = "Option::is_none",
-        default
-    )]
-    pub duration: Option<Duration>,
-    #[serde(with = "time::serde::rfc3339")]
-    pub enqueued_at: OffsetDateTime,
-    #[serde(
-        with = "time::serde::rfc3339::option",
-        skip_serializing_if = "Option::is_none",
-        default
-    )]
-    pub started_at: Option<OffsetDateTime>,
-    #[serde(
-        with = "time::serde::rfc3339::option",
-        skip_serializing_if = "Option::is_none",
-        default
-    )]
-    pub finished_at: Option<OffsetDateTime>,
-}
-
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
@@ -109,38 +68,6 @@ impl Task {
     }
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum Status {
-    Enqueued,
-    Processing,
-    Succeeded,
-    Failed,
-}
-
-impl Display for Status {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Status::Enqueued => write!(f, "enqueued"),
-            Status::Processing => write!(f, "processing"),
-            Status::Succeeded => write!(f, "succeeded"),
-            Status::Failed => write!(f, "failed"),
-        }
-    }
-}
-
-impl FromStr for Status {
-    type Err = Error;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "enqueued" => Ok(Status::Enqueued),
-            "processing" => Ok(Status::Processing),
-            "succeeded" => Ok(Status::Succeeded),
-            "failed" => Ok(Status::Failed),
-            s => Err(Error::InvalidStatus(s.to_string())),
-        }
-    }
-}
-
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub enum KindWithContent {
@@ -330,219 +257,6 @@ impl KindWithContent {
     }
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum Kind {
-    DocumentImport {
-        method: IndexDocumentsMethod,
-        allow_index_creation: bool,
-    },
-    DocumentDeletion,
-    DocumentClear,
-    Settings {
-        allow_index_creation: bool,
-    },
-    IndexCreation,
-    IndexDeletion,
-    IndexUpdate,
-    IndexSwap,
-    CancelTask,
-    DeleteTasks,
-    DumpExport,
-    Snapshot,
-}
-
-impl FromStr for Kind {
-    type Err = Error;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "document_addition" => Ok(Kind::DocumentImport {
-                method: IndexDocumentsMethod::ReplaceDocuments,
-                // TODO this doesn't make sense
-                allow_index_creation: false,
-            }),
-            "document_update" => Ok(Kind::DocumentImport {
-                method: IndexDocumentsMethod::UpdateDocuments,
-                // TODO this doesn't make sense
-                allow_index_creation: false,
-            }),
-            "document_deletion" => Ok(Kind::DocumentDeletion),
-            "document_clear" => Ok(Kind::DocumentClear),
-            "settings" => Ok(Kind::Settings {
-                // TODO this doesn't make sense
-                allow_index_creation: false,
-            }),
-            "index_creation" => Ok(Kind::IndexCreation),
-            "index_deletion" => Ok(Kind::IndexDeletion),
-            "index_update" => Ok(Kind::IndexUpdate),
-            "index_swap" => Ok(Kind::IndexSwap),
-            "cancel_task" => Ok(Kind::CancelTask),
-            "delete_tasks" => Ok(Kind::DeleteTasks),
-            "dump_export" => Ok(Kind::DumpExport),
-            "snapshot" => Ok(Kind::Snapshot),
-            s => Err(Error::InvalidKind(s.to_string())),
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
-#[allow(clippy::large_enum_variant)]
-pub enum Details {
-    DocumentAddition {
-        received_documents: u64,
-        indexed_documents: u64,
-    },
-    Settings {
-        settings: Settings<Unchecked>,
-    },
-    IndexInfo {
-        primary_key: Option<String>,
-    },
-    DocumentDeletion {
-        received_document_ids: usize,
-        // TODO why is this optional?
-        deleted_documents: Option<u64>,
-    },
-    ClearAll {
-        deleted_documents: Option<u64>,
-    },
-    DeleteTasks {
-        matched_tasks: usize,
-        deleted_tasks: Option<usize>,
-        original_query: String,
-    },
-    Dump {
-        dump_uid: String,
-    },
-}
-
-#[derive(Default, Debug, PartialEq, Clone, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DetailsView {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    received_documents: Option<u64>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    indexed_documents: Option<u64>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    primary_key: Option<Option<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    received_document_ids: Option<usize>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    deleted_documents: Option<Option<u64>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    matched_tasks: Option<usize>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    deleted_tasks: Option<Option<usize>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    original_query: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    dump_uid: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    #[serde(flatten)]
-    settings: Option<Settings<Unchecked>>,
-}
-
-impl Details {
-    fn as_details_view(&self) -> DetailsView {
-        match self.clone() {
-            Details::DocumentAddition {
-                received_documents,
-                indexed_documents,
-            } => DetailsView {
-                received_documents: Some(received_documents),
-                indexed_documents: Some(indexed_documents),
-                ..DetailsView::default()
-            },
-            Details::Settings { settings } => DetailsView {
-                settings: Some(settings),
-                ..DetailsView::default()
-            },
-            Details::IndexInfo { primary_key } => DetailsView {
-                primary_key: Some(primary_key),
-                ..DetailsView::default()
-            },
-            Details::DocumentDeletion {
-                received_document_ids,
-                deleted_documents,
-            } => DetailsView {
-                received_document_ids: Some(received_document_ids),
-                deleted_documents: Some(deleted_documents),
-                ..DetailsView::default()
-            },
-            Details::ClearAll { deleted_documents } => DetailsView {
-                deleted_documents: Some(deleted_documents),
-                ..DetailsView::default()
-            },
-            Details::DeleteTasks {
-                matched_tasks,
-                deleted_tasks,
-                original_query,
-            } => DetailsView {
-                matched_tasks: Some(matched_tasks),
-                deleted_tasks: Some(deleted_tasks),
-                original_query: Some(original_query),
-                ..DetailsView::default()
-            },
-            Details::Dump { dump_uid } => DetailsView {
-                dump_uid: Some(dump_uid),
-                ..DetailsView::default()
-            },
-        }
-    }
-}
-
-/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
-/// https://github.com/time-rs/time/issues/378.
-/// This code is a port of the old code of time that was removed in 0.2.
-fn serialize_duration<S: Serializer>(
-    duration: &Option<Duration>,
-    serializer: S,
-) -> Result<S::Ok, S::Error> {
-    match duration {
-        Some(duration) => {
-            // technically speaking, negative duration is not valid ISO 8601
-            if duration.is_negative() {
-                return serializer.serialize_none();
-            }
-
-            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
-            let secs = duration.whole_seconds();
-            let days = secs / SECS_PER_DAY;
-            let secs = secs - days * SECS_PER_DAY;
-            let hasdate = days != 0;
-            let nanos = duration.subsec_nanoseconds();
-            let hastime = (secs != 0 || nanos != 0) || !hasdate;
-
-            // all the following unwrap can't fail
-            let mut res = String::new();
-            write!(&mut res, "P").unwrap();
-
-            if hasdate {
-                write!(&mut res, "{}D", days).unwrap();
-            }
-
-            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
-            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();
-
-            if hastime {
-                if nanos == 0 {
-                    write!(&mut res, "T{}S", secs).unwrap();
-                } else if nanos % NANOS_PER_MILLI == 0 {
-                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
-                } else if nanos % NANOS_PER_MICRO == 0 {
-                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
-                } else {
-                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
-                }
-            }
-
-            serializer.serialize_str(&res)
-        }
-        None => serializer.serialize_none(),
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use meilisearch_types::heed::{types::SerdeJson, BytesDecode, BytesEncode};

(next file; path not captured)

@@ -4,10 +4,8 @@ use meilisearch_types::heed::{types::DecodeIgnore, RoTxn, RwTxn};
 use meilisearch_types::milli::BEU32;
 use roaring::RoaringBitmap;

-use crate::{
-    task::{Kind, Status, Task},
-    Error, IndexScheduler, Result, TaskId,
-};
+use crate::{Error, IndexScheduler, Result, Task, TaskId};
+use meilisearch_types::tasks::{Kind, Status};

 impl IndexScheduler {
     pub(crate) fn last_task_id(&self, rtxn: &RoTxn) -> Result<Option<TaskId>> {

(next file; path not captured)

@@ -1,16 +0,0 @@
-pub use milli;
-pub use search::{
-    all_documents, perform_search, retrieve_document, retrieve_documents, settings,
-    MatchingStrategy, SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
-    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
-};
-pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecked};
-
-use serde_json::{Map, Value};
-
-// mod dump;
-pub mod error;
-mod search;
-pub mod updates;
-
-pub type Document = Map<String, Value>;

(next file; path not captured)

@@ -6,13 +6,14 @@ use actix_web::HttpMessage;
 use actix_web::{web, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
 use futures::StreamExt;
-use index_scheduler::{IndexScheduler, KindWithContent, TaskView};
+use index_scheduler::{IndexScheduler, KindWithContent};
 use log::debug;
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::heed::RoTxn;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
 use meilisearch_types::star_or::StarOr;
+use meilisearch_types::tasks::TaskView;
 use meilisearch_types::{milli, Document, Index};
 use mime::Mime;
 use once_cell::sync::Lazy;

(next file; path not captured)

@@ -1,9 +1,10 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use index_scheduler::{IndexScheduler, KindWithContent, Query, Status};
+use index_scheduler::{IndexScheduler, KindWithContent, Query};
 use log::debug;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::milli::{self, FieldDistribution, Index};
+use meilisearch_types::tasks::Status;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;

(next file; path not captured)

@@ -2,11 +2,12 @@ use std::collections::BTreeMap;

 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use index_scheduler::{IndexScheduler, Query, Status};
+use index_scheduler::{IndexScheduler, Query};
 use log::debug;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::settings::{Settings, Unchecked};
 use meilisearch_types::star_or::StarOr;
+use meilisearch_types::tasks::Status;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;

(next file; path not captured)

@@ -1,10 +1,10 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::{IndexScheduler, TaskId};
-use index_scheduler::{Kind, Status};
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::star_or::StarOr;
+use meilisearch_types::tasks::{Kind, Status};
 use serde::Deserialize;
 use serde_cs::vec::CS;
 use serde_json::json;

(next file; path not captured)

@@ -13,6 +13,7 @@ proptest = { version = "1.0.0", optional = true }
 proptest-derive = { version = "0.3.0", optional = true }
 serde = { version = "1.0.145", features = ["derive"] }
 serde_json = "1.0.85"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = "1.0"

 [dev-dependencies]
[dev-dependencies] [dev-dependencies]

(next file; path not captured)

@@ -3,6 +3,7 @@ pub mod error;
 pub mod index_uid;
 pub mod settings;
 pub mod star_or;
+pub mod tasks;

 pub use milli;
 pub use milli::heed;

(next file; path not captured)

@@ -0,0 +1,378 @@
+use milli::update::IndexDocumentsMethod;
+use serde::{Deserialize, Serialize, Serializer};
+use std::{
+    fmt::{Display, Write},
+    str::FromStr,
+};
+use time::{Duration, OffsetDateTime};
+
+use crate::{
+    error::{Code, ResponseError},
+    settings::{Settings, Unchecked},
+};
+
+pub type TaskId = u32;
+
+#[derive(Debug, Clone, PartialEq, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TaskView {
+    pub uid: TaskId,
+    #[serde(default)]
+    pub index_uid: Option<String>,
+    pub status: Status,
+    // TODO use our own Kind for the user
+    #[serde(rename = "type")]
+    pub kind: Kind,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub details: Option<DetailsView>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub error: Option<ResponseError>,
+    #[serde(
+        serialize_with = "serialize_duration",
+        skip_serializing_if = "Option::is_none",
+        default
+    )]
+    pub duration: Option<Duration>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(
+        with = "time::serde::rfc3339::option",
+        skip_serializing_if = "Option::is_none",
+        default
+    )]
+    pub started_at: Option<OffsetDateTime>,
+    #[serde(
+        with = "time::serde::rfc3339::option",
+        skip_serializing_if = "Option::is_none",
+        default
+    )]
+    pub finished_at: Option<OffsetDateTime>,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TaskDump {
+    pub uid: TaskId,
+    #[serde(default)]
+    pub index_uid: Option<String>,
+    pub status: Status,
+    // TODO use our own Kind for the user
+    #[serde(rename = "type")]
+    pub kind: Kind,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub details: Option<Details>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub error: Option<ResponseError>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(
+        with = "time::serde::rfc3339::option",
+        skip_serializing_if = "Option::is_none",
+        default
+    )]
+    pub started_at: Option<OffsetDateTime>,
+    #[serde(
+        with = "time::serde::rfc3339::option",
+        skip_serializing_if = "Option::is_none",
+        default
+    )]
+    pub finished_at: Option<OffsetDateTime>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub enum Status {
+    Enqueued,
+    Processing,
+    Succeeded,
+    Failed,
+}
+
+impl Display for Status {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Status::Enqueued => write!(f, "enqueued"),
+            Status::Processing => write!(f, "processing"),
+            Status::Succeeded => write!(f, "succeeded"),
+            Status::Failed => write!(f, "failed"),
+        }
+    }
+}
+
+impl FromStr for Status {
+    type Err = ResponseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "enqueued" => Ok(Status::Enqueued),
+            "processing" => Ok(Status::Processing),
+            "succeeded" => Ok(Status::Succeeded),
+            "failed" => Ok(Status::Failed),
+            s => Err(ResponseError::from_msg(
+                format!("`{}` is not a status. Available status are", s),
+                Code::BadRequest,
+            )),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub enum Kind {
+    DocumentImport {
+        method: IndexDocumentsMethod,
+        allow_index_creation: bool,
+    },
+    DocumentDeletion,
+    DocumentClear,
+    Settings {
+        allow_index_creation: bool,
+    },
+    IndexCreation,
+    IndexDeletion,
+    IndexUpdate,
+    IndexSwap,
+    CancelTask,
+    DeleteTasks,
+    DumpExport,
+    Snapshot,
+}
+
+impl FromStr for Kind {
+    type Err = ResponseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "document_addition" => Ok(Kind::DocumentImport {
+                method: IndexDocumentsMethod::ReplaceDocuments,
+                // TODO this doesn't make sense
+                allow_index_creation: false,
+            }),
+            "document_update" => Ok(Kind::DocumentImport {
+                method: IndexDocumentsMethod::UpdateDocuments,
+                // TODO this doesn't make sense
+                allow_index_creation: false,
+            }),
+            "document_deletion" => Ok(Kind::DocumentDeletion),
+            "document_clear" => Ok(Kind::DocumentClear),
+            "settings" => Ok(Kind::Settings {
+                // TODO this doesn't make sense
+                allow_index_creation: false,
+            }),
+            "index_creation" => Ok(Kind::IndexCreation),
+            "index_deletion" => Ok(Kind::IndexDeletion),
+            "index_update" => Ok(Kind::IndexUpdate),
+            "index_swap" => Ok(Kind::IndexSwap),
+            "cancel_task" => Ok(Kind::CancelTask),
+            "delete_tasks" => Ok(Kind::DeleteTasks),
+            "dump_export" => Ok(Kind::DumpExport),
+            "snapshot" => Ok(Kind::Snapshot),
+            s => Err(ResponseError::from_msg(
+                format!("`{}` is not a type. Available types are", s),
+                Code::BadRequest,
+            )),
+        }
+    }
+}
+
+#[derive(Default, Debug, PartialEq, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DetailsView {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub received_documents: Option<u64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub indexed_documents: Option<u64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub primary_key: Option<Option<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub received_document_ids: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub deleted_documents: Option<Option<u64>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub matched_tasks: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub deleted_tasks: Option<Option<usize>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub original_query: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub dump_uid: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(flatten)]
+    pub settings: Option<Settings<Unchecked>>,
+}
+
+// A `Kind` specific version made for the dump. If modified you may break the dump.
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub enum KindDump {
+    DocumentImport {
+        primary_key: Option<String>,
+        method: IndexDocumentsMethod,
+        documents_count: u64,
+        allow_index_creation: bool,
+    },
+    DocumentDeletion {
+        documents_ids: Vec<String>,
+    },
+    DocumentClear,
+    Settings {
+        new_settings: Settings<Unchecked>,
+        is_deletion: bool,
+        allow_index_creation: bool,
+    },
+    IndexDeletion,
+    IndexCreation {
+        primary_key: Option<String>,
+    },
+    IndexUpdate {
+        primary_key: Option<String>,
+    },
+    IndexSwap {
+        lhs: String,
+        rhs: String,
+    },
+    CancelTask {
+        tasks: Vec<TaskId>,
+    },
+    DeleteTasks {
+        query: String,
+        tasks: Vec<TaskId>,
+    },
+    DumpExport,
+    Snapshot,
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+#[allow(clippy::large_enum_variant)]
+pub enum Details {
+    DocumentAddition {
+        received_documents: u64,
+        indexed_documents: u64,
+    },
+    Settings {
+        settings: Settings<Unchecked>,
+    },
+    IndexInfo {
+        primary_key: Option<String>,
+    },
+    DocumentDeletion {
+        received_document_ids: usize,
+        // TODO why is this optional?
+        deleted_documents: Option<u64>,
+    },
+    ClearAll {
+        deleted_documents: Option<u64>,
+    },
+    DeleteTasks {
+        matched_tasks: usize,
+        deleted_tasks: Option<usize>,
+        original_query: String,
+    },
+    Dump {
+        dump_uid: String,
+    },
+}
+
+impl Details {
+    pub fn as_details_view(&self) -> DetailsView {
+        match self.clone() {
+            Details::DocumentAddition {
+                received_documents,
+                indexed_documents,
+            } => DetailsView {
+                received_documents: Some(received_documents),
+                indexed_documents: Some(indexed_documents),
+                ..DetailsView::default()
+            },
+            Details::Settings { settings } => DetailsView {
+                settings: Some(settings),
+                ..DetailsView::default()
+            },
+            Details::IndexInfo { primary_key } => DetailsView {
+                primary_key: Some(primary_key),
+                ..DetailsView::default()
+            },
+            Details::DocumentDeletion {
+                received_document_ids,
+                deleted_documents,
+            } => DetailsView {
+                received_document_ids: Some(received_document_ids),
+                deleted_documents: Some(deleted_documents),
+                ..DetailsView::default()
+            },
+            Details::ClearAll { deleted_documents } => DetailsView {
+                deleted_documents: Some(deleted_documents),
+                ..DetailsView::default()
+            },
+            Details::DeleteTasks {
+                matched_tasks,
+                deleted_tasks,
+                original_query,
+            } => DetailsView {
+                matched_tasks: Some(matched_tasks),
+                deleted_tasks: Some(deleted_tasks),
+                original_query: Some(original_query),
+                ..DetailsView::default()
+            },
+            Details::Dump { dump_uid } => DetailsView {
+                dump_uid: Some(dump_uid),
+                ..DetailsView::default()
+            },
+        }
+    }
+}
+
+/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
+/// https://github.com/time-rs/time/issues/378.
+/// This code is a port of the old code of time that was removed in 0.2.
+fn serialize_duration<S: Serializer>(
+    duration: &Option<Duration>,
+    serializer: S,
+) -> Result<S::Ok, S::Error> {
+    match duration {
+        Some(duration) => {
+            // technically speaking, negative duration is not valid ISO 8601
+            if duration.is_negative() {
+                return serializer.serialize_none();
+            }
+
+            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
+            let secs = duration.whole_seconds();
+            let days = secs / SECS_PER_DAY;
+            let secs = secs - days * SECS_PER_DAY;
+            let hasdate = days != 0;
+            let nanos = duration.subsec_nanoseconds();
+            let hastime = (secs != 0 || nanos != 0) || !hasdate;
+
+            // all the following unwrap can't fail
+            let mut res = String::new();
+            write!(&mut res, "P").unwrap();
+
+            if hasdate {
+                write!(&mut res, "{}D", days).unwrap();
+            }
+
+            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
+            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();
+
+            if hastime {
+                if nanos == 0 {
+                    write!(&mut res, "T{}S", secs).unwrap();
+                } else if nanos % NANOS_PER_MILLI == 0 {
+                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
+                } else if nanos % NANOS_PER_MICRO == 0 {
+                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
+                } else {
+                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
+                }
+            }
+
+            serializer.serialize_str(&res)
+        }
+        None => serializer.serialize_none(),
+    }
+}
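For reference, a sketch of the strings `serialize_duration` produces, worked out from the code above rather than from any test in this commit. The `Wrap` type is hypothetical, and the sketch assumes it lives in the same module since the function is private:

use serde::Serialize;
use time::Duration;

// Expose the module-private serializer through a throwaway wrapper.
#[derive(Serialize)]
struct Wrap(#[serde(serialize_with = "serialize_duration")] Option<Duration>);

fn main() {
    // Zero duration: no date part, so a time part is forced.
    assert_eq!(serde_json::to_string(&Wrap(Some(Duration::ZERO))).unwrap(), r#""PT0S""#);
    // 1.5 s takes the millisecond branch.
    assert_eq!(
        serde_json::to_string(&Wrap(Some(Duration::milliseconds(1500)))).unwrap(),
        r#""PT1.500S""#
    );
    // One day plus three seconds gets both a date and a time part.
    assert_eq!(
        serde_json::to_string(&Wrap(Some(Duration::seconds(86_403)))).unwrap(),
        r#""P1DT3S""#
    );
    // Negative durations are not valid ISO 8601 and serialize as null.
    assert_eq!(serde_json::to_string(&Wrap(Some(Duration::seconds(-1)))).unwrap(), "null");
}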