Marin Postma 2021-12-02 16:03:26 +01:00
parent c9f3726447
commit a30e02c18c
88 changed files with 5553 additions and 4496 deletions

View file

@ -75,7 +75,30 @@ impl SegmentAnalytics {
let client = HttpClient::default();
let user = User::UserId { user_id };
let batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
// If Meilisearch is launched for the first time:
// 1. Send a `Launched` event associated with the user `total_launch`.
// 2. Batch a `Launched` event with the real instance ID and send it in one hour.
if first_time_run {
let _ = batcher
.push(Track {
user: User::UserId {
user_id: "total_launch".to_string(),
},
event: "Launched".to_string(),
..Default::default()
})
.await;
let _ = batcher.flush().await;
let _ = batcher
.push(Track {
user: user.clone(),
event: "Launched".to_string(),
..Default::default()
})
.await;
}
let (sender, inbox) = mpsc::channel(100); // How many analytics events we can buffer
@ -95,10 +118,6 @@ impl SegmentAnalytics {
sender,
user: user.clone(),
};
// batch the launched for the first time track event
if first_time_run {
this.publish("Launched".to_string(), json!({}), None);
}
(Arc::new(this), user.to_string())
}
@ -216,7 +235,9 @@ impl Segment {
async fn run(mut self, meilisearch: MeiliSearch) {
const INTERVAL: Duration = Duration::from_secs(60 * 60); // one hour
let mut interval = tokio::time::interval(INTERVAL);
// The first batch must be sent after one hour.
let mut interval =
tokio::time::interval_at(tokio::time::Instant::now() + INTERVAL, INTERVAL);
loop {
select! {
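
For context on the `interval_at` change above: `tokio::time::interval` completes its first tick immediately, so the previous code flushed its first (nearly empty) analytics batch right at startup. A minimal editorial sketch of the difference, not part of this diff:

use std::time::Duration;
use tokio::time::{interval, interval_at, Instant};

#[tokio::main]
async fn main() {
    const INTERVAL: Duration = Duration::from_secs(60 * 60);

    // `interval` yields its first tick immediately.
    let mut eager = interval(INTERVAL);
    eager.tick().await; // completes right away

    // `interval_at` delays the first tick until the given start instant,
    // so the first batch goes out one hour after startup.
    let mut delayed = interval_at(Instant::now() + INTERVAL, INTERVAL);
    delayed.tick().await; // completes roughly one hour from now
}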
@ -304,10 +325,8 @@ pub struct SearchAggregator {
used_syntax: HashMap<String, usize>,
// q
// every time a request has a q field, this field must be incremented by the number of terms
sum_of_terms_count: usize,
// every time a request has a q field, this field must be incremented by one
total_number_of_q: usize,
// The maximum number of terms in a q request
max_terms_number: usize,
// pagination
max_limit: usize,
@ -354,8 +373,7 @@ impl SearchAggregator {
}
if let Some(ref q) = query.q {
ret.total_number_of_q = 1;
ret.sum_of_terms_count = q.split_whitespace().count();
ret.max_terms_number = q.split_whitespace().count();
}
ret.max_limit = query.limit;
@ -365,7 +383,7 @@ impl SearchAggregator {
}
pub fn succeed(&mut self, result: &SearchResult) {
self.total_succeeded += 1;
self.total_succeeded = self.total_succeeded.saturating_add(1);
self.time_spent.push(result.processing_time_ms as usize);
}
@ -376,23 +394,31 @@ impl SearchAggregator {
self.user_agents.insert(user_agent);
}
// request
self.total_received += other.total_received;
self.total_succeeded += other.total_succeeded;
self.total_received = self.total_received.saturating_add(other.total_received);
self.total_succeeded = self.total_succeeded.saturating_add(other.total_succeeded);
self.time_spent.append(&mut other.time_spent);
// sort
self.sort_with_geo_point |= other.sort_with_geo_point;
self.sort_sum_of_criteria_terms += other.sort_sum_of_criteria_terms;
self.sort_total_number_of_criteria += other.sort_total_number_of_criteria;
self.sort_sum_of_criteria_terms = self
.sort_sum_of_criteria_terms
.saturating_add(other.sort_sum_of_criteria_terms);
self.sort_total_number_of_criteria = self
.sort_total_number_of_criteria
.saturating_add(other.sort_total_number_of_criteria);
// filter
self.filter_with_geo_radius |= other.filter_with_geo_radius;
self.filter_sum_of_criteria_terms += other.filter_sum_of_criteria_terms;
self.filter_total_number_of_criteria += other.filter_total_number_of_criteria;
self.filter_sum_of_criteria_terms = self
.filter_sum_of_criteria_terms
.saturating_add(other.filter_sum_of_criteria_terms);
self.filter_total_number_of_criteria = self
.filter_total_number_of_criteria
.saturating_add(other.filter_total_number_of_criteria);
for (key, value) in other.used_syntax.into_iter() {
*self.used_syntax.entry(key).or_insert(0) += value;
let used_syntax = self.used_syntax.entry(key).or_insert(0);
*used_syntax = used_syntax.saturating_add(value);
}
// q
self.sum_of_terms_count += other.sum_of_terms_count;
self.total_number_of_q += other.total_number_of_q;
self.max_terms_number = self.max_terms_number.max(other.max_terms_number);
// pagination
self.max_limit = self.max_limit.max(other.max_limit);
self.max_offset = self.max_offset.max(other.max_offset);
@ -407,12 +433,12 @@ impl SearchAggregator {
// we get all the values in a sorted manner
let time_spent = self.time_spent.into_sorted_vec();
// We are only interested in the slowest value among the 99% fastest results
let time_spent = time_spent[percentile_99th as usize];
let time_spent = time_spent.get(percentile_99th as usize);
let properties = json!({
"user-agent": self.user_agents,
"requests": {
"99th_response_time": format!("{:.2}", time_spent),
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
"total_succeeded": self.total_succeeded,
"total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panics
"total_received": self.total_received,
@ -427,7 +453,7 @@ impl SearchAggregator {
"most_used_syntax": self.used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
},
"q": {
"avg_terms_number": format!("{:.2}", self.sum_of_terms_count as f64 / self.total_number_of_q as f64),
"max_terms_number": self.max_terms_number,
},
"pagination": {
"max_limit": self.max_limit,

View file

@ -2,12 +2,8 @@ use std::error::Error;
use std::fmt;
use actix_web as aweb;
use actix_web::body::Body;
use actix_web::http::StatusCode;
use actix_web::HttpResponseBuilder;
use aweb::error::{JsonPayloadError, QueryPayloadError};
use meilisearch_error::{Code, ErrorCode};
use serde::{Deserialize, Serialize};
use meilisearch_error::{Code, ErrorCode, ResponseError};
#[derive(Debug, thiserror::Error)]
pub enum MeilisearchHttpError {
@ -36,54 +32,6 @@ impl From<MeilisearchHttpError> for aweb::Error {
}
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ResponseError {
#[serde(skip)]
code: StatusCode,
message: String,
#[serde(rename = "code")]
error_code: String,
#[serde(rename = "type")]
error_type: String,
#[serde(rename = "link")]
error_link: String,
}
impl fmt::Display for ResponseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.message.fmt(f)
}
}
impl<T> From<T> for ResponseError
where
T: ErrorCode,
{
fn from(other: T) -> Self {
Self {
code: other.http_status(),
message: other.to_string(),
error_code: other.error_name(),
error_type: other.error_type(),
error_link: other.error_url(),
}
}
}
impl aweb::error::ResponseError for ResponseError {
fn error_response(&self) -> aweb::HttpResponse<Body> {
let json = serde_json::to_vec(self).unwrap();
HttpResponseBuilder::new(self.status_code())
.content_type("application/json")
.body(json)
}
fn status_code(&self) -> StatusCode {
self.code
}
}
impl fmt::Display for PayloadError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {

View file

@ -8,8 +8,8 @@ use std::ops::Deref;
use actix_web::FromRequest;
use futures::future::err;
use futures::future::{ok, Ready};
use meilisearch_error::ResponseError;
use crate::error::ResponseError;
use error::AuthenticationError;
macro_rules! create_policies {

View file

@ -1,12 +1,14 @@
#![allow(rustdoc::private_intra_doc_links)]
#[macro_use]
pub mod error;
pub mod analytics;
mod task;
#[macro_use]
pub mod extractors;
pub mod analytics;
pub mod helpers;
pub mod option;
pub mod routes;
use std::sync::Arc;
use std::time::Duration;
@ -53,7 +55,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
let mut meilisearch = MeiliSearch::builder();
meilisearch
.set_max_index_size(opt.max_index_size.get_bytes() as usize)
.set_max_update_store_size(opt.max_udb_size.get_bytes() as usize)
.set_max_task_store_size(opt.max_task_db_size.get_bytes() as usize)
.set_ignore_missing_snapshot(opt.ignore_missing_snapshot)
.set_ignore_snapshot_if_db_exists(opt.ignore_snapshot_if_db_exists)
.set_dump_dst(opt.dumps_dir.clone())
@ -180,7 +182,8 @@ macro_rules! create_app {
use actix_web::middleware::TrailingSlash;
use actix_web::App;
use actix_web::{middleware, web};
use meilisearch_http::error::{MeilisearchHttpError, ResponseError};
use meilisearch_error::ResponseError;
use meilisearch_http::error::MeilisearchHttpError;
use meilisearch_http::routes;
use meilisearch_http::{configure_auth, configure_data, dashboard};

View file

@ -45,8 +45,8 @@ pub struct Opt {
pub max_index_size: Byte,
/// The maximum size, in bytes, of the update lmdb database directory
#[structopt(long, env = "MEILI_MAX_UDB_SIZE", default_value = "100 GiB")]
pub max_udb_size: Byte,
#[structopt(long, env = "MEILI_MAX_TASK_DB_SIZE", default_value = "100 GiB")]
pub max_task_db_size: Byte,
/// The maximum size, in bytes, of accepted JSON payloads
#[structopt(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]

View file

@ -1,11 +1,11 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
pub fn configure(cfg: &mut web::ServiceConfig) {

View file

@ -1,25 +1,38 @@
use actix_web::error::PayloadError;
use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Bytes;
use actix_web::HttpMessage;
use actix_web::{web, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use futures::{Stream, StreamExt};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::MeiliSearch;
use mime::Mime;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde_json::Value;
use tokio::sync::mpsc;
use crate::analytics::Analytics;
use crate::error::{MeilisearchHttpError, ResponseError};
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::payload::Payload;
use crate::routes::IndexParam;
use crate::task::SummarizedTaskView;
const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec![
"application/json".to_string(),
"application/x-ndjson".to_string(),
"text/csv".to_string(),
]
});
/// This is required because Payload is neither Sync nor Send
fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, PayloadError>> {
let (snd, recv) = mpsc::channel(1);
@ -31,6 +44,24 @@ fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, P
tokio_stream::wrappers::ReceiverStream::new(recv)
}
/// Extracts the mime type from the content type and returns
/// a Meilisearch error if anything bad happens.
fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpError> {
match req.mime_type() {
Ok(Some(mime)) => Ok(Some(mime)),
Ok(None) => Ok(None),
Err(_) => match req.headers().get(CONTENT_TYPE) {
Some(content_type) => Err(MeilisearchHttpError::InvalidContentType(
content_type.as_bytes().as_bstr().to_string(),
ACCEPTED_CONTENT_TYPE.clone(),
)),
None => Err(MeilisearchHttpError::MissingContentType(
ACCEPTED_CONTENT_TYPE.clone(),
)),
},
}
}
#[derive(Deserialize)]
pub struct DocumentParam {
index_uid: String,
@ -76,11 +107,9 @@ pub async fn delete_document(
index_uid,
} = path.into_inner();
let update = Update::DeleteDocuments(vec![document_id]);
let update_status = meilisearch
.register_update(index_uid, update, false)
.await?;
debug!("returns: {:?}", update_status);
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
let task: SummarizedTaskView = meilisearch.register_update(index_uid, update).await?.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Deserialize, Debug)]
@ -93,7 +122,7 @@ pub struct BrowseQuery {
pub async fn get_all_documents(
meilisearch: GuardedData<Public, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
@ -110,7 +139,7 @@ pub async fn get_all_documents(
let documents = meilisearch
.documents(
path.index_uid.clone(),
path.into_inner(),
params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
attributes_to_retrieve,
@ -128,91 +157,83 @@ pub struct UpdateDocumentsQuery {
pub async fn add_documents(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
params: web::Query<UpdateDocumentsQuery>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let content_type = req
.headers()
.get("Content-type")
.map(|s| s.to_str().unwrap_or("unkown"));
let params = params.into_inner();
let index_uid = path.into_inner();
analytics.add_documents(
&params,
meilisearch.get_index(path.index_uid.clone()).await.is_err(),
meilisearch.get_index(index_uid.clone()).await.is_err(),
&req,
);
document_addition(
content_type,
let task = document_addition(
extract_mime_type(&req)?,
meilisearch,
path.index_uid.clone(),
index_uid,
params.primary_key,
body,
IndexDocumentsMethod::ReplaceDocuments,
)
.await
.await?;
Ok(HttpResponse::Accepted().json(task))
}
pub async fn update_documents(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
params: web::Query<UpdateDocumentsQuery>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let content_type = req
.headers()
.get("Content-type")
.map(|s| s.to_str().unwrap_or("unkown"));
let index_uid = path.into_inner();
analytics.update_documents(
&params,
meilisearch.get_index(path.index_uid.clone()).await.is_err(),
meilisearch.get_index(index_uid.clone()).await.is_err(),
&req,
);
document_addition(
content_type,
let task = document_addition(
extract_mime_type(&req)?,
meilisearch,
path.into_inner().index_uid,
index_uid,
params.into_inner().primary_key,
body,
IndexDocumentsMethod::UpdateDocuments,
)
.await
.await?;
Ok(HttpResponse::Accepted().json(task))
}
/// Route used when the payload type is "application/json"
/// Used to add or replace documents
async fn document_addition(
content_type: Option<&str>,
mime_type: Option<Mime>,
meilisearch: GuardedData<Private, MeiliSearch>,
index_uid: String,
primary_key: Option<String>,
body: Payload,
method: IndexDocumentsMethod,
) -> Result<HttpResponse, ResponseError> {
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec![
"application/json".to_string(),
"application/x-ndjson".to_string(),
"text/csv".to_string(),
]
});
let format = match content_type {
Some("application/json") => DocumentAdditionFormat::Json,
Some("application/x-ndjson") => DocumentAdditionFormat::Ndjson,
Some("text/csv") => DocumentAdditionFormat::Csv,
Some(other) => {
) -> Result<SummarizedTaskView, ResponseError> {
let format = match mime_type
.as_ref()
.map(|m| (m.type_().as_str(), m.subtype().as_str()))
{
Some(("application", "json")) => DocumentAdditionFormat::Json,
Some(("application", "x-ndjson")) => DocumentAdditionFormat::Ndjson,
Some(("text", "csv")) => DocumentAdditionFormat::Csv,
Some((type_, subtype)) => {
return Err(MeilisearchHttpError::InvalidContentType(
other.to_string(),
format!("{}/{}", type_, subtype),
ACCEPTED_CONTENT_TYPE.clone(),
)
.into())
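
A note on why matching the parsed `Mime` parts above is more robust than the old string comparison: content types may carry parameters such as a charset, which an exact match on the raw header value rejected. A small editorial sketch with the `mime` crate, not part of this diff:

use mime::Mime;

fn main() {
    // The old `Some("application/json")` string match failed on this value;
    // matching on (type, subtype) accepts it and ignores the parameter.
    let m: Mime = "application/json; charset=utf-8".parse().unwrap();
    assert_eq!(
        (m.type_().as_str(), m.subtype().as_str()),
        ("application", "json")
    );
}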
@ -231,15 +252,15 @@ async fn document_addition(
format,
};
let update_status = meilisearch.register_update(index_uid, update, true).await?;
let task = meilisearch.register_update(index_uid, update).await?.into();
debug!("returns: {:?}", update_status);
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
debug!("returns: {:?}", task);
Ok(task)
}
pub async fn delete_documents(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
body: web::Json<Vec<Value>>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", body);
@ -253,21 +274,25 @@ pub async fn delete_documents(
.collect();
let update = Update::DeleteDocuments(ids);
let update_status = meilisearch
.register_update(path.into_inner().index_uid, update, false)
.await?;
debug!("returns: {:?}", update_status);
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
let task: SummarizedTaskView = meilisearch
.register_update(path.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
pub async fn clear_all_documents(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let update = Update::ClearDocuments;
let update_status = meilisearch
.register_update(path.into_inner().index_uid, update, false)
.await?;
debug!("returns: {:?}", update_status);
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
let task: SummarizedTaskView = meilisearch
.register_update(path.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}

View file

@ -1,20 +1,20 @@
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_lib::index_controller::IndexSettings;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::routes::IndexParam;
use crate::task::SummarizedTaskView;
pub mod documents;
pub mod search;
pub mod settings;
pub mod updates;
pub mod tasks;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@ -33,7 +33,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::resource("/stats").route(web::get().to(get_index_stats)))
.service(web::scope("/documents").configure(documents::configure))
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/updates").configure(updates::configure))
.service(web::scope("/tasks").configure(tasks::configure))
.service(web::scope("/settings").configure(settings::configure)),
);
}
@ -59,19 +59,25 @@ pub async fn create_index(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let body = body.into_inner();
let IndexCreateRequest {
primary_key, uid, ..
} = body.into_inner();
analytics.publish(
"Index Created".to_string(),
json!({ "primary_key": body.primary_key}),
json!({ "primary_key": primary_key }),
Some(&req),
);
let meta = meilisearch.create_index(body.uid, body.primary_key).await?;
Ok(HttpResponse::Created().json(meta))
let update = Update::CreateIndex { primary_key };
let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[allow(dead_code)]
pub struct UpdateIndexRequest {
uid: Option<String>,
primary_key: Option<String>,
@ -89,16 +95,16 @@ pub struct UpdateIndexResponse {
pub async fn get_index(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let meta = meilisearch.get_index(path.index_uid.clone()).await?;
let meta = meilisearch.get_index(path.into_inner()).await?;
debug!("returns: {:?}", meta);
Ok(HttpResponse::Ok().json(meta))
}
pub async fn update_index(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
body: web::Json<UpdateIndexRequest>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
@ -110,30 +116,36 @@ pub async fn update_index(
json!({ "primary_key": body.primary_key}),
Some(&req),
);
let settings = IndexSettings {
uid: body.uid,
let update = Update::UpdateIndex {
primary_key: body.primary_key,
};
let meta = meilisearch
.update_index(path.into_inner().index_uid, settings)
.await?;
debug!("returns: {:?}", meta);
Ok(HttpResponse::Ok().json(meta))
let task: SummarizedTaskView = meilisearch
.register_update(path.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
pub async fn delete_index(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
meilisearch.delete_index(path.index_uid.clone()).await?;
Ok(HttpResponse::NoContent().finish())
let uid = path.into_inner();
let update = Update::DeleteIndex;
let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
Ok(HttpResponse::Accepted().json(task))
}
pub async fn get_index_stats(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let response = meilisearch.get_index_stats(path.index_uid.clone()).await?;
let response = meilisearch.get_index_stats(path.into_inner()).await?;
debug!("returns: {:?}", response);
Ok(HttpResponse::Ok().json(response))

View file

@ -1,14 +1,13 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{default_crop_length, SearchQuery, DEFAULT_SEARCH_LIMIT};
use meilisearch_lib::MeiliSearch;
use serde::Deserialize;
use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::routes::IndexParam;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@ -108,7 +107,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
pub async fn search_with_url_query(
meilisearch: GuardedData<Public, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
params: web::Query<SearchQueryGet>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
@ -118,7 +117,7 @@ pub async fn search_with_url_query(
let mut aggregate = SearchAggregator::from_query(&query, &req);
let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
let search_result = meilisearch.search(path.into_inner(), query).await;
if let Ok(ref search_result) = search_result {
aggregate.succeed(search_result);
}
@ -136,7 +135,7 @@ pub async fn search_with_url_query(
pub async fn search_with_post(
meilisearch: GuardedData<Public, MeiliSearch>,
path: web::Path<IndexParam>,
path: web::Path<String>,
params: web::Json<SearchQuery>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
@ -146,7 +145,7 @@ pub async fn search_with_post(
let mut aggregate = SearchAggregator::from_query(&query, &req);
let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
let search_result = meilisearch.search(path.into_inner(), query).await;
if let Ok(ref search_result) = search_result {
aggregate.succeed(search_result);
}

View file

@ -1,28 +1,30 @@
use log::debug;
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use serde_json::json;
use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::task::SummarizedTaskView;
#[macro_export]
macro_rules! make_setting_route {
($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
pub mod $attr {
use actix_web::{web, HttpRequest, HttpResponse, Resource};
use log::debug;
use actix_web::{web, HttpResponse, HttpRequest, Resource};
use meilisearch_lib::milli::update::Setting;
use meilisearch_lib::{MeiliSearch, index::Settings, index_controller::Update};
use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};
use crate::analytics::Analytics;
use crate::error::ResponseError;
use crate::extractors::authentication::{GuardedData, policies::*};
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::task::SummarizedTaskView;
use meilisearch_error::ResponseError;
pub async fn delete(
meilisearch: GuardedData<Private, MeiliSearch>,
@ -32,10 +34,17 @@ macro_rules! make_setting_route {
$attr: Setting::Reset,
..Default::default()
};
let update = Update::Settings(settings);
let update_status = meilisearch.register_update(index_uid.into_inner(), update, false).await?;
debug!("returns: {:?}", update_status);
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
let update = Update::Settings {
settings,
is_deletion: true,
};
let task: SummarizedTaskView = meilisearch
.register_update(index_uid.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
pub async fn update(
@ -43,7 +52,7 @@ macro_rules! make_setting_route {
index_uid: actix_web::web::Path<String>,
body: actix_web::web::Json<Option<$type>>,
req: HttpRequest,
$analytics_var: web::Data< dyn Analytics>,
$analytics_var: web::Data<dyn Analytics>,
) -> std::result::Result<HttpResponse, ResponseError> {
let body = body.into_inner();
@ -52,15 +61,22 @@ macro_rules! make_setting_route {
let settings = Settings {
$attr: match body {
Some(inner_body) => Setting::Set(inner_body),
None => Setting::Reset
None => Setting::Reset,
},
..Default::default()
};
let update = Update::Settings(settings);
let update_status = meilisearch.register_update(index_uid.into_inner(), update, true).await?;
debug!("returns: {:?}", update_status);
Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
let update = Update::Settings {
settings,
is_deletion: false,
};
let task: SummarizedTaskView = meilisearch
.register_update(index_uid.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
pub async fn get(
@ -71,6 +87,7 @@ macro_rules! make_setting_route {
debug!("returns: {:?}", settings);
let mut json = serde_json::json!(&settings);
let val = json[$camelcase_attr].take();
Ok(HttpResponse::Ok().json(val))
}
@ -151,7 +168,7 @@ make_setting_route!(
"SearchableAttributes Updated".to_string(),
json!({
"searchable_attributes": {
"total": setting.as_ref().map(|sort| sort.len()).unwrap_or(0),
"total": setting.as_ref().map(|searchable| searchable.len()).unwrap_or(0),
},
}),
Some(req),
@ -240,6 +257,9 @@ pub async fn update_all(
"ranking_rules": {
"sort_position": settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
},
"searchable_attributes": {
"total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()).unwrap_or(0),
},
"sortable_attributes": {
"total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()).unwrap_or(0),
"has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")).unwrap_or(false),
@ -252,13 +272,17 @@ pub async fn update_all(
Some(&req),
);
let update = Update::Settings(settings);
let update_result = meilisearch
.register_update(index_uid.into_inner(), update, true)
.await?;
let json = serde_json::json!({ "updateId": update_result.id() });
debug!("returns: {:?}", json);
Ok(HttpResponse::Accepted().json(json))
let update = Update::Settings {
settings,
is_deletion: false,
};
let task: SummarizedTaskView = meilisearch
.register_update(index_uid.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
pub async fn get_all(
@ -274,13 +298,17 @@ pub async fn delete_all(
data: GuardedData<Private, MeiliSearch>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let settings = Settings::cleared();
let settings = Settings::cleared().into_unchecked();
let update = Update::Settings(settings.into_unchecked());
let update_result = data
.register_update(index_uid.into_inner(), update, false)
.await?;
let json = serde_json::json!({ "updateId": update_result.id() });
debug!("returns: {:?}", json);
Ok(HttpResponse::Accepted().json(json))
let update = Update::Settings {
settings,
is_deletion: true,
};
let task: SummarizedTaskView = data
.register_update(index_uid.into_inner(), update)
.await?
.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}

View file

@ -0,0 +1,76 @@
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::task::{TaskListView, TaskView};
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(get_all_tasks_status)))
.service(web::resource("{task_id}").route(web::get().to(get_task_status)));
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateIndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
#[derive(Deserialize)]
pub struct UpdateParam {
index_uid: String,
task_id: u64,
}
pub async fn get_task_status(
meilisearch: GuardedData<Private, MeiliSearch>,
index_uid: web::Path<UpdateParam>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Index Tasks Seen".to_string(),
json!({ "per_task_uid": true }),
Some(&req),
);
let UpdateParam { index_uid, task_id } = index_uid.into_inner();
let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Ok().json(task))
}
pub async fn get_all_tasks_status(
meilisearch: GuardedData<Private, MeiliSearch>,
index_uid: web::Path<String>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Index Tasks Seen".to_string(),
json!({ "per_task_uid": false }),
Some(&req),
);
let tasks: TaskListView = meilisearch
.list_index_task(index_uid.into_inner(), None, None)
.await?
.into_iter()
.map(TaskView::from)
.collect::<Vec<_>>()
.into();
debug!("returns: {:?}", tasks);
Ok(HttpResponse::Ok().json(tasks))
}

View file

@ -1,59 +0,0 @@
use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::routes::{IndexParam, UpdateStatusResponse};
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(get_all_updates_status)))
.service(web::resource("{update_id}").route(web::get().to(get_update_status)));
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateIndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
#[derive(Deserialize)]
pub struct UpdateParam {
index_uid: String,
update_id: u64,
}
pub async fn get_update_status(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<UpdateParam>,
) -> Result<HttpResponse, ResponseError> {
let params = path.into_inner();
let meta = meilisearch
.update_status(params.index_uid, params.update_id)
.await?;
let meta = UpdateStatusResponse::from(meta);
debug!("returns: {:?}", meta);
Ok(HttpResponse::Ok().json(meta))
}
pub async fn get_all_updates_status(
meilisearch: GuardedData<Private, MeiliSearch>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let metas = meilisearch
.all_update_status(path.into_inner().index_uid)
.await?;
let metas = metas
.into_iter()
.map(UpdateStatusResponse::from)
.collect::<Vec<_>>();
debug!("returns: {:?}", metas);
Ok(HttpResponse::Ok().json(metas))
}

View file

@ -1,23 +1,22 @@
use std::time::Duration;
use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc};
use log::debug;
use meilisearch_lib::index_controller::updates::status::{UpdateResult, UpdateStatus};
use serde::{Deserialize, Serialize};
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::{MeiliSearch, Update};
use meilisearch_lib::MeiliSearch;
use crate::error::ResponseError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::ApiKeys;
mod dump;
pub mod indexes;
mod tasks;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("/health").route(web::get().to(get_health)))
cfg.service(web::scope("/tasks").configure(tasks::configure))
.service(web::resource("/health").route(web::get().to(get_health)))
.service(web::scope("/dumps").configure(dump::configure))
.service(web::resource("/keys").route(web::get().to(list_keys)))
.service(web::resource("/stats").route(web::get().to(get_stats)))
@ -48,38 +47,6 @@ pub enum UpdateType {
},
}
impl From<&UpdateStatus> for UpdateType {
fn from(other: &UpdateStatus) -> Self {
use meilisearch_lib::milli::update::IndexDocumentsMethod::*;
match other.meta() {
Update::DocumentAddition { method, .. } => {
let number = match other {
UpdateStatus::Processed(processed) => match processed.success {
UpdateResult::DocumentsAddition(ref addition) => {
Some(addition.nb_documents)
}
_ => None,
},
_ => None,
};
match method {
ReplaceDocuments => UpdateType::DocumentsAddition { number },
UpdateDocuments => UpdateType::DocumentsPartial { number },
_ => unreachable!(),
}
}
Update::Settings(settings) => UpdateType::Settings {
settings: settings.clone(),
},
Update::ClearDocuments => UpdateType::ClearAll,
Update::DeleteDocuments(ids) => UpdateType::DocumentsDeletion {
number: Some(ids.len()),
},
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessedUpdateResult {
@ -135,81 +102,6 @@ pub enum UpdateStatusResponse {
},
}
impl From<UpdateStatus> for UpdateStatusResponse {
fn from(other: UpdateStatus) -> Self {
let update_type = UpdateType::from(&other);
match other {
UpdateStatus::Processing(processing) => {
let content = EnqueuedUpdateResult {
update_id: processing.id(),
update_type,
enqueued_at: processing.from.enqueued_at,
started_processing_at: Some(processing.started_processing_at),
};
UpdateStatusResponse::Processing { content }
}
UpdateStatus::Enqueued(enqueued) => {
let content = EnqueuedUpdateResult {
update_id: enqueued.id(),
update_type,
enqueued_at: enqueued.enqueued_at,
started_processing_at: None,
};
UpdateStatusResponse::Enqueued { content }
}
UpdateStatus::Processed(processed) => {
let duration = processed
.processed_at
.signed_duration_since(processed.from.started_processing_at)
.num_milliseconds();
// necessary since chrono::Duration doesn't expose an f64 secs method.
let duration = Duration::from_millis(duration as u64).as_secs_f64();
let content = ProcessedUpdateResult {
update_id: processed.id(),
update_type,
duration,
enqueued_at: processed.from.from.enqueued_at,
processed_at: processed.processed_at,
};
UpdateStatusResponse::Processed { content }
}
UpdateStatus::Aborted(_) => unreachable!(),
UpdateStatus::Failed(failed) => {
let duration = failed
.failed_at
.signed_duration_since(failed.from.started_processing_at)
.num_milliseconds();
// necessary since chrono::Duration doesn't expose an f64 secs method.
let duration = Duration::from_millis(duration as u64).as_secs_f64();
let update_id = failed.id();
let processed_at = failed.failed_at;
let enqueued_at = failed.from.from.enqueued_at;
let error = failed.into();
let content = FailedUpdateResult {
update_id,
update_type,
error,
duration,
enqueued_at,
processed_at,
};
UpdateStatusResponse::Failed { content }
}
}
}
}
#[derive(Deserialize)]
pub struct IndexParam {
index_uid: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdateResponse {
@ -365,8 +257,8 @@ mod test {
indexes::documents::add_documents,
indexes::documents::delete_document,
indexes::updates::get_all_updates_status,
indexes::updates::get_update_status,
indexes::tasks::get_all_tasks_status,
indexes::tasks::get_task_status,
}
Admin => { list_keys, }
}

View file

@ -0,0 +1,56 @@
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::tasks::task::TaskId;
use meilisearch_lib::MeiliSearch;
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::task::{TaskListView, TaskView};
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(get_tasks)))
.service(web::resource("/{task_id}").route(web::get().to(get_task)));
}
async fn get_tasks(
meilisearch: GuardedData<Private, MeiliSearch>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Tasks Seen".to_string(),
json!({ "per_task_uid": false }),
Some(&req),
);
let tasks: TaskListView = meilisearch
.list_tasks(None, None, None)
.await?
.into_iter()
.map(TaskView::from)
.collect::<Vec<_>>()
.into();
Ok(HttpResponse::Ok().json(tasks))
}
async fn get_task(
meilisearch: GuardedData<Private, MeiliSearch>,
task_id: web::Path<TaskId>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Tasks Seen".to_string(),
json!({ "per_task_uid": true }),
Some(&req),
);
let task: TaskView = meilisearch
.get_task(task_id.into_inner(), None)
.await?
.into();
Ok(HttpResponse::Ok().json(task))
}

View file

@ -0,0 +1,292 @@
use chrono::{DateTime, Duration, Utc};
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::tasks::task::{
DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
};
use serde::{Serialize, Serializer};
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
enum TaskType {
IndexCreation,
IndexUpdate,
IndexDeletion,
DocumentsAddition,
DocumentsPartial,
DocumentsDeletion,
SettingsUpdate,
ClearAll,
}
impl From<TaskContent> for TaskType {
fn from(other: TaskContent) -> Self {
match other {
TaskContent::DocumentAddition {
merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
..
} => TaskType::DocumentsAddition,
TaskContent::DocumentAddition {
merge_strategy: IndexDocumentsMethod::UpdateDocuments,
..
} => TaskType::DocumentsPartial,
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentsDeletion,
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
TaskContent::IndexDeletion => TaskType::IndexDeletion,
TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
_ => unreachable!("unexpected task type"),
}
}
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
enum TaskStatus {
Enqueued,
Processing,
Succeeded,
Failed,
}
#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
enum TaskDetails {
#[serde(rename_all = "camelCase")]
DocumentsAddition {
received_documents: usize,
indexed_documents: Option<u64>,
},
#[serde(rename_all = "camelCase")]
Settings {
#[serde(flatten)]
settings: Settings<Unchecked>,
},
#[serde(rename_all = "camelCase")]
IndexInfo { primary_key: Option<String> },
#[serde(rename_all = "camelCase")]
DocumentDeletion {
received_document_ids: usize,
deleted_documents: Option<u64>,
},
#[serde(rename_all = "camelCase")]
ClearAll { deleted_documents: Option<u64> },
}
fn serialize_duration<S: Serializer>(
duration: &Option<Duration>,
serializer: S,
) -> Result<S::Ok, S::Error> {
match duration {
Some(duration) => {
let duration_str = duration.to_string();
serializer.serialize_str(&duration_str)
}
None => serializer.serialize_none(),
}
}
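
To illustrate the wire format produced by `serialize_duration`: chrono's `Duration` is rendered through its `Display` impl, which writes an ISO 8601 duration string. An editorial sketch, not part of this diff, assuming the `serialize_duration` function above is in scope (the exact rendering, e.g. "PT4.5S", comes from chrono):

use chrono::Duration;
use serde::Serialize;

#[derive(Serialize)]
struct Example {
    #[serde(serialize_with = "serialize_duration")]
    duration: Option<Duration>,
}

fn main() {
    let done = Example {
        duration: Some(Duration::milliseconds(4500)),
    };
    // Prints something like {"duration":"PT4.5S"};
    // a `None` duration serializes to null.
    println!("{}", serde_json::to_string(&done).unwrap());
}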
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
uid: TaskId,
index_uid: String,
status: TaskStatus,
#[serde(rename = "type")]
task_type: TaskType,
#[serde(skip_serializing_if = "Option::is_none")]
details: Option<TaskDetails>,
#[serde(skip_serializing_if = "Option::is_none")]
error: Option<ResponseError>,
#[serde(serialize_with = "serialize_duration")]
duration: Option<Duration>,
enqueued_at: DateTime<Utc>,
started_at: Option<DateTime<Utc>>,
finished_at: Option<DateTime<Utc>>,
}
impl From<Task> for TaskView {
fn from(task: Task) -> Self {
let Task {
id,
index_uid,
content,
events,
} = task;
let (task_type, mut details) = match content {
TaskContent::DocumentAddition {
merge_strategy,
documents_count,
..
} => {
let details = TaskDetails::DocumentsAddition {
received_documents: documents_count,
indexed_documents: None,
};
let task_type = match merge_strategy {
IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentsPartial,
IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentsAddition,
_ => unreachable!("Unexpected document merge strategy."),
};
(task_type, Some(details))
}
TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
TaskType::DocumentsDeletion,
Some(TaskDetails::DocumentDeletion {
received_document_ids: ids.len(),
deleted_documents: None,
}),
),
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
TaskType::ClearAll,
Some(TaskDetails::ClearAll {
deleted_documents: None,
}),
),
TaskContent::IndexDeletion => (
TaskType::IndexDeletion,
Some(TaskDetails::ClearAll {
deleted_documents: None,
}),
),
TaskContent::SettingsUpdate { settings, .. } => (
TaskType::SettingsUpdate,
Some(TaskDetails::Settings { settings }),
),
TaskContent::IndexCreation { primary_key } => (
TaskType::IndexCreation,
Some(TaskDetails::IndexInfo { primary_key }),
),
TaskContent::IndexUpdate { primary_key } => (
TaskType::IndexUpdate,
Some(TaskDetails::IndexInfo { primary_key }),
),
};
// A task always has at least one event: "Created"
let (status, error, finished_at) = match events.last().unwrap() {
TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
TaskEvent::Succeded { timestamp, result } => {
match (result, &mut details) {
(
TaskResult::DocumentAddition {
indexed_documents: num,
..
},
Some(TaskDetails::DocumentsAddition {
ref mut indexed_documents,
..
}),
) => {
indexed_documents.replace(*num);
}
(
TaskResult::DocumentDeletion {
deleted_documents: docs,
..
},
Some(TaskDetails::DocumentDeletion {
ref mut deleted_documents,
..
}),
) => {
deleted_documents.replace(*docs);
}
(
TaskResult::ClearAll {
deleted_documents: docs,
},
Some(TaskDetails::ClearAll {
ref mut deleted_documents,
}),
) => {
deleted_documents.replace(*docs);
}
_ => (),
}
(TaskStatus::Succeeded, None, Some(*timestamp))
}
TaskEvent::Failed { timestamp, error } => {
(TaskStatus::Failed, Some(error.clone()), Some(*timestamp))
}
};
let enqueued_at = match events.first() {
Some(TaskEvent::Created(ts)) => *ts,
_ => unreachable!("A task must always have a creation event."),
};
let duration = finished_at.map(|ts| (ts - enqueued_at));
let started_at = events.iter().find_map(|e| match e {
TaskEvent::Processing(ts) => Some(*ts),
_ => None,
});
Self {
uid: id,
index_uid: index_uid.into_inner(),
status,
task_type,
details,
error,
duration,
enqueued_at,
started_at,
finished_at,
}
}
}
#[derive(Debug, Serialize)]
pub struct TaskListView {
results: Vec<TaskView>,
}
impl From<Vec<TaskView>> for TaskListView {
fn from(results: Vec<TaskView>) -> Self {
Self { results }
}
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
uid: TaskId,
index_uid: String,
status: TaskStatus,
#[serde(rename = "type")]
task_type: TaskType,
enqueued_at: DateTime<Utc>,
}
impl From<Task> for SummarizedTaskView {
fn from(mut other: Task) -> Self {
let created_event = other
.events
.drain(..1)
.next()
.expect("Task must have an enqueued event.");
let enqueued_at = match created_event {
TaskEvent::Created(ts) => ts,
_ => unreachable!("The first event of a task must always be 'Created'"),
};
Self {
uid: other.id,
index_uid: other.index_uid.to_string(),
status: TaskStatus::Enqueued,
task_type: other.content.into(),
enqueued_at,
}
}
}
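
For illustration, given the serde attributes above, the `SummarizedTaskView` returned with `202 Accepted` by the write routes serializes to something like this (all values invented):

{
  "uid": 1,
  "indexUid": "movies",
  "status": "enqueued",
  "type": "documentsAddition",
  "enqueuedAt": "2021-12-02T15:03:26Z"
}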