2022-09-27 16:33:37 +02:00
|
|
|
use actix_web::web::Data;
|
2021-12-02 16:03:26 +01:00
|
|
|
use actix_web::{web, HttpRequest, HttpResponse};
|
2022-09-27 16:33:37 +02:00
|
|
|
use index_scheduler::{IndexScheduler, TaskId};
|
2022-06-06 12:38:46 +02:00
|
|
|
use meilisearch_types::error::ResponseError;
|
|
|
|
use meilisearch_types::index_uid::IndexUid;
|
2022-06-06 12:45:52 +02:00
|
|
|
use meilisearch_types::star_or::StarOr;
|
2022-10-12 00:43:24 +02:00
|
|
|
use meilisearch_types::tasks::{Kind, Status};
|
2022-05-17 16:08:23 +02:00
|
|
|
use serde::Deserialize;
|
|
|
|
use serde_cs::vec::CS;
|
2021-12-02 16:03:26 +01:00
|
|
|
use serde_json::json;
|
|
|
|
|
|
|
|
use crate::analytics::Analytics;
|
|
|
|
use crate::extractors::authentication::{policies::*, GuardedData};
|
2022-03-04 20:12:44 +01:00
|
|
|
use crate::extractors::sequential_extractor::SeqHandler;
|
2021-12-02 16:03:26 +01:00
|
|
|
|
2022-06-06 12:45:52 +02:00
|
|
|
use super::fold_star_or;
|
2022-05-25 11:51:26 +02:00
|
|
|
|
2022-09-27 16:33:37 +02:00
|
|
|
const DEFAULT_LIMIT: fn() -> u32 = || 20;
|
2022-06-01 12:04:01 +02:00
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
pub fn configure(cfg: &mut web::ServiceConfig) {
|
2022-03-04 20:12:44 +01:00
|
|
|
cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
|
|
|
|
.service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
|
2021-12-02 16:03:26 +01:00
|
|
|
}
|
|
|
|
|
2022-05-17 16:08:23 +02:00
|
|
|
/// Query-string parameters accepted by `GET /tasks`.
///
/// Unknown parameters are rejected (`deny_unknown_fields`), and every list
/// parameter is a comma-separated list (`CS`) whose items may be the `*`
/// wildcard (`StarOr`).
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TasksFilterQuery {
    // Task kinds to keep; `type` in the query string (`type` is a Rust keyword).
    #[serde(rename = "type")]
    type_: Option<CS<StarOr<Kind>>>,
    // Task statuses to keep.
    status: Option<CS<StarOr<Status>>>,
    // Index uids to keep.
    index_uid: Option<CS<StarOr<IndexUid>>>,
    // Page size; defaults to 20 via `DEFAULT_LIMIT`.
    #[serde(default = "DEFAULT_LIMIT")]
    limit: u32,
    // Pagination cursor: task uid to start the page from.
    // NOTE(review): presumably tasks at or before this uid — confirm against
    // the scheduler's `Query::from` semantics.
    from: Option<TaskId>,
}
|
|
|
|
|
2022-05-18 12:07:06 +02:00
|
|
|
/// Returns `true` when the task payload `content` is of the public task
/// type `type_` (e.g. a `DocumentAddition` payload is reported as
/// `DocumentAdditionOrUpdate`); any other pairing yields `false`.
///
/// NOTE(review): `TaskType` and `TaskContent` do not appear in this file's
/// visible imports (they bring in `Kind`/`Status` from
/// `meilisearch_types::tasks`) — this looks like a stale helper left over
/// from the pre-`index_scheduler` task model; confirm it is still
/// referenced and compiles.
#[rustfmt::skip]
fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
    matches!((type_, content),
          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
        | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
        | (TaskType::IndexDeletion, TaskContent::IndexDeletion { .. })
        | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion{ .. })
        | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
        | (TaskType::DumpCreation, TaskContent::Dump { .. })
    )
}
|
|
|
|
|
2022-05-25 12:05:24 +02:00
|
|
|
#[rustfmt::skip]
|
2022-05-18 12:07:06 +02:00
|
|
|
fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
|
|
|
|
events.last().map_or(false, |event| {
|
2022-05-25 12:05:24 +02:00
|
|
|
matches!((status, event),
|
|
|
|
(TaskStatus::Enqueued, TaskEvent::Created(_))
|
|
|
|
| (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
|
2022-05-30 16:42:51 +02:00
|
|
|
| (TaskStatus::Succeeded, TaskEvent::Succeeded { .. })
|
2022-05-25 12:05:24 +02:00
|
|
|
| (TaskStatus::Failed, TaskEvent::Failed { .. }),
|
2022-05-18 12:07:06 +02:00
|
|
|
)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
async fn get_tasks(
|
2022-09-27 16:33:37 +02:00
|
|
|
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
|
2022-07-07 10:56:02 +02:00
|
|
|
params: web::Query<TasksFilterQuery>,
|
2021-12-02 16:03:26 +01:00
|
|
|
req: HttpRequest,
|
|
|
|
analytics: web::Data<dyn Analytics>,
|
|
|
|
) -> Result<HttpResponse, ResponseError> {
|
2022-07-07 10:56:02 +02:00
|
|
|
let TasksFilterQuery {
|
2022-05-17 16:08:23 +02:00
|
|
|
type_,
|
|
|
|
status,
|
|
|
|
index_uid,
|
2022-05-31 11:56:51 +02:00
|
|
|
limit,
|
2022-06-01 15:30:39 +02:00
|
|
|
from,
|
2022-05-17 16:08:23 +02:00
|
|
|
} = params.into_inner();
|
|
|
|
|
2022-09-27 16:33:37 +02:00
|
|
|
let search_rules = &index_scheduler.filters().search_rules;
|
2022-05-18 12:07:06 +02:00
|
|
|
|
2022-05-31 11:56:51 +02:00
|
|
|
// We first transform a potential indexUid=* into a "not specified indexUid filter"
|
2022-05-30 17:12:53 +02:00
|
|
|
// for every one of the filters: type, status, and indexUid.
|
2022-06-06 10:17:33 +02:00
|
|
|
let type_: Option<Vec<_>> = type_.and_then(fold_star_or);
|
|
|
|
let status: Option<Vec<_>> = status.and_then(fold_star_or);
|
|
|
|
let index_uid: Option<Vec<_>> = index_uid.and_then(fold_star_or);
|
2022-05-30 17:12:53 +02:00
|
|
|
|
2022-07-07 10:56:02 +02:00
|
|
|
analytics.publish(
|
|
|
|
"Tasks Seen".to_string(),
|
|
|
|
json!({
|
|
|
|
"filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()),
|
|
|
|
"filtered_by_type": type_.as_ref().map_or(false, |v| !v.is_empty()),
|
|
|
|
"filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()),
|
|
|
|
}),
|
|
|
|
Some(&req),
|
|
|
|
);
|
|
|
|
|
2022-09-22 12:14:51 +02:00
|
|
|
let mut filters = index_scheduler::Query::default();
|
|
|
|
|
2022-05-30 17:12:53 +02:00
|
|
|
// Then we filter on potential indexes and make sure that the search filter
|
|
|
|
// restrictions are also applied.
|
2022-09-22 12:14:51 +02:00
|
|
|
match index_uid {
|
2022-05-17 16:08:23 +02:00
|
|
|
Some(indexes) => {
|
2022-05-30 13:59:27 +02:00
|
|
|
for name in indexes {
|
2022-05-17 16:08:23 +02:00
|
|
|
if search_rules.is_index_authorized(&name) {
|
2022-09-22 12:14:51 +02:00
|
|
|
filters = filters.with_index(name.to_string());
|
2022-05-17 16:08:23 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {
|
2022-09-22 12:14:51 +02:00
|
|
|
if !search_rules.is_index_authorized("*") {
|
2022-05-17 16:08:23 +02:00
|
|
|
for (index, _policy) in search_rules.clone() {
|
2022-09-22 12:14:51 +02:00
|
|
|
filters = filters.with_index(index.to_string());
|
2022-05-17 16:08:23 +02:00
|
|
|
}
|
|
|
|
}
|
2021-12-06 15:45:41 +01:00
|
|
|
}
|
2022-01-12 15:35:33 +01:00
|
|
|
};
|
2021-12-06 15:45:41 +01:00
|
|
|
|
2022-09-22 12:14:51 +02:00
|
|
|
if let Some(kinds) = type_ {
|
|
|
|
for kind in kinds {
|
|
|
|
filters = filters.with_kind(kind);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if let Some(statuses) = status {
|
|
|
|
for status in statuses {
|
|
|
|
filters = filters.with_status(status);
|
|
|
|
}
|
|
|
|
}
|
2022-05-18 12:07:06 +02:00
|
|
|
|
2022-09-27 16:33:37 +02:00
|
|
|
filters.from = from;
|
2022-05-31 11:56:51 +02:00
|
|
|
// We +1 just to know if there is more after this "page" or not.
|
2022-06-01 12:04:01 +02:00
|
|
|
let limit = limit.saturating_add(1);
|
2022-09-27 16:33:37 +02:00
|
|
|
filters.limit = limit;
|
2022-05-31 11:56:51 +02:00
|
|
|
|
2022-09-27 16:33:37 +02:00
|
|
|
let mut tasks_results: Vec<_> = index_scheduler.get_tasks(filters)?.into_iter().collect();
|
2022-05-31 11:56:51 +02:00
|
|
|
|
|
|
|
// If we were able to fetch the number +1 tasks we asked
|
|
|
|
// it means that there is more to come.
|
2022-09-27 16:33:37 +02:00
|
|
|
let next = if tasks_results.len() == limit as usize {
|
2022-06-01 15:30:39 +02:00
|
|
|
tasks_results.pop().map(|t| t.uid)
|
2022-05-31 11:56:51 +02:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
2022-06-01 15:30:39 +02:00
|
|
|
let from = tasks_results.first().map(|t| t.uid);
|
|
|
|
|
2022-09-22 12:14:51 +02:00
|
|
|
// TODO: TAMO: define a structure to represent this type
|
|
|
|
let tasks = json!({
|
|
|
|
"results": tasks_results,
|
|
|
|
"limit": limit.saturating_sub(1),
|
|
|
|
"from": from,
|
|
|
|
"next": next,
|
|
|
|
});
|
2021-12-02 16:03:26 +01:00
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().json(tasks))
|
|
|
|
}
|
|
|
|
|
|
|
|
async fn get_task(
|
2022-09-27 16:33:37 +02:00
|
|
|
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
|
2021-12-02 16:03:26 +01:00
|
|
|
task_id: web::Path<TaskId>,
|
|
|
|
req: HttpRequest,
|
|
|
|
analytics: web::Data<dyn Analytics>,
|
|
|
|
) -> Result<HttpResponse, ResponseError> {
|
2022-09-22 20:02:55 +02:00
|
|
|
let task_id = task_id.into_inner();
|
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
analytics.publish(
|
|
|
|
"Tasks Seen".to_string(),
|
|
|
|
json!({ "per_task_uid": true }),
|
|
|
|
Some(&req),
|
|
|
|
);
|
|
|
|
|
2022-09-27 16:33:37 +02:00
|
|
|
let search_rules = &index_scheduler.filters().search_rules;
|
2022-09-22 12:14:51 +02:00
|
|
|
let mut filters = index_scheduler::Query::default();
|
|
|
|
if !search_rules.is_index_authorized("*") {
|
2022-01-12 15:35:33 +01:00
|
|
|
for (index, _policy) in search_rules.clone() {
|
2022-09-22 12:14:51 +02:00
|
|
|
filters = filters.with_index(index);
|
2021-12-06 15:45:41 +01:00
|
|
|
}
|
2022-09-22 12:14:51 +02:00
|
|
|
}
|
|
|
|
|
2022-09-22 20:02:55 +02:00
|
|
|
filters.uid = Some(vec![task_id]);
|
2021-12-02 16:03:26 +01:00
|
|
|
|
2022-09-27 16:33:37 +02:00
|
|
|
if let Some(task) = index_scheduler.get_tasks(filters)?.first() {
|
2022-09-22 20:02:55 +02:00
|
|
|
Ok(HttpResponse::Ok().json(task))
|
|
|
|
} else {
|
|
|
|
Err(index_scheduler::Error::TaskNotFound(task_id).into())
|
|
|
|
}
|
2021-12-02 16:03:26 +01:00
|
|
|
}
|