mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-03 11:57:07 +02:00)

wip integrating the scheduler in meilisearch-http

parent 250410495c
commit 8d51c1f389
16 changed files with 251 additions and 192 deletions
@@ -48,6 +48,9 @@ log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 meilisearch-lib = { path = "../meilisearch-lib", default-features = false }
+index = { path = "../index" }
+index-scheduler = { path = "../index-scheduler" }
+document-formats = { path = "../document-formats" }
 mimalloc = { version = "0.1.29", default-features = false }
 mime = "0.3.16"
 num_cpus = "1.13.1"

@@ -2,7 +2,6 @@
 #[macro_use]
 pub mod error;
 pub mod analytics;
-pub mod task;
 #[macro_use]
 pub mod extractors;
 pub mod option;

@@ -1,4 +1,5 @@
 use actix_web::{web, HttpRequest, HttpResponse};
+use index_scheduler::KindWithContent;
 use log::debug;
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;

@@ -7,7 +8,6 @@ use serde_json::json;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::SummarizedTaskView;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));

@@ -20,7 +20,10 @@ pub async fn create_dump(
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Dump Created".to_string(), json!({}), Some(&req));

-    let res: SummarizedTaskView = meilisearch.register_dump_task().await?.into();
+    let task = KindWithContent::DumpExport {
+        output: "toto".to_string().into(),
+    };
+    let res = meilisearch.register_task(task).await?;

     debug!("returns: {:?}", res);
     Ok(HttpResponse::Accepted().json(res))

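The pattern introduced in this hunk recurs through the rest of the commit: a route handler no longer performs work inline, it describes the work as a KindWithContent value and hands it to the scheduler, which assigns a task uid and executes it asynchronously. A minimal self-contained sketch of that enqueue flow follows; only the names KindWithContent, DumpExport, and register_task come from the hunk above, everything else (the Scheduler type, the uid scheme, the synchronous signature) is illustrative:

    use std::path::PathBuf;

    #[derive(Debug)]
    enum KindWithContent {
        DumpExport { output: PathBuf },
    }

    #[derive(Default)]
    struct Scheduler {
        queue: Vec<(u32, KindWithContent)>,
    }

    impl Scheduler {
        // Toy counterpart of `register_task` above: enqueue the description
        // of the work and hand back the uid the client can poll.
        fn register_task(&mut self, kind: KindWithContent) -> u32 {
            let uid = self.queue.len() as u32;
            self.queue.push((uid, kind));
            uid
        }
    }

    fn main() {
        let mut scheduler = Scheduler::default();
        let uid = scheduler.register_task(KindWithContent::DumpExport {
            output: PathBuf::from("dumps/latest"),
        });
        println!("enqueued dump task {uid}");
    }
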
@@ -4,9 +4,10 @@ use actix_web::web::Bytes;
 use actix_web::HttpMessage;
 use actix_web::{web, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
+use document_formats::PayloadType;
 use futures::{Stream, StreamExt};
+use index_scheduler::{KindWithContent, TaskView};
 use log::debug;
-use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
 use meilisearch_lib::milli::update::IndexDocumentsMethod;
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;

@@ -24,7 +25,6 @@ use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::payload::Payload;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::{fold_star_or, PaginationView};
-use crate::task::SummarizedTaskView;

 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
     vec![

@@ -117,8 +117,11 @@ pub async fn delete_document(
         document_id,
         index_uid,
     } = path.into_inner();
-    let update = Update::DeleteDocuments(vec![document_id]);
-    let task: SummarizedTaskView = meilisearch.register_update(index_uid, update).await?.into();
+    let task = KindWithContent::DocumentDeletion {
+        index_uid,
+        documents_ids: vec![document_id],
+    };
+    let task = meilisearch.register_task(task).await?;
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }

@@ -235,14 +238,14 @@ async fn document_addition(
     body: Payload,
     method: IndexDocumentsMethod,
     allow_index_creation: bool,
-) -> Result<SummarizedTaskView, ResponseError> {
+) -> Result<TaskView, ResponseError> {
     let format = match mime_type
         .as_ref()
         .map(|m| (m.type_().as_str(), m.subtype().as_str()))
     {
-        Some(("application", "json")) => DocumentAdditionFormat::Json,
-        Some(("application", "x-ndjson")) => DocumentAdditionFormat::Ndjson,
-        Some(("text", "csv")) => DocumentAdditionFormat::Csv,
+        Some(("application", "json")) => PayloadType::Json,
+        Some(("application", "x-ndjson")) => PayloadType::Ndjson,
+        Some(("text", "csv")) => PayloadType::Csv,
         Some((type_, subtype)) => {
             return Err(MeilisearchHttpError::InvalidContentType(
                 format!("{}/{}", type_, subtype),

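The dispatch above only changes the target enum, from meilisearch-lib's DocumentAdditionFormat to the new document-formats crate's PayloadType. A self-contained sketch of the same content-type mapping, with a plain &str standing in for the mime crate's parsed type and a stand-in PayloadType enum:

    #[derive(Debug, PartialEq)]
    enum PayloadType { Json, Ndjson, Csv }

    fn payload_type(mime: &str) -> Option<PayloadType> {
        // Split "application/json" into ("application", "json"), as the
        // route does with `m.type_()` / `m.subtype()`.
        match mime.split_once('/')? {
            ("application", "json") => Some(PayloadType::Json),
            ("application", "x-ndjson") => Some(PayloadType::Ndjson),
            ("text", "csv") => Some(PayloadType::Csv),
            _ => None, // the real route answers 415 Invalid Content-Type here
        }
    }

    fn main() {
        assert_eq!(payload_type("text/csv"), Some(PayloadType::Csv));
        assert_eq!(payload_type("image/png"), None);
    }
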
@@ -257,12 +260,16 @@ async fn document_addition(
         }
     };

-    let update = Update::DocumentAddition {
-        payload: Box::new(payload_to_stream(body)),
+    let (file, uuid) = meilisearch.create_update_file()?;
+
+    let update = KindWithContent::DocumentAddition {
+        content_file: Box::new(payload_to_stream(body)),
+        documents_count: 0, // TODO: TAMO: get the document count
         primary_key,
         method,
         format,
         allow_index_creation,
+        index_uid,
     };

     let task = meilisearch.register_update(index_uid, update).await?.into();

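The addition path now splits into two steps: first persist the payload into an update file owned by the scheduler, then enqueue a DocumentAddition task that references that file instead of carrying the bytes in memory. A toy model of that flow, assuming the create_update_file call seen above; the store type, path layout, and uuid type are all illustrative:

    use std::io::Write;
    use std::path::PathBuf;

    struct UpdateFileStore { dir: PathBuf, next: u64 }

    impl UpdateFileStore {
        // Toy counterpart of `meilisearch.create_update_file()`: a writable
        // file plus the identifier a task can later use to find it.
        fn create_update_file(&mut self) -> std::io::Result<(std::fs::File, u64)> {
            let uuid = self.next;
            self.next += 1;
            let file = std::fs::File::create(self.dir.join(format!("{uuid}.payload")))?;
            Ok((file, uuid))
        }
    }

    fn main() -> std::io::Result<()> {
        let mut store = UpdateFileStore { dir: std::env::temp_dir(), next: 0 };
        let (mut file, uuid) = store.create_update_file()?;
        file.write_all(b"{\"id\":1}\n")?; // the streamed request body would go here
        println!("register DocumentAddition {{ content_file: {uuid}, documents_count: 0 }}");
        Ok(())
    }
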
@@ -1,6 +1,6 @@
 use actix_web::{web, HttpRequest, HttpResponse};
+use index_scheduler::KindWithContent;
 use log::debug;
-use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
 use serde::{Deserialize, Serialize};

@@ -10,7 +10,7 @@ use time::OffsetDateTime;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::SummarizedTaskView;
+use index_scheduler::task::TaskView;

 use super::Pagination;

@@ -48,10 +48,14 @@ pub async fn list_indexes(
     let nb_indexes = indexes.len();
     let iter = indexes
         .into_iter()
-        .filter(|i| search_rules.is_index_authorized(&i.uid));
+        .filter(|index| search_rules.is_index_authorized(&index.name));
+    /*
+    TODO: TAMO: implements me
     let ret = paginate
         .into_inner()
         .auto_paginate_unsized(nb_indexes, iter);
+    */
+    let ret = todo!();

     debug!("returns: {:?}", ret);
     Ok(HttpResponse::Ok().json(ret))

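The commented-out pagination is the piece the TODO defers. A self-contained sketch of what auto_paginate_unsized-style windowing does; the Pagination fields and the function signature are assumptions read off the commented code, not the real extractor:

    struct Pagination { offset: usize, limit: usize }

    // Skip to the requested window and take at most `limit` items; `total`
    // would normally be echoed back in the response envelope.
    fn auto_paginate_unsized<T>(p: &Pagination, total: usize, iter: impl Iterator<Item = T>) -> Vec<T> {
        let _ = total;
        iter.skip(p.offset).take(p.limit).collect()
    }

    fn main() {
        let page = auto_paginate_unsized(&Pagination { offset: 2, limit: 2 }, 5, 1..=5);
        assert_eq!(page, vec![3, 4]);
    }
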
@@ -70,9 +74,7 @@ pub async fn create_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let IndexCreateRequest {
-        primary_key, uid, ..
-    } = body.into_inner();
+    let IndexCreateRequest { primary_key, uid } = body.into_inner();

     let allow_index_creation = meilisearch.filters().search_rules.is_index_authorized(&uid);
     if allow_index_creation {

@@ -82,8 +84,11 @@ pub async fn create_index(
             Some(&req),
         );

-        let update = Update::CreateIndex { primary_key };
-        let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
+        let task = KindWithContent::IndexCreation {
+            index_uid: uid,
+            primary_key,
+        };
+        let task = meilisearch.register_task(task).await?;

         Ok(HttpResponse::Accepted().json(task))
     } else {

@@ -118,7 +123,10 @@ pub async fn get_index(
 ) -> Result<HttpResponse, ResponseError> {
     let meta = meilisearch.get_index(path.into_inner()).await?;
     debug!("returns: {:?}", meta);
-    Ok(HttpResponse::Ok().json(meta))
+
+    // TODO: TAMO: do this as well
+    todo!()
+    // Ok(HttpResponse::Ok().json(meta))
 }

 pub async fn update_index(

@@ -136,14 +144,12 @@ pub async fn update_index(
         Some(&req),
     );

-    let update = Update::UpdateIndex {
+    let task = KindWithContent::IndexUpdate {
+        index_uid: path.into_inner(),
         primary_key: body.primary_key,
     };

-    let task: SummarizedTaskView = meilisearch
-        .register_update(path.into_inner(), update)
-        .await?
-        .into();
+    let task = meilisearch.register_task(task).await?;

     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))

@@ -153,9 +159,9 @@ pub async fn delete_index(
     meilisearch: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, MeiliSearch>,
     path: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let uid = path.into_inner();
-    let update = Update::DeleteIndex;
-    let task: SummarizedTaskView = meilisearch.register_update(uid, update).await?.into();
+    let index_uid = path.into_inner();
+    let task = KindWithContent::IndexDeletion { index_uid };
+    let task = meilisearch.register_task(task).await?;

     Ok(HttpResponse::Accepted().json(task))
 }

@@ -1,11 +1,11 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use log::debug;
-use meilisearch_auth::IndexSearchRules;
-use meilisearch_lib::index::{
+use index::{
     MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
     DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
     DEFAULT_SEARCH_OFFSET,
 };
+use log::debug;
+use meilisearch_auth::IndexSearchRules;
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
 use serde::Deserialize;

@@ -1,15 +1,14 @@
 use log::debug;

 use actix_web::{web, HttpRequest, HttpResponse};
-use meilisearch_lib::index::{Settings, Unchecked};
-use meilisearch_lib::index_controller::Update;
+use index::{Settings, Unchecked};
+use index_scheduler::KindWithContent;
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
 use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::task::SummarizedTaskView;

 #[macro_export]
 macro_rules! make_setting_route {

@@ -18,34 +17,33 @@ macro_rules! make_setting_route {
             use actix_web::{web, HttpRequest, HttpResponse, Resource};
             use log::debug;

+            use index::Settings;
+            use index_scheduler::KindWithContent;
             use meilisearch_lib::milli::update::Setting;
-            use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};
+            use meilisearch_lib::MeiliSearch;

             use meilisearch_types::error::ResponseError;
             use $crate::analytics::Analytics;
             use $crate::extractors::authentication::{policies::*, GuardedData};
             use $crate::extractors::sequential_extractor::SeqHandler;
-            use $crate::task::SummarizedTaskView;

             pub async fn delete(
                 meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
                 index_uid: web::Path<String>,
             ) -> Result<HttpResponse, ResponseError> {
-                let settings = Settings {
+                let new_settings = Settings {
                     $attr: Setting::Reset,
                     ..Default::default()
                 };

                 let allow_index_creation = meilisearch.filters().allow_index_creation;
-                let update = Update::Settings {
-                    settings,
+                let task = KindWithContent::Settings {
+                    index_uid: index_uid.into_inner(),
+                    new_settings,
                     is_deletion: true,
                     allow_index_creation,
                 };
-                let task: SummarizedTaskView = meilisearch
-                    .register_update(index_uid.into_inner(), update)
-                    .await?
-                    .into();
+                let task = meilisearch.register_task(task).await?;

                 debug!("returns: {:?}", task);
                 Ok(HttpResponse::Accepted().json(task))

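Zooming out: make_setting_route! is a declarative macro that stamps out a full handler set (delete, update, get) per settings attribute, which is why one hunk inside it rewrites every generated route at once. A runnable toy showing just the stamping mechanism; the generated items are simplified to one function per attribute, unlike the real macro:

    macro_rules! make_setting_route {
        ($attr:ident, $camelcase_attr:literal) => {
            // One module (and, in the real macro, three handlers) per attribute.
            pub mod $attr {
                pub fn camelcase_name() -> &'static str {
                    $camelcase_attr
                }
            }
        };
    }

    make_setting_route!(typo_tolerance, "typoTolerance");
    make_setting_route!(faceting, "faceting");

    fn main() {
        println!("{}", typo_tolerance::camelcase_name()); // typoTolerance
        println!("{}", faceting::camelcase_name()); // faceting
    }
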
@@ -62,7 +60,7 @@ macro_rules! make_setting_route {

                 $analytics(&body, &req);

-                let settings = Settings {
+                let new_settings = Settings {
                     $attr: match body {
                         Some(inner_body) => Setting::Set(inner_body),
                         None => Setting::Reset,

@@ -71,15 +69,13 @@ macro_rules! make_setting_route {
                 };

                 let allow_index_creation = meilisearch.filters().allow_index_creation;
-                let update = Update::Settings {
-                    settings,
+                let task = KindWithContent::Settings {
+                    index_uid: index_uid.into_inner(),
+                    new_settings,
                     is_deletion: false,
                     allow_index_creation,
                 };
-                let task: SummarizedTaskView = meilisearch
-                    .register_update(index_uid.into_inner(), update)
-                    .await?
-                    .into();
+                let task = meilisearch.register_task(task).await?;

                 debug!("returns: {:?}", task);
                 Ok(HttpResponse::Accepted().json(task))

@@ -89,7 +85,9 @@ macro_rules! make_setting_route {
                 meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
                 index_uid: actix_web::web::Path<String>,
             ) -> std::result::Result<HttpResponse, ResponseError> {
-                let settings = meilisearch.settings(index_uid.into_inner()).await?;
+                let index = meilisearch.get_index(index_uid.into_inner()).await?;
+                let settings = index.settings()?;
+
                 debug!("returns: {:?}", settings);
                 let mut json = serde_json::json!(&settings);
                 let val = json[$camelcase_attr].take();

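The generated GET handler now reads the full settings from the index and projects out a single attribute. The projection trick (serialize everything, then take() the one camelCase key) works as in this sketch, which needs only the serde_json crate; the field names are just examples:

    use serde_json::json;

    fn main() {
        let mut settings = json!({ "typoTolerance": { "enabled": true }, "faceting": {} });
        // `take()` moves the value out, leaving Null behind, exactly like
        // `json[$camelcase_attr].take()` in the macro above.
        let val = settings["typoTolerance"].take();
        println!("{val}"); // {"enabled":true}
    }
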
@@ -175,11 +173,11 @@ make_setting_route!(
 make_setting_route!(
     "/typo-tolerance",
     patch,
-    meilisearch_lib::index::updates::TypoSettings,
+    index::updates::TypoSettings,
     typo_tolerance,
     "typoTolerance",
     analytics,
-    |setting: &Option<meilisearch_lib::index::updates::TypoSettings>, req: &HttpRequest| {
+    |setting: &Option<index::updates::TypoSettings>, req: &HttpRequest| {
         use serde_json::json;

         analytics.publish(

@@ -285,11 +283,11 @@ make_setting_route!(
 make_setting_route!(
     "/faceting",
     patch,
-    meilisearch_lib::index::updates::FacetingSettings,
+    index::updates::FacetingSettings,
     faceting,
     "faceting",
     analytics,
-    |setting: &Option<meilisearch_lib::index::updates::FacetingSettings>, req: &HttpRequest| {
+    |setting: &Option<index::updates::FacetingSettings>, req: &HttpRequest| {
         use serde_json::json;

         analytics.publish(

@@ -307,11 +305,11 @@ make_setting_route!(
 make_setting_route!(
     "/pagination",
     patch,
-    meilisearch_lib::index::updates::PaginationSettings,
+    index::updates::PaginationSettings,
     pagination,
     "pagination",
     analytics,
-    |setting: &Option<meilisearch_lib::index::updates::PaginationSettings>, req: &HttpRequest| {
+    |setting: &Option<index::updates::PaginationSettings>, req: &HttpRequest| {
         use serde_json::json;

         analytics.publish(

@@ -361,40 +359,40 @@ pub async fn update_all(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let settings = body.into_inner();
+    let new_settings = body.into_inner();

     analytics.publish(
         "Settings Updated".to_string(),
         json!({
             "ranking_rules": {
-                "sort_position": settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
+                "sort_position": new_settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
             },
             "searchable_attributes": {
-                "total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
+                "total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
             },
             "sortable_attributes": {
-                "total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()),
-                "has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")),
+                "total": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.len()),
+                "has_geo": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")),
             },
             "filterable_attributes": {
-                "total": settings.filterable_attributes.as_ref().set().map(|filter| filter.len()),
-                "has_geo": settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")),
+                "total": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.len()),
+                "has_geo": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")),
             },
             "typo_tolerance": {
-                "enabled": settings.typo_tolerance
+                "enabled": new_settings.typo_tolerance
                     .as_ref()
                     .set()
                     .and_then(|s| s.enabled.as_ref().set())
                     .copied(),
-                "disable_on_attributes": settings.typo_tolerance
+                "disable_on_attributes": new_settings.typo_tolerance
                     .as_ref()
                     .set()
                     .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
-                "disable_on_words": settings.typo_tolerance
+                "disable_on_words": new_settings.typo_tolerance
                     .as_ref()
                     .set()
                     .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
-                "min_word_size_for_one_typo": settings.typo_tolerance
+                "min_word_size_for_one_typo": new_settings.typo_tolerance
                     .as_ref()
                     .set()
                     .and_then(|s| s.min_word_size_for_typos

@@ -402,7 +400,7 @@ pub async fn update_all(
                         .set()
                         .map(|s| s.one_typo.set()))
                     .flatten(),
-                "min_word_size_for_two_typos": settings.typo_tolerance
+                "min_word_size_for_two_typos": new_settings.typo_tolerance
                     .as_ref()
                     .set()
                     .and_then(|s| s.min_word_size_for_typos

@@ -412,13 +410,13 @@ pub async fn update_all(
                     .flatten(),
             },
             "faceting": {
-                "max_values_per_facet": settings.faceting
+                "max_values_per_facet": new_settings.faceting
                     .as_ref()
                     .set()
                     .and_then(|s| s.max_values_per_facet.as_ref().set()),
             },
             "pagination": {
-                "max_total_hits": settings.pagination
+                "max_total_hits": new_settings.pagination
                     .as_ref()
                     .set()
                     .and_then(|s| s.max_total_hits.as_ref().set()),

@@ -428,45 +426,42 @@ pub async fn update_all(
     );

     let allow_index_creation = meilisearch.filters().allow_index_creation;
-    let update = Update::Settings {
-        settings,
+    let task = KindWithContent::Settings {
+        index_uid: index_uid.into_inner(),
+        new_settings,
         is_deletion: false,
         allow_index_creation,
     };
-    let task: SummarizedTaskView = meilisearch
-        .register_update(index_uid.into_inner(), update)
-        .await?
-        .into();
+    let task = meilisearch.register_task(task).await?;

     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }

 pub async fn get_all(
-    data: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
+    meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, MeiliSearch>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let settings = data.settings(index_uid.into_inner()).await?;
-    debug!("returns: {:?}", settings);
-    Ok(HttpResponse::Ok().json(settings))
+    let index = meilisearch.get_index(index_uid.into_inner()).await?;
+    let new_settings = index.settings()?;
+    debug!("returns: {:?}", new_settings);
+    Ok(HttpResponse::Ok().json(new_settings))
 }

 pub async fn delete_all(
     data: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let settings = Settings::cleared().into_unchecked();
+    let new_settings = Settings::cleared().into_unchecked();

     let allow_index_creation = data.filters().allow_index_creation;
-    let update = Update::Settings {
-        settings,
+    let task = KindWithContent::Settings {
+        index_uid: index_uid.into_inner(),
+        new_settings,
         is_deletion: true,
         allow_index_creation,
     };
-    let task: SummarizedTaskView = data
-        .register_update(index_uid.into_inner(), update)
-        .await?
-        .into();
+    let task = data.register_task(task).await?;

     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;

-use meilisearch_lib::index::{Settings, Unchecked};
+use index::{Settings, Unchecked};
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::star_or::StarOr;

@@ -1,6 +1,6 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
-use meilisearch_lib::tasks::TaskFilter;
+use index_scheduler::TaskId;
+use index_scheduler::{Kind, Status};
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;

@@ -12,7 +12,6 @@ use serde_json::json;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::{TaskListView, TaskStatus, TaskType, TaskView};

 use super::fold_star_or;

@@ -27,8 +26,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct TasksFilterQuery {
     #[serde(rename = "type")]
-    type_: Option<CS<StarOr<TaskType>>>,
-    status: Option<CS<StarOr<TaskStatus>>>,
+    type_: Option<CS<StarOr<Kind>>>,
+    status: Option<CS<StarOr<Status>>>,
     index_uid: Option<CS<StarOr<IndexUid>>>,
     #[serde(default = "DEFAULT_LIMIT")]
     limit: usize,

@@ -92,65 +91,43 @@ async fn get_tasks(
         Some(&req),
     );

+    let mut filters = index_scheduler::Query::default();
+
     // Then we filter on potential indexes and make sure that the search filter
     // restrictions are also applied.
-    let indexes_filters = match index_uid {
+    match index_uid {
         Some(indexes) => {
-            let mut filters = TaskFilter::default();
             for name in indexes {
                 if search_rules.is_index_authorized(&name) {
-                    filters.filter_index(name.to_string());
+                    filters = filters.with_index(name.to_string());
                 }
             }
-            Some(filters)
         }
         None => {
-            if search_rules.is_index_authorized("*") {
-                None
-            } else {
-                let mut filters = TaskFilter::default();
+            if !search_rules.is_index_authorized("*") {
                 for (index, _policy) in search_rules.clone() {
-                    filters.filter_index(index);
+                    filters = filters.with_index(index.to_string());
                 }
-                Some(filters)
             }
         }
     };

-    // Then we complete the task filter with other potential status and types filters.
-    let filters = if type_.is_some() || status.is_some() {
-        let mut filters = indexes_filters.unwrap_or_default();
-        filters.filter_fn(Box::new(move |task| {
-            let matches_type = match &type_ {
-                Some(types) => types
-                    .iter()
-                    .any(|t| task_type_matches_content(t, &task.content)),
-                None => true,
-            };
+    if let Some(kinds) = type_ {
+        for kind in kinds {
+            filters = filters.with_kind(kind);
+        }
+    }

-            let matches_status = match &status {
-                Some(statuses) => statuses
-                    .iter()
-                    .any(|t| task_status_matches_events(t, &task.events)),
-                None => true,
-            };
-
-            matches_type && matches_status
-        }));
-        Some(filters)
-    } else {
-        indexes_filters
-    };
+    if let Some(statuses) = status {
+        for status in statuses {
+            filters = filters.with_status(status);
+        }
+    }

     // We +1 just to know if there is more after this "page" or not.
     let limit = limit.saturating_add(1);

-    let mut tasks_results: Vec<_> = meilisearch
-        .list_tasks(filters, Some(limit), from)
-        .await?
-        .into_iter()
-        .map(TaskView::from)
-        .collect();
+    let mut tasks_results: Vec<_> = meilisearch.list_tasks(filters).await?.into_iter().collect();

     // If we were able to fetch the number +1 tasks we asked
     // it means that there is more to come.

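The rewritten filter assembly assumes a consuming builder on index_scheduler::Query, which is why each branch reassigns filters. A toy version of that builder pattern; the field and parameter types are stand-ins (the real with_kind/with_status presumably take the Kind and Status enums imported above):

    #[derive(Default, Debug)]
    struct Query {
        index: Vec<String>,
        kind: Vec<String>,
        status: Vec<String>,
    }

    impl Query {
        // Each `with_*` consumes the query and returns it, so callers in a
        // loop must write `filters = filters.with_index(...)`.
        fn with_index(mut self, index: String) -> Self {
            self.index.push(index);
            self
        }
        fn with_kind(mut self, kind: String) -> Self {
            self.kind.push(kind);
            self
        }
        fn with_status(mut self, status: String) -> Self {
            self.status.push(status);
            self
        }
    }

    fn main() {
        let mut filters = Query::default();
        for index in ["movies", "books"] {
            filters = filters.with_index(index.to_string());
        }
        filters = filters.with_kind("documentAddition".into()).with_status("enqueued".into());
        println!("{filters:?}");
    }
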
@@ -162,12 +139,13 @@ async fn get_tasks(

     let from = tasks_results.first().map(|t| t.uid);

-    let tasks = TaskListView {
-        results: tasks_results,
-        limit: limit.saturating_sub(1),
-        from,
-        next,
-    };
+    // TODO: TAMO: define a structure to represent this type
+    let tasks = json!({
+        "results": tasks_results,
+        "limit": limit.saturating_sub(1),
+        "from": from,
+        "next": next,
+    });

     Ok(HttpResponse::Ok().json(tasks))
 }

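The TODO above asks for a typed response to replace the ad-hoc json!. One plausible shape matching the four serialized keys; this is a hypothetical sketch, not part of the commit, and the field types (task uids as u32, generic results) are assumptions. It needs serde with the derive feature plus serde_json:

    use serde::Serialize;

    // Hypothetical typed replacement for the json! above.
    #[derive(Serialize)]
    struct TaskListView<T: Serialize> {
        results: Vec<T>,
        limit: usize,
        from: Option<u32>,
        next: Option<u32>,
    }

    fn main() {
        let view = TaskListView { results: vec![1u32, 2], limit: 20, from: Some(2), next: None };
        println!("{}", serde_json::to_string(&view).unwrap());
    }
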
@@ -185,20 +163,17 @@ async fn get_task(
     );

     let search_rules = &meilisearch.filters().search_rules;
-    let filters = if search_rules.is_index_authorized("*") {
-        None
-    } else {
-        let mut filters = TaskFilter::default();
+    let mut filters = index_scheduler::Query::default();
+    if !search_rules.is_index_authorized("*") {
         for (index, _policy) in search_rules.clone() {
-            filters.filter_index(index);
+            filters = filters.with_index(index);
         }
-        Some(filters)
-    };
+    }

-    let task: TaskView = meilisearch
-        .get_task(task_id.into_inner(), filters)
-        .await?
-        .into();
+    filters.limit = 1;
+    filters.from = Some(*task_id);
+
+    let task = meilisearch.list_tasks(filters).await?;

     Ok(HttpResponse::Ok().json(task))
 }

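The single-task lookup is now expressed as a list query with from set to the requested uid and limit set to 1. A toy model of that trick; the semantics of from (start listing at that uid, descending) are assumed, and note the WIP code above does not yet verify that the returned task really is the requested one:

    fn get_task(task_uids_desc: &[u32], uid: u32) -> Option<u32> {
        task_uids_desc
            .iter()
            .copied()
            .find(|t| *t <= uid) // what `from = Some(uid), limit = 1` would return
            .filter(|t| *t == uid) // the extra check the route could add
    }

    fn main() {
        assert_eq!(get_task(&[4, 3, 1], 3), Some(3));
        assert_eq!(get_task(&[4, 3, 1], 2), None); // uid 2 does not exist
    }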