Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-11-29 16:24:26 +01:00
commit 4b92f1b269
parent c12a1cd956

    wip
@@ -4,13 +4,13 @@ use actix_web::http::header::CONTENT_TYPE;
 use actix_web::web::Data;
 use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
-use deserr::actix_web::AwebQueryParameter;
+use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::Deserr;
 use futures::StreamExt;
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_types::deserr::query_params::Param;
-use meilisearch_types::deserr::DeserrQueryParamError;
+use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::{Code, ResponseError};
@@ -71,8 +71,11 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
                 .route(web::put().to(SeqHandler(update_documents)))
                 .route(web::delete().to(SeqHandler(clear_all_documents))),
         )
-        // this route needs to be before the /documents/{document_id} to match properly
-        .service(web::resource("/delete-batch").route(web::post().to(SeqHandler(delete_documents))))
+        // these routes need to be before the /documents/{document_id} to match properly
+        .service(
+            web::resource("/delete-batch").route(web::post().to(SeqHandler(delete_documents_batch))),
+        )
+        .service(web::resource("/delete").route(web::post().to(SeqHandler(delete_documents_by_filter))))
         .service(
             web::resource("/{document_id}")
                 .route(web::get().to(SeqHandler(get_document)))
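The comment in the hunk above captures an actix-web matching rule: resources are tried in registration order, so the literal `/delete` and `/delete-batch` segments have to be registered before the dynamic `/{document_id}` resource, otherwise they would be captured as a document id. A minimal, self-contained sketch of that pattern (not Meilisearch code; the handlers, scope, and port are placeholders chosen for illustration):

use actix_web::{web, App, HttpResponse, HttpServer};

async fn delete_documents_by_filter() -> HttpResponse {
    HttpResponse::Accepted().body("would enqueue a deletion-by-filter task")
}

async fn get_document(document_id: web::Path<String>) -> HttpResponse {
    HttpResponse::Ok().body(format!("would fetch document {}", document_id.into_inner()))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            web::scope("/documents")
                // literal segment first, otherwise "/delete" would match "{document_id}"
                .service(web::resource("/delete").route(web::post().to(delete_documents_by_filter)))
                .service(web::resource("/{document_id}").route(web::get().to(get_document))),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}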
@@ -373,59 +376,22 @@ async fn document_addition(
     Ok(task.into())
 }

-#[derive(Debug, Deserialize)]
-#[serde(untagged)]
-pub enum DocumentDeletionQuery {
-    Ids(Vec<Value>),
-    Object { filter: Option<Value> },
-}
-
-pub async fn delete_documents(
+pub async fn delete_documents_batch(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    body: web::Json<DocumentDeletionQuery>,
+    body: web::Json<Vec<Value>>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let index_uid = index_uid.into_inner();

     analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);

-    let ids = match body.into_inner() {
-        DocumentDeletionQuery::Ids(body) => body
+    let ids = body
         .iter()
         .map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string()))
-            .collect(),
-        DocumentDeletionQuery::Object { filter } => {
-            debug!("filter: {:?}", filter);
-
-            // FIXME: spawn_blocking
-            if let Some(mut filter) = filter {
-                if let Some(facets) = crate::search::parse_filter(&filter)? {
-                    debug!("facets: {:?}", facets);
-
-                    let task = KindWithContent::DocumentDeletionByFilter {
-                        index_uid: index_uid.to_string(),
-                        filter_expr: filter.take(),
-                    };
-
-                    let task: SummarizedTaskView =
-                        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-                            .await??
-                            .into();
-
-                    debug!("returns: {:?}", task);
-                    return Ok(HttpResponse::Accepted().json(task));
-                } else {
-                    vec![]
-                }
-            } else {
-                vec![]
-            }
-        }
-    };
+        .collect();

     let task =
         KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids };
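The `.map(...)` line kept above normalizes every JSON value in the batch payload into a string id: string values pass through unchanged, anything else (for example a numeric id) is stringified. A standalone sketch of that behaviour (assuming only the serde_json crate; not Meilisearch code):

use serde_json::{json, Value};

fn main() {
    let body: Vec<Value> = vec![json!("movie-1"), json!(42)];

    // same normalization as in delete_documents_batch above
    let ids: Vec<String> = body
        .iter()
        .map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string()))
        .collect();

    assert_eq!(ids, vec!["movie-1".to_string(), "42".to_string()]);
}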
@@ -436,6 +402,45 @@ pub async fn delete_documents(
     Ok(HttpResponse::Accepted().json(task))
 }

+#[derive(Debug, Deserr)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
+pub struct DocumentDeletionByFilter {
+    // TODO: Update the error code to something more appropriate
+    #[deserr(error = DeserrJsonError<InvalidDocumentOffset>)]
+    filter: Value,
+}
+
+pub async fn delete_documents_by_filter(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
+    index_uid: web::Path<String>,
+    body: AwebJson<DocumentDeletionByFilter, DeserrJsonError>,
+    req: HttpRequest,
+    analytics: web::Data<dyn Analytics>,
+) -> Result<HttpResponse, ResponseError> {
+    println!("here");
+    debug!("called with params: {:?}", body);
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+    let index_uid = index_uid.into_inner();
+    let filter = body.into_inner().filter;
+
+    analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
+
+    debug!("filter: {:?}", filter);
+
+    // FIXME: spawn_blocking => tamo: but why, it's making zero IO and almost no allocation?
+    // TODO: what should we do in case of an empty filter? Create a task that does nothing (then we should be able to store a None in the task queue)
+    // or refuse the payload with a cool™️ error message 😎
+    let _ = crate::search::parse_filter(&filter)?.expect("You can't send an empty filter");
+
+    let task = KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr: filter };
+
+    let task: SummarizedTaskView =
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+
+    debug!("returns: {:?}", task);
+    return Ok(HttpResponse::Accepted().json(task));
+}
+
 pub async fn clear_all_documents(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
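The FIXME in the new handler questions whether `index_scheduler.register(task)` needs to be offloaded at all. For reference, a generic, self-contained sketch of the `tokio::task::spawn_blocking` pattern used above (assuming only the tokio crate; `register_task` is a made-up stand-in for a blocking synchronous call, not a Meilisearch function):

use std::io;

// stand-in for a synchronous call that might block (disk I/O, a contended lock, ...)
fn register_task(payload: &str) -> io::Result<u64> {
    Ok(payload.len() as u64)
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let payload = String::from("color = blue");

    // move the synchronous call onto tokio's blocking thread pool; the double `?`
    // unwraps first the JoinError, then the inner io::Result, exactly like the
    // `.await??` in the handler above
    let task_id = tokio::task::spawn_blocking(move || register_task(&payload)).await??;

    println!("registered task {task_id}");
    Ok(())
}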
@@ -226,7 +226,7 @@ impl Index<'_> {
     }

     pub async fn delete_document_by_filter(&self, body: Value) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
+        let url = format!("/indexes/{}/documents/delete", urlencode(self.uid.as_ref()));
         self.service.post_encoded(url, body, self.encoder).await
     }

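The test helper above now posts to the new `/delete` route. Outside the test suite, the two endpoints added in this commit would be called roughly like this (a hypothetical client-side sketch assuming the reqwest, serde_json, and tokio crates; the host, port, index name, and missing API-key header are placeholders/simplifications):

use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();

    // delete a batch of documents by id (handled by delete_documents_batch)
    client
        .post("http://localhost:7700/indexes/doggo/documents/delete-batch")
        .json(&json!(["movie-1", 42]))
        .send()
        .await?;

    // delete every document matching a filter (handled by delete_documents_by_filter)
    client
        .post("http://localhost:7700/indexes/doggo/documents/delete")
        .json(&json!({ "filter": "color = blue" }))
        .send()
        .await?;

    Ok(())
}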
@@ -156,14 +156,13 @@ async fn delete_document_by_filter() {
     index.wait_task(1).await;
     let (response, code) =
         index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
-    snapshot!(code, @"202 Accepted");
+    // snapshot!(code, @"202 Accepted");
     snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
     {
-      "taskUid": 2,
-      "indexUid": "doggo",
-      "status": "enqueued",
-      "type": "documentDeletion",
-      "enqueuedAt": "[date]"
+      "message": "Missing fied `filter`",
+      "code": "bad_request",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -429,10 +429,10 @@ async fn delete_document_by_filter() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Json deserialize error: data did not match any variant of untagged enum DocumentDeletionQuery",
-      "code": "bad_request",
+      "message": "Invalid syntax for the filter parameter: `expected String, Array, found: null`.",
+      "code": "invalid_search_filter",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
     }
     "###);
