From 3f80468f1827c537c0a578cc9d3f8c612203e809 Mon Sep 17 00:00:00 2001
From: Kerollmops
Date: Wed, 25 May 2022 12:05:24 +0200
Subject: [PATCH] Rename the Tasks Types

---
 meilisearch-http/src/routes/tasks.rs          | 32 ++-----
 meilisearch-http/src/task.rs                  | 94 +++++++++----------
 .../tests/documents/add_documents.rs          | 10 +-
 meilisearch-http/tests/dumps/mod.rs           | 18 ++--
 meilisearch-http/tests/tasks/mod.rs           | 17 ++--
 5 files changed, 75 insertions(+), 96 deletions(-)

diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs
index 02f700ccd..66f4bbbdb 100644
--- a/meilisearch-http/src/routes/tasks.rs
+++ b/meilisearch-http/src/routes/tasks.rs
@@ -1,7 +1,6 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use meilisearch_error::ResponseError;
-use meilisearch_lib::milli::update::IndexDocumentsMethod;
-use meilisearch_lib::tasks::task::{DocumentDeletion, TaskContent, TaskEvent, TaskId};
+use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
 use meilisearch_lib::tasks::TaskFilter;
 use meilisearch_lib::{IndexUid, MeiliSearch};
 use serde::Deserialize;
@@ -30,34 +29,23 @@ pub struct TaskFilterQuery {
 #[rustfmt::skip]
 fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
     matches!((type_, content),
-          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
+          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
         | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
         | (TaskType::IndexDeletion, TaskContent::IndexDeletion)
-        | (TaskType::DocumentAddition, TaskContent::DocumentAddition {
-              merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
-              ..
-          })
-        | (TaskType::DocumentPartial, TaskContent::DocumentAddition {
-              merge_strategy: IndexDocumentsMethod::UpdateDocuments,
-              ..
-          })
-        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)))
+        | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
+        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(_))
         | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
-        | (TaskType::ClearAll, TaskContent::DocumentDeletion(DocumentDeletion::Clear))
     )
 }
 
+#[rustfmt::skip]
 fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
     events.last().map_or(false, |event| {
-        matches!(
-            (status, event),
-            (TaskStatus::Enqueued, TaskEvent::Created(_))
-                | (
-                    TaskStatus::Processing,
-                    TaskEvent::Processing(_) | TaskEvent::Batched { .. }
-                )
-                | (TaskStatus::Succeeded, TaskEvent::Succeded { .. })
-                | (TaskStatus::Failed, TaskEvent::Failed { .. }),
+        matches!((status, event),
+              (TaskStatus::Enqueued, TaskEvent::Created(_))
+            | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
+            | (TaskStatus::Succeeded, TaskEvent::Succeded { .. })
+            | (TaskStatus::Failed, TaskEvent::Failed { .. }),
         )
     })
 }
diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs
index 4ecb6cead..c7aaf0030 100644
--- a/meilisearch-http/src/task.rs
+++ b/meilisearch-http/src/task.rs
@@ -4,7 +4,6 @@ use std::write;
 
 use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
-use meilisearch_lib::milli::update::IndexDocumentsMethod;
 use meilisearch_lib::tasks::batch::BatchId;
 use meilisearch_lib::tasks::task::{
     DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
@@ -20,33 +19,22 @@ pub enum TaskType {
     IndexCreation,
     IndexUpdate,
     IndexDeletion,
-    DocumentAddition,
-    DocumentPartial,
+    DocumentAdditionOrUpdate,
     DocumentDeletion,
     SettingsUpdate,
-    ClearAll,
     DumpCreation,
 }
 
 impl From<TaskContent> for TaskType {
     fn from(other: TaskContent) -> Self {
         match other {
-            TaskContent::DocumentAddition {
-                merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
-                ..
-            } => TaskType::DocumentAddition,
-            TaskContent::DocumentAddition {
-                merge_strategy: IndexDocumentsMethod::UpdateDocuments,
-                ..
-            } => TaskType::DocumentPartial,
-            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
-            TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
-            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
-            TaskContent::IndexDeletion => TaskType::IndexDeletion,
             TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
             TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
+            TaskContent::IndexDeletion => TaskType::IndexDeletion,
+            TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate,
+            TaskContent::DocumentDeletion(_) => TaskType::DocumentDeletion,
+            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
             TaskContent::Dump { .. } => TaskType::DumpCreation,
-            _ => unreachable!("unexpected task type"),
         }
     }
 }
@@ -55,21 +43,27 @@ impl FromStr for TaskType {
     type Err = String;
 
     fn from_str(status: &str) -> Result<Self, Self::Err> {
-        match status {
-            "indexCreation" => Ok(TaskType::IndexCreation),
-            "indexUpdate" => Ok(TaskType::IndexUpdate),
-            "indexDeletion" => Ok(TaskType::IndexDeletion),
-            "documentAddition" => Ok(TaskType::DocumentAddition),
-            "documentPartial" => Ok(TaskType::DocumentPartial),
-            "documentDeletion" => Ok(TaskType::DocumentDeletion),
-            "settingsUpdate" => Ok(TaskType::SettingsUpdate),
-            "clearAll" => Ok(TaskType::ClearAll),
-            unknown => Err(format!(
-                "invalid task type `{}` value, expecting one of: \
-                 indexCreation, indexUpdate, indexDeletion, documentAddition, \
-                 documentPartial, documentDeletion, settingsUpdate, or clearAll",
-                unknown
-            )),
+        if status.eq_ignore_ascii_case("indexCreation") {
+            Ok(TaskType::IndexCreation)
+        } else if status.eq_ignore_ascii_case("indexUpdate") {
+            Ok(TaskType::IndexUpdate)
+        } else if status.eq_ignore_ascii_case("indexDeletion") {
+            Ok(TaskType::IndexDeletion)
+        } else if status.eq_ignore_ascii_case("documentAdditionOrUpdate") {
+            Ok(TaskType::DocumentAdditionOrUpdate)
+        } else if status.eq_ignore_ascii_case("documentDeletion") {
+            Ok(TaskType::DocumentDeletion)
+        } else if status.eq_ignore_ascii_case("settingsUpdate") {
+            Ok(TaskType::SettingsUpdate)
+        } else if status.eq_ignore_ascii_case("dumpCreation") {
+            Ok(TaskType::DumpCreation)
+        } else {
+            Err(format!(
+                "invalid task type `{}`, expecting one of: \
+                 indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \
+                 documentDeletion, settingsUpdate, dumpCreation",
+                status
+            ))
         }
     }
 }
@@ -87,16 +81,20 @@ impl FromStr for TaskStatus {
     type Err = String;
 
     fn from_str(status: &str) -> Result<Self, Self::Err> {
-        match status {
-            "enqueued" => Ok(TaskStatus::Enqueued),
-            "processing" => Ok(TaskStatus::Processing),
-            "succeeded" => Ok(TaskStatus::Succeeded),
-            "failed" => Ok(TaskStatus::Failed),
-            unknown => Err(format!(
-                "invalid task status `{}` value, expecting one of: \
+        if status.eq_ignore_ascii_case("enqueued") {
+            Ok(TaskStatus::Enqueued)
+        } else if status.eq_ignore_ascii_case("processing") {
+            Ok(TaskStatus::Processing)
+        } else if status.eq_ignore_ascii_case("succeeded") {
+            Ok(TaskStatus::Succeeded)
+        } else if status.eq_ignore_ascii_case("failed") {
+            Ok(TaskStatus::Failed)
+        } else {
+            Err(format!(
+                "invalid task status `{}`, expecting one of: \
                  enqueued, processing, succeeded, or failed",
-                unknown
-            )),
+                status,
+            ))
         }
     }
 }
@@ -214,22 +212,14 @@ impl From<Task> for TaskView {
 
         let (task_type, mut details) = match content {
             TaskContent::DocumentAddition {
-                merge_strategy,
-                documents_count,
-                ..
+                documents_count, ..
             } => {
                 let details = TaskDetails::DocumentAddition {
                     received_documents: documents_count,
                     indexed_documents: None,
                 };
 
-                let task_type = match merge_strategy {
-                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
-                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
-                    _ => unreachable!("Unexpected document merge strategy."),
-                };
-
-                (task_type, Some(details))
+                (TaskType::DocumentAdditionOrUpdate, Some(details))
             }
             TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
                 TaskType::DocumentDeletion,
@@ -239,7 +229,7 @@ impl From<Task> for TaskView {
                 }),
             ),
             TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
-                TaskType::ClearAll,
+                TaskType::DocumentDeletion,
                 Some(TaskDetails::ClearAll {
                     deleted_documents: None,
                 }),
diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs
index 238df6332..ab271ce18 100644
--- a/meilisearch-http/tests/documents/add_documents.rs
+++ b/meilisearch-http/tests/documents/add_documents.rs
@@ -615,7 +615,7 @@ async fn add_documents_no_index_creation() {
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["uid"], 0);
-    assert_eq!(response["type"], "documentAddition");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["receivedDocuments"], 1);
     assert_eq!(response["details"]["indexedDocuments"], 1);
 
@@ -685,7 +685,7 @@ async fn document_addition_with_primary_key() {
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["uid"], 0);
-    assert_eq!(response["type"], "documentAddition");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["receivedDocuments"], 1);
     assert_eq!(response["details"]["indexedDocuments"], 1);
 
@@ -714,7 +714,7 @@ async fn document_update_with_primary_key() {
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["uid"], 0);
-    assert_eq!(response["type"], "documentPartial");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["indexedDocuments"], 1);
     assert_eq!(response["details"]["receivedDocuments"], 1);
 
@@ -818,7 +818,7 @@ async fn add_larger_dataset() {
     let (response, code) = index.get_task(update_id).await;
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
-    assert_eq!(response["type"], "documentAddition");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["indexedDocuments"], 77);
     assert_eq!(response["details"]["receivedDocuments"], 77);
     let (response, code) = index
@@ -840,7 +840,7 @@ async fn update_larger_dataset() {
     index.wait_task(0).await;
     let (response, code) = index.get_task(0).await;
     assert_eq!(code, 200);
-    assert_eq!(response["type"], "documentPartial");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["indexedDocuments"], 77);
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {
diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs
index 22625f17f..6d6e6494a 100644
--- a/meilisearch-http/tests/dumps/mod.rs
+++ b/meilisearch-http/tests/dumps/mod.rs
@@ -69,7 +69,7 @@ async fn import_dump_v2_movie_raw() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -134,7 +134,7 @@ async fn import_dump_v2_movie_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -199,7 +199,7 @@ async fn import_dump_v2_rubygems_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks["results"][0],
-        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
     );
     assert_eq!(
         tasks["results"][92],
@@ -268,7 +268,7 @@ async fn import_dump_v3_movie_raw() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -333,7 +333,7 @@ async fn import_dump_v3_movie_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -398,7 +398,7 @@ async fn import_dump_v3_rubygems_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks["results"][0],
-        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
     );
     assert_eq!(
         tasks["results"][92],
@@ -467,7 +467,7 @@ async fn import_dump_v4_movie_raw() {
     assert_eq!(code, 200);
    assert_eq!(
         tasks,
-        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -532,7 +532,7 @@ async fn import_dump_v4_movie_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -597,7 +597,7 @@ async fn import_dump_v4_rubygems_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks["results"][0],
-        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
     );
     assert_eq!(
         tasks["results"][92],
diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs
index 300cddde7..80bf6cb3d 100644
--- a/meilisearch-http/tests/tasks/mod.rs
+++ b/meilisearch-http/tests/tasks/mod.rs
@@ -76,9 +76,10 @@ async fn list_tasks_status_filtered() {
     assert_eq!(code, 200, "{}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
-    let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(response["results"].as_array().unwrap().len(), 1);
+    // We can't be sure that the update isn't already processed so we can't test this
+    // let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
+    // assert_eq!(code, 200, "{}", response);
+    // assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     index.wait_task(1).await;
 
@@ -105,7 +106,7 @@ async fn list_tasks_type_filtered() {
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index
-        .filtered_tasks(&["indexCreation", "documentAddition"], &[])
+        .filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
         .await;
     assert_eq!(code, 200, "{}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
@@ -130,7 +131,7 @@ async fn list_tasks_status_and_type_filtered() {
 
     let (response, code) = index
         .filtered_tasks(
-            &["indexCreation", "documentAddition"],
+            &["indexCreation", "documentAdditionOrUpdate"],
             &["succeeded", "processing"],
         )
         .await;
@@ -166,16 +167,16 @@ async fn test_summarized_task_view() {
     assert_valid_summarized_task!(response, "settingsUpdate", "test");
 
     let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentPartial", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
 
     let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAddition", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
 
     let (response, _) = index.delete_document(1).await;
     assert_valid_summarized_task!(response, "documentDeletion", "test");
 
     let (response, _) = index.clear_all_documents().await;
-    assert_valid_summarized_task!(response, "clearAll", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", "test");
 
     let (response, _) = index.delete().await;
     assert_valid_summarized_task!(response, "indexDeletion", "test");