Mirror of https://github.com/meilisearch/MeiliSearch
#4840 - Partial fix - Confirm task success after waiting for it - continued, a few missing cases - batch 2
Parent: 15062e7dba
Commit: d7cb319217
14 changed files with 188 additions and 188 deletions
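
Every hunk below applies the same one-line change: after enqueueing a task, the test no longer just waits for it to finish but also asserts that it finished successfully, so a task that errors out fails the test at the step that enqueued it rather than later at the snapshot comparison. As a rough illustration of the pattern only, here is a minimal sketch assuming a helper in which waiting yields the task's final JSON document; the FinishedTask name, its signature, and the sample values are illustrative, not the actual meilisearch test-harness API:

// Minimal sketch (not the real meilisearch test harness) of the
// wait-then-assert-success pattern this commit adopts.
use serde_json::{json, Value};

/// Hypothetical wrapper around the task document returned once waiting is over.
struct FinishedTask(Value);

impl FinishedTask {
    /// Panics unless the task reached the `succeeded` status, so a test that
    /// previously only waited (and silently tolerated failures) now fails loudly.
    fn succeeded(self) -> Value {
        assert_eq!(
            self.0["status"],
            json!("succeeded"),
            "task did not succeed: {}",
            self.0
        );
        self.0
    }
}

fn main() {
    // Before the change: wait and ignore the outcome.
    //     index.wait_task(task.uid()).await;
    // After the change: wait, then require success.
    //     index.wait_task(task.uid()).await.succeeded();
    let task = json!({ "uid": 0, "status": "succeeded", "type": "indexCreation" });
    FinishedTask(task).succeeded();
}
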
@@ -278,7 +278,7 @@ async fn test_summarized_document_addition_or_update() {
     let server = Server::new().await;
     let index = server.index("test");
     let (task,_status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -309,7 +309,7 @@ async fn test_summarized_document_addition_or_update() {
     "#);

     let (task,_status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -345,7 +345,7 @@ async fn test_summarized_delete_documents_by_batch() {
     let server = Server::new().await;
     let index = server.index("test");
     let (task,_status_code) = index.delete_batch(vec![1, 2, 3]).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -377,7 +377,7 @@ async fn test_summarized_delete_documents_by_batch() {

     index.create(None).await;
     let (task,_status_code) = index.delete_batch(vec![42]).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(2).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -414,7 +414,7 @@ async fn test_summarized_delete_documents_by_filter() {
     let index = server.index("test");

     let (task,_status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -447,7 +447,7 @@ async fn test_summarized_delete_documents_by_filter() {

     index.create(None).await;
     let (task,_status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(2).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -480,7 +480,7 @@ async fn test_summarized_delete_documents_by_filter() {

     index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
     let (task,_status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(4).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -517,7 +517,7 @@ async fn test_summarized_delete_document_by_id() {
     let server = Server::new().await;
     let index = server.index("test");
     let (task,_status_code) = index.delete_document(1).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -549,7 +549,7 @@ async fn test_summarized_delete_document_by_id() {

     index.create(None).await;
     let (task,_status_code) = index.delete_document(42).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(2).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -597,7 +597,7 @@ async fn test_summarized_settings_update() {
     "###);

     let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -642,7 +642,7 @@ async fn test_summarized_index_creation() {
     let server = Server::new().await;
     let index = server.index("test");
     let (task,_status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -670,7 +670,7 @@ async fn test_summarized_index_creation() {
     "#);

     let (task,_status_code) = index.create(Some("doggos")).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -815,7 +815,7 @@ async fn test_summarized_index_update() {
     let index = server.index("test");
     // If the index doesn't exist yet, we should get errors with or without the primary key.
     let (task,_status_code) = index.update(None).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -843,7 +843,7 @@ async fn test_summarized_index_update() {
     "#);

     let (task,_status_code) = index.update(Some("bones")).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -876,7 +876,7 @@ async fn test_summarized_index_update() {
     index.create(None).await;

     let (task,_status_code) = index.update(None).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(3).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -904,7 +904,7 @@ async fn test_summarized_index_update() {
     "#);

     let (task,_status_code) = index.update(Some("bones")).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(4).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -1024,9 +1024,9 @@ async fn test_summarized_batch_cancelation() {
     let index = server.index("doggos");
     // to avoid being flaky we're only going to cancel an already finished batch :(
     let (task,_status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (task,_status_code) = server.cancel_tasks("uids=0").await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     //TODO: create a get_batch function interface that accepts u64, and remove the following cast.
     let (batch, _) = index.get_batch(task.uid().to_u32().unwrap()).await;
     assert_json_snapshot!(batch,
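
The TODO in the hunk above records a small interface mismatch: task uids are u64, but the test helper's get_batch takes a u32, forcing the lossy task.uid().to_u32().unwrap() cast. A hedged sketch of the widening the TODO asks for follows; the function names and the route string are stand-ins for the suite's actual helper, not its real code:

// Sketch only, under assumptions: shows why widening the parameter to u64
// lets the call site drop its cast.
fn get_batch_current(batch_uid: u32) -> String {
    // Today's shape: a u32 parameter forces a cast at the call site.
    format!("GET /batches/{batch_uid}")
}

fn get_batch_proposed(batch_uid: u64) -> String {
    // Proposed shape: accept the uid as u64 and drop the lossy cast entirely.
    format!("GET /batches/{batch_uid}")
}

fn main() {
    let task_uid: u64 = 1;
    // With the current helper: a cast is needed and panics once uids exceed u32::MAX.
    let _ = get_batch_current(u32::try_from(task_uid).unwrap());
    // With the proposed helper: the uid is passed through unchanged.
    let _ = get_batch_proposed(task_uid);
}
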
@@ -1063,9 +1063,9 @@ async fn test_summarized_batch_deletion() {
     let index = server.index("doggos");
     // to avoid being flaky we're only going to delete an already finished batch :(
     let (task,_status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (task,_status_code) = server.delete_tasks("uids=0").await;
-    index.wait_task(task.uid()).await;
+    index.wait_task(task.uid()).await.succeeded();
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },