fix the tests

Tamo 2025-01-13 16:17:50 +01:00
parent 27155f845c
commit 8ff15b3dfb
6 changed files with 88 additions and 101 deletions

View File

@@ -143,23 +143,19 @@ async fn list_batches_status_filtered() {
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
// We can't be sure that the update isn't already processed so we can't test this
// let (response, code) = index.filtered_batches(&[], &["processing"]).await;
// assert_eq!(code, 200, "{}", response);
// assert_eq!(response["results"].as_array().unwrap().len(), 1);
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_batches(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
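
The reshuffle above removes a timing dependency: instead of racing a document addition that may or may not still be `processing` (hence the commented-out filter), the test now enqueues a second `indexCreation` on an index that already exists, which fails deterministically. Condensed to the new flow only (illustrative; it reuses the harness helpers already shown in this diff):

let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded(); // first creation succeeds
let (task, _status_code) = index.create(None).await; // same index again
index.wait_task(task.uid()).await.failed(); // duplicate creation fails
// Exactly one succeeded and one failed batch, so both filters are stable.
let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_batches(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
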
@@ -176,9 +172,13 @@ async fn list_batches_type_filtered() {
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) =
index.filtered_batches(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
index.filtered_batches(&["indexCreation", "IndexDeletion"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
@@ -344,7 +344,7 @@ async fn test_summarized_delete_documents_by_batch() {
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -414,7 +414,7 @@ async fn test_summarized_delete_documents_by_filter() {
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -448,7 +448,7 @@ async fn test_summarized_delete_documents_by_filter() {
index.create(None).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(2).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -519,13 +519,12 @@ async fn test_summarized_delete_document_by_id() {
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_document(1).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(batch,
@r#"
{
"uid": 0,
"uid": "[uid]",
"progress": null,
"details": {
"providedIds": 1,
@@ -672,7 +671,7 @@ async fn test_summarized_index_creation() {
"#);
let (task, _status_code) = index.create(Some("doggos")).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -707,7 +706,7 @@ async fn test_summarized_index_deletion() {
let server = Server::new().await;
let index = server.index("test");
let (ret, _code) = index.delete().await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.failed();
snapshot!(batch,
@r###"
{
@@ -738,7 +737,7 @@ async fn test_summarized_index_deletion() {
// both batches may get autobatched and the deleted documents count will be wrong.
let (ret, _code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.succeeded();
snapshot!(batch,
@r###"
{
@@ -761,7 +760,7 @@ async fn test_summarized_index_deletion() {
"###);
let (ret, _code) = index.delete().await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.succeeded();
snapshot!(batch,
@r###"
{
@@ -784,7 +783,7 @@ async fn test_summarized_index_deletion() {
// What happens when you delete an index that doesn't exist.
let (ret, _code) = index.delete().await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.failed();
snapshot!(batch,
@r###"
{
@@ -817,7 +816,7 @@ async fn test_summarized_index_update() {
let index = server.index("test");
// If the index doesn't exist yet, we should get errors with or without the primary key.
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -845,7 +844,7 @@ async fn test_summarized_index_update() {
"#);
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -944,7 +943,7 @@ async fn test_summarized_index_swap() {
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(task.uid()).await;
server.wait_task(task.uid()).await.failed();
let (batch, _) = server.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -985,33 +984,26 @@ async fn test_summarized_index_swap() {
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(task.uid()).await;
server.wait_task(task.uid()).await.succeeded();
let (batch, _) = server.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r#"
{
"uid": 3,
"uid": 1,
"progress": null,
"details": {
"swaps": [
{
"indexes": [
"doggos",
"cattos"
]
}
]
},
"details": {},
"stats": {
"totalNbTasks": 1,
"status": {
"succeeded": 1
},
"types": {
"indexSwap": 1
"indexCreation": 1
},
"indexUids": {}
"indexUids": {
"doggos": 1
}
},
"duration": "[duration]",
"startedAt": "[date]",

View File

@@ -46,11 +46,11 @@ impl Value {
// Panic if the json doesn't contain the `status` field set to "succeeded"
#[track_caller]
pub fn succeeded(&self) -> &Self {
pub fn succeeded(&self) -> Self {
if !self.is_success() {
panic!("Called succeeded on {}", serde_json::to_string_pretty(&self.0).unwrap());
}
self
self.clone()
}
/// Return `true` if the `status` field is set to `failed`.
@@ -65,11 +65,11 @@ impl Value {
// Panic if the json doesn't contain the `status` field set to "failed"
#[track_caller]
pub fn failed(&self) -> &Self {
pub fn failed(&self) -> Self {
if !self.is_fail() {
panic!("Called failed on {}", serde_json::to_string_pretty(&self.0).unwrap());
}
self
self.clone()
}
}
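
The signature change above is what lets the rest of this commit bind the result of a chained wait, e.g. `let batch = index.wait_task(ret.uid()).await.failed();`: the helpers now return an owned `Value` (via `clone()`) instead of `&Self`, so the assertion result can outlive the temporary produced by `wait_task`. A minimal, self-contained sketch of the idea, where this `Value` is a stand-in wrapping `serde_json::Value` rather than the real test-harness type:

use serde_json::{json, Value as Json};

#[derive(Clone, Debug)]
struct Value(Json);

impl Value {
    fn is_success(&self) -> bool {
        self.0["status"] == "succeeded"
    }

    // Panics unless `status` is "succeeded"; returns an owned clone so the
    // result can outlive the temporary it was called on.
    #[track_caller]
    fn succeeded(&self) -> Self {
        if !self.is_success() {
            panic!("Called succeeded on {}", serde_json::to_string_pretty(&self.0).unwrap());
        }
        self.clone()
    }
}

// Stand-in for the harness's `index.wait_task(..).await`, which yields a temporary.
fn wait_task() -> Value {
    Value(json!({ "uid": 0, "status": "succeeded" }))
}

fn main() {
    // With the old `-> &Self` signature this binding would borrow a temporary
    // that is dropped at the end of the statement; with `-> Self` it compiles.
    let task = wait_task().succeeded();
    println!("{task:?}");
}

The extra clone is the price for not threading lifetimes through every call site in the tests.
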

View File

@@ -1274,8 +1274,8 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(1).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@@ -1311,7 +1311,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1348,7 +1348,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1390,8 +1390,8 @@ async fn error_add_documents_missing_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(1).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@@ -1439,7 +1439,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@@ -1741,8 +1741,8 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(2).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@@ -1779,8 +1779,8 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(3).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@@ -1817,8 +1817,8 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(4).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@@ -1855,7 +1855,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1893,7 +1893,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1931,7 +1931,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1969,7 +1969,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2007,7 +2007,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2045,7 +2045,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2083,7 +2083,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2121,7 +2121,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2159,7 +2159,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2200,7 +2200,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2237,7 +2237,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2274,7 +2274,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2318,7 +2318,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await;
let ret = index.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@@ -2341,7 +2341,7 @@ async fn add_invalid_geo_and_then_settings() {
let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await;
let ret = index.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@@ -2409,8 +2409,8 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(0).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2450,7 +2450,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);

View File

@@ -84,7 +84,6 @@ async fn clear_all_documents_empty_index() {
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
index.wait_task(response.uid()).await.succeeded();
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}

View File

@@ -115,7 +115,7 @@ async fn create_index_with_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.succeeded();
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -130,8 +130,7 @@ async fn create_index_with_invalid_primary_key() {
let index = server.unique_index();
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -141,8 +140,7 @@ async fn create_index_with_invalid_primary_key() {
let (response, code) = index.add_documents(documents, Some("id")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);

View File

@@ -151,23 +151,19 @@ async fn list_tasks_status_filtered() {
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
// We can't be sure that the update isn't already processed so we can't test this
// let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
// assert_eq!(code, 200, "{}", response);
// assert_eq!(response["results"].as_array().unwrap().len(), 1);
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
@@ -336,7 +332,7 @@ async fn test_summarized_delete_documents_by_batch() {
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -401,7 +397,7 @@ async fn test_summarized_delete_documents_by_filter() {
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -434,7 +430,7 @@ async fn test_summarized_delete_documents_by_filter() {
index.create(None).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -498,7 +494,7 @@ async fn test_summarized_delete_document_by_id() {
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_document(1).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -636,7 +632,7 @@ async fn test_summarized_index_creation() {
"###);
let (task, _status_code) = index.create(Some("doggos")).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -780,7 +776,7 @@ async fn test_summarized_index_update() {
let index = server.index("test");
// If the index doesn't exist yet, we should get errors with or without the primary key.
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -809,7 +805,7 @@ async fn test_summarized_index_update() {
"###);
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -897,7 +893,7 @@ async fn test_summarized_index_swap() {
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.failed();
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -932,9 +928,11 @@ async fn test_summarized_index_swap() {
}
"###);
server.index("doggos").create(None).await;
let (task, _status_code) = server.index("cattos").create(None).await;
server
let (task, _code) = server.index("doggos").create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server.index("cattos").create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
]))