#4840 - Partial fix - Remove hard coded task ids to prevent flaky tests.

# Conflicts:
#	crates/meilisearch/tests/documents/add_documents.rs
#	crates/meilisearch/tests/search/facet_search.rs
#	crates/meilisearch/tests/settings/get_settings.rs
#	crates/meilisearch/tests/snapshot/mod.rs

parent fc23a0ee52
commit 91c7ef8723

24 changed files with 555 additions and 547 deletions
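In short, each test stops assuming it knows the id the server will assign to the next task and instead waits on the uid of the task it actually enqueued. A minimal sketch of the pattern, using only the test helpers that already appear in the diff below (the test name itself is illustrative):

    #[actix_web::test]
    async fn example_waits_on_returned_uid() {
        let server = Server::new().await;
        let index = server.index("test");

        // Before this commit: assumes the creation task gets id 0, which
        // breaks as soon as any other task is enqueued first.
        // index.create(None).await;
        // index.wait_task(0).await;

        // After: wait on the uid returned for the task we just enqueued.
        let (task, _status_code) = index.create(None).await;
        index.wait_task(task.uid()).await;
    }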
@@ -1,5 +1,6 @@
 mod errors;

+use byte_unit::rust_decimal::prelude::ToPrimitive;
 use meili_snap::insta::assert_json_snapshot;
 use meili_snap::snapshot;

@@ -10,8 +11,8 @@ use crate::json;
 async fn error_get_unexisting_batch_status() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
+    let (task,_coder) = index.create(None).await;
+    index.wait_task(task.uid()).await;
     let (response, code) = index.get_batch(1).await;

     let expected_response = json!({
@@ -29,8 +30,8 @@ async fn error_get_unexisting_batch_status() {
 async fn get_batch_status() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
+    let (task, _status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
     let (_response, code) = index.get_batch(0).await;
     assert_eq!(code, 200);
 }
@@ -39,8 +40,8 @@ async fn get_batch_status() {
 async fn list_batches() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
@@ -141,9 +142,9 @@ async fn list_batches_with_star_filters() {
 async fn list_batches_status_filtered() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
-    index
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
+    let (task,_status_code) = index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;

@@ -156,7 +157,7 @@ async fn list_batches_status_filtered() {
     // assert_eq!(code, 200, "{}", response);
     // assert_eq!(response["results"].as_array().unwrap().len(), 1);

-    index.wait_task(1).await;
+    index.wait_task(task.uid()).await;

     let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
     assert_eq!(code, 200, "{}", response);
@@ -167,17 +168,18 @@ async fn list_batches_status_filtered() {
 async fn list_batches_type_filtered() {
     let server = Server::new().await;
     let index = server.index("test");
-    let (task, _) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.delete().await;
-    index.wait_task(task.uid()).await.succeeded();
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
+    index
+        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
+        .await;

     let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await;
     assert_eq!(code, 200, "{}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 1);

     let (response, code) =
-        index.filtered_batches(&["indexCreation", "indexDeletion"], &[], &[]).await;
+        index.filtered_batches(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
     assert_eq!(code, 200, "{}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
@@ -186,8 +188,8 @@ async fn list_batches_type_filtered() {
 async fn list_batches_invalid_canceled_by_filter() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
@@ -201,8 +203,8 @@ async fn list_batches_invalid_canceled_by_filter() {
 async fn list_batches_status_and_type_filtered() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
@@ -275,8 +277,8 @@ async fn list_batch_filter_error() {
 async fn test_summarized_document_addition_or_update() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -306,8 +308,8 @@ async fn test_summarized_document_addition_or_update() {
     }
     "#);

-    index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    index.wait_task(1).await;
+    let (task,_status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -342,8 +344,8 @@ async fn test_summarized_document_addition_or_update() {
 async fn test_summarized_delete_documents_by_batch() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.delete_batch(vec![1, 2, 3]).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.delete_batch(vec![1, 2, 3]).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -374,8 +376,8 @@ async fn test_summarized_delete_documents_by_batch() {
     "#);

     index.create(None).await;
-    index.delete_batch(vec![42]).await;
-    index.wait_task(2).await;
+    let (task,_status_code) = index.delete_batch(vec![42]).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(2).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -411,8 +413,8 @@ async fn test_summarized_delete_documents_by_filter() {
     let server = Server::new().await;
     let index = server.index("test");

-    index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -444,8 +446,8 @@ async fn test_summarized_delete_documents_by_filter() {
     "#);

     index.create(None).await;
-    index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(2).await;
+    let (task,_status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(2).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -477,8 +479,8 @@ async fn test_summarized_delete_documents_by_filter() {
     "#);

     index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
-    index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(4).await;
+    let (task,_status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(4).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -514,8 +516,8 @@ async fn test_summarized_delete_documents_by_filter() {
 async fn test_summarized_delete_document_by_id() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.delete_document(1).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.delete_document(1).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -546,8 +548,8 @@ async fn test_summarized_delete_document_by_id() {
     "#);

     index.create(None).await;
-    index.delete_document(42).await;
-    index.wait_task(2).await;
+    let (task,_status_code) = index.delete_document(42).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(2).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -594,8 +596,8 @@ async fn test_summarized_settings_update() {
     }
     "###);

-    index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -639,8 +641,8 @@ async fn test_summarized_settings_update() {
 async fn test_summarized_index_creation() {
     let server = Server::new().await;
     let index = server.index("test");
-    index.create(None).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -667,8 +669,8 @@ async fn test_summarized_index_creation() {
     }
     "#);

-    index.create(Some("doggos")).await;
-    index.wait_task(1).await;
+    let (task,_status_code) = index.create(Some("doggos")).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -812,8 +814,8 @@ async fn test_summarized_index_update() {
     let server = Server::new().await;
     let index = server.index("test");
     // If the index doesn't exist yet, we should get errors with or without the primary key.
-    index.update(None).await;
-    index.wait_task(0).await;
+    let (task,_status_code) = index.update(None).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -840,8 +842,8 @@ async fn test_summarized_index_update() {
     }
     "#);

-    index.update(Some("bones")).await;
-    index.wait_task(1).await;
+    let (task,_status_code) = index.update(Some("bones")).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -873,8 +875,8 @@ async fn test_summarized_index_update() {
     // And run the same two tests once the index do exists.
     index.create(None).await;

-    index.update(None).await;
-    index.wait_task(3).await;
+    let (task,_status_code) = index.update(None).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(3).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -901,8 +903,8 @@ async fn test_summarized_index_update() {
     }
     "#);

-    index.update(Some("bones")).await;
-    index.wait_task(4).await;
+    let (task,_status_code) = index.update(Some("bones")).await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(4).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -935,12 +937,12 @@ async fn test_summarized_index_update() {
 #[actix_web::test]
 async fn test_summarized_index_swap() {
     let server = Server::new().await;
-    server
+    let (task,_status_code) = server
         .index_swap(json!([
             { "indexes": ["doggos", "cattos"] }
         ]))
         .await;
-    server.wait_task(0).await;
+    server.wait_task(task.uid()).await;
     let (batch, _) = server.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -975,14 +977,14 @@ async fn test_summarized_index_swap() {
     "#);

     server.index("doggos").create(None).await;
-    server.index("cattos").create(None).await;
+    let (task,_status_code) = server.index("cattos").create(None).await;
     server
         .index_swap(json!([
             { "indexes": ["doggos", "cattos"] }
         ]))
         .await;
-    server.wait_task(3).await;
-    let (batch, _) = server.get_batch(3).await;
+    server.wait_task(task.uid()).await;
+    let (batch, _) = server.get_batch(task.uid().to_u32().unwrap()).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r#"
@@ -1021,11 +1023,12 @@ async fn test_summarized_batch_cancelation() {
     let server = Server::new().await;
     let index = server.index("doggos");
     // to avoid being flaky we're only going to cancel an already finished batch :(
-    index.create(None).await;
-    index.wait_task(0).await;
-    server.cancel_tasks("uids=0").await;
-    index.wait_task(1).await;
-    let (batch, _) = index.get_batch(1).await;
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
+    let (task,_status_code) = server.cancel_tasks("uids=0").await;
+    index.wait_task(task.uid()).await;
+    //TODO: create a get_batch function interface that accepts u64, and remove the following cast.
+    let (batch, _) = index.get_batch(task.uid().to_u32().unwrap()).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r#"
@@ -1059,10 +1062,10 @@ async fn test_summarized_batch_deletion() {
     let server = Server::new().await;
     let index = server.index("doggos");
     // to avoid being flaky we're only going to delete an already finished batch :(
-    index.create(None).await;
-    index.wait_task(0).await;
-    server.delete_tasks("uids=0").await;
-    index.wait_task(1).await;
+    let (task,_status_code) = index.create(None).await;
+    index.wait_task(task.uid()).await;
+    let (task,_status_code) = server.delete_tasks("uids=0").await;
+    index.wait_task(task.uid()).await;
     let (batch, _) = index.get_batch(1).await;
     assert_json_snapshot!(batch,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@@ -1095,8 +1098,8 @@ async fn test_summarized_batch_deletion() {
 #[actix_web::test]
 async fn test_summarized_dump_creation() {
     let server = Server::new().await;
-    server.create_dump().await;
-    server.wait_task(0).await;
+    let (task,_status_code) = server.create_dump().await;
+    server.wait_task(task.uid()).await;
     let (batch, _) = server.get_batch(0).await;
     assert_json_snapshot!(batch,
         { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
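A note on the remaining cast: the TODO added in test_summarized_batch_cancelation points out that get_batch still takes a u32, so call sites have to narrow task.uid() (a u64) with to_u32().unwrap(). A hedged sketch of the follow-up it suggests, assuming the test helper simply issues a GET to /batches/{uid} through its internal service; the exact signature and body here are illustrative, not part of this commit:

    // Hypothetical widening of the test helper: accept u64 so callers can pass
    // task.uid() directly and drop the to_u32().unwrap() cast.
    pub async fn get_batch(&self, batch_uid: u64) -> (Value, StatusCode) {
        let url = format!("/batches/{batch_uid}");
        self.service.get(url).await
    }

    // Call sites would then read:
    // let (batch, _) = index.get_batch(task.uid()).await;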