From 38b1c57fa80560685ff422e44196d05621c6afcd Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 May 2025 16:03:44 +0300 Subject: [PATCH 01/11] Faster IT tests for add_documents.rs Use Shared server where possible Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/add_documents.rs | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 6569bb9a5..2c8925833 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -18,8 +18,8 @@ async fn add_documents_test_json_content_types() { } ]); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -75,8 +75,8 @@ async fn add_single_document_test_json_content_types() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -132,8 +132,8 @@ async fn add_single_document_gzip_encoded() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post let document = serde_json::to_string(&document).unwrap(); @@ -187,8 +187,8 @@ async fn add_single_document_gzip_encoded() { async fn add_single_document_gzip_encoded_with_incomplete_error() { let document = json!("kefir"); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post let document = serde_json::to_string(&document).unwrap(); @@ -244,8 +244,8 @@ async fn add_single_document_with_every_encoding() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post let document = serde_json::to_string(&document).unwrap(); @@ -518,7 +518,7 @@ async fn error_add_documents_test_bad_content_types() { } ]); - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -574,7 +574,7 @@ async fn error_add_documents_test_no_content_type() { } ]); - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -622,7 +622,7 @@ async fn error_add_documents_test_no_content_type() { async fn error_add_malformed_csv_documents() { let document = "id, content\n1234, hello, world\n12, hello world"; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -672,7 +672,7 @@ async fn error_add_malformed_csv_documents() { async fn error_add_malformed_json_documents() { let document = r#"[{"id": 1}, {id: 2}]"#; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -768,7 +768,7 @@ async fn error_add_malformed_json_documents() { async fn 
error_add_malformed_ndjson_documents() { let document = "{\"id\": 1}\n{id: 2}"; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -818,7 +818,7 @@ async fn error_add_malformed_ndjson_documents() { async fn error_add_missing_payload_csv_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -868,7 +868,7 @@ async fn error_add_missing_payload_csv_documents() { async fn error_add_missing_payload_json_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -918,7 +918,7 @@ async fn error_add_missing_payload_json_documents() { async fn error_add_missing_payload_ndjson_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post From cf5d26124a9ec856f41e07e4b0c8b80b4db49f49 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 May 2025 16:12:02 +0300 Subject: [PATCH 02/11] Call .succeeded() or .failed() on the waited task Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/add_documents.rs | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 2c8925833..57d0864ad 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -1111,7 +1111,7 @@ async fn document_addition_with_huge_int_primary_key() { let (response, code) = index.add_documents(documents, Some("primary")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(response, @r###" { @@ -1568,7 +1568,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1603,7 +1603,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.failed(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1652,7 +1652,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); // Documents without a primary key are not accepted. 
snapshot!(response, @@ -1697,7 +1697,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1733,7 +1733,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1782,7 +1782,7 @@ async fn add_documents_with_geo_field() { ]); let (task, _status_code) = index.add_documents(documents, None).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { @@ -1906,7 +1906,7 @@ async fn update_documents_with_geo_field() { ]); let (task, _status_code) = index.add_documents(documents, None).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { @@ -1975,7 +1975,7 @@ async fn update_documents_with_geo_field() { } ]); let (task, _status_code) = index.update_documents(updated_documents, None).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { @@ -2913,7 +2913,7 @@ async fn batch_several_documents_addition() { // wait first batch of documents to finish futures::future::join_all(waiter).await; - index.wait_task(4).await; + index.wait_task(4).await.succeeded(); // run a second completely failing batch documents[40] = json!({"title": "error", "desc": "error"}); @@ -2925,7 +2925,7 @@ async fn batch_several_documents_addition() { } // wait second batch of documents to finish futures::future::join_all(waiter).await; - index.wait_task(9).await; + index.wait_task(9).await.failed(); let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await; From fc88b003b47301006348e2cd6ddd0f189f9d7c7d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 26 May 2025 11:28:23 +0300 Subject: [PATCH 03/11] Use shared server and unique indices for add_documents IT tests Signed-off-by: Martin Tzvetanov Grigorov --- Cargo.lock | 102 +++--- crates/index-scheduler/src/error.rs | 14 +- crates/meili-snap/src/lib.rs | 10 + crates/meilisearch/Cargo.toml | 2 +- .../tests/documents/add_documents.rs | 328 +++++++++--------- 5 files changed, 243 insertions(+), 213 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dc2aa5af4..a36c568b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1067,9 +1067,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.38" +version = "4.5.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" +checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f" dependencies = [ "clap_builder", "clap_derive", @@ -1077,9 +1077,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.38" +version = "4.5.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" +checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51" dependencies = [ "anstream", "anstyle", @@ -2715,17 +2715,21 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9f1e950e0d9d1d3c47184416723cf29c0d1f93bd8cccf37e4beb6b44f31710" +checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http 1.3.1", "http-body", "hyper", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -2988,6 +2992,16 @@ dependencies = [ "serde", ] +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" version = "0.4.16" @@ -3067,9 +3081,9 @@ dependencies = [ [[package]] name = "jieba-rs" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d1bcad6332969e4d48ee568d430e14ee6dea70740c2549d005d87677ebefb0c" +checksum = "b06096b4b61fb4bfdbf16c6a968ea2d6be1ac9617cf3db741c3b641e6c290a35" dependencies = [ "cedarwood", "fxhash", @@ -3207,9 +3221,9 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a793df0d7afeac54f95b471d3af7f0d4fb975699f972341a4b76988d49cdf0c" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" dependencies = [ "cfg-if", "windows-targets 0.53.0", @@ -4889,9 +4903,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "2bf597b113be201cb2269b4c39b39a804d01b99ee95a4278f0ed04e45cff1c71" dependencies = [ "base64 0.22.1", "bytes", @@ -4913,7 +4927,6 @@ dependencies = [ "pin-project-lite", "quinn", "rustls", - "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", @@ -4923,14 +4936,14 @@ dependencies = [ "tokio-rustls", "tokio-util", "tower", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.26.11", - "windows-registry", + "webpki-roots 1.0.0", ] [[package]] @@ -5435,9 +5448,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.9" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5969,6 +5982,24 @@ dependencies = [ "tower-service", ] +[[package]] +name = "tower-http" +version = "0.6.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fdb0c213ca27a9f57ab69ddb290fd80d970922355b83ae380b395d3986b8a2e" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-util", + "http 1.3.1", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -6619,7 +6650,7 @@ checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" dependencies = [ "windows-implement", "windows-interface", - "windows-result 0.1.2", + "windows-result", "windows-targets 0.52.6", ] @@ -6645,23 +6676,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "windows-link" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" - -[[package]] -name = "windows-registry" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result 0.3.4", - "windows-strings", - "windows-targets 0.53.0", -] - [[package]] name = "windows-result" version = "0.1.2" @@ -6671,24 +6685,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" -dependencies = [ - "windows-link", -] - [[package]] name = "windows-sys" version = "0.48.0" diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs index cb798b385..a0945d8a3 100644 --- a/crates/index-scheduler/src/error.rs +++ b/crates/index-scheduler/src/error.rs @@ -126,7 +126,7 @@ pub enum Error { #[error(transparent)] Heed(#[from] heed::Error), #[error("{}", match .index_uid { - Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", uid), + Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", Error::index_name(uid)), _ => format!("{error}") })] Milli { error: milli::Error, index_uid: Option }, @@ -177,6 +177,18 @@ pub enum Error { PlannedFailure, } +impl Error { + + #[inline] + fn index_name(index_name: &str) -> &str { + if let Ok(_) = uuid::Uuid::parse_str(index_name) { + "[uuid]" + } else { + index_name + } + } +} + #[derive(Debug, thiserror::Error)] #[error( "{disabled_action} requires enabling the `{feature}` experimental feature. 
See {issue_link}" diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 688c87494..17a57c082 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -33,6 +33,7 @@ pub fn default_snapshot_settings_for_test<'a>( let filename = path.file_name().unwrap().to_str().unwrap(); settings.set_omit_expression(true); +<<<<<<< HEAD fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content { match &content { Content::String(s) => { @@ -45,6 +46,15 @@ pub fn default_snapshot_settings_for_test<'a>( settings.add_dynamic_redaction(".message", uuid_in_message_redaction); settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction); + settings.add_dynamic_redaction(".indexUid", |content, _content_path| { + match &content { + Content::String(s) => match uuid::Uuid::parse_str(s) { + Ok(_) => Content::String("[uuid]".to_owned()), + Err(_) => content, + }, + _ => content, + } + }); let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name); let test_name = test_name.rsplit("::").next().unwrap().to_owned(); diff --git a/crates/meilisearch/Cargo.toml b/crates/meilisearch/Cargo.toml index 40c0d98b5..dffa60326 100644 --- a/crates/meilisearch/Cargo.toml +++ b/crates/meilisearch/Cargo.toml @@ -116,7 +116,7 @@ utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] } actix-rt = "2.10.0" brotli = "6.0.0" # fixed version due to format breakages in v1.40 -insta = "=1.39.0" +insta = { version = "=1.39.0", features = ["redactions"] } manifest-dir-macros = "0.1.18" maplit = "1.0.2" meili-snap = { path = "../meili-snap" } diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 57d0864ad..e8ef43b40 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -3,7 +3,7 @@ use meili_snap::{json_string, snapshot}; use meilisearch::Opt; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; - +use uuid::Uuid; use crate::common::encoder::Encoder; use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value}; use crate::json; @@ -21,10 +21,11 @@ async fn add_documents_test_json_content_types() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -37,7 +38,7 @@ async fn add_documents_test_json_content_types() { @r###" { "taskUid": 0, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -46,7 +47,7 @@ async fn add_documents_test_json_content_types() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -59,7 +60,7 @@ async fn add_documents_test_json_content_types() { @r###" { "taskUid": 1, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -78,10 +79,11 @@ async fn add_single_document_test_json_content_types() { // this is what is expected and should 
work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -94,7 +96,7 @@ async fn add_single_document_test_json_content_types() { @r###" { "taskUid": 0, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -103,7 +105,7 @@ async fn add_single_document_test_json_content_types() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -116,7 +118,7 @@ async fn add_single_document_test_json_content_types() { @r###" { "taskUid": 1, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -135,11 +137,12 @@ async fn add_single_document_gzip_encoded() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); let encoder = Encoder::Gzip; let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) .insert_header(("content-type", "application/json")) .insert_header(encoder.header().unwrap()) @@ -153,7 +156,7 @@ async fn add_single_document_gzip_encoded() { @r###" { "taskUid": 0, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -162,7 +165,7 @@ async fn add_single_document_gzip_encoded() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document)) .insert_header(("content-type", "application/json")) .insert_header(encoder.header().unwrap()) @@ -176,7 +179,7 @@ async fn add_single_document_gzip_encoded() { @r###" { "taskUid": 1, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -190,10 +193,11 @@ async fn add_single_document_gzip_encoded_with_incomplete_error() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .insert_header(("content-encoding", "gzip")) @@ -215,7 +219,7 @@ async fn add_single_document_gzip_encoded_with_incomplete_error() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .insert_header(("content-encoding", "gzip")) @@ -247,12 +251,13 @@ async fn add_single_document_with_every_encoding() { // this is what is expected and should work 
let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); for (task_uid, encoder) in Encoder::iterator().enumerate() { let mut req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) .insert_header(("content-type", "application/json")); req = match encoder.header() { @@ -271,8 +276,8 @@ async fn add_single_document_with_every_encoding() { #[actix_rt::test] async fn add_csv_document() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id,name,race 0,jean,bernese mountain @@ -283,18 +288,18 @@ async fn add_csv_document() { snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" { "taskUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -335,8 +340,8 @@ async fn add_csv_document() { #[actix_rt::test] async fn add_csv_document_with_types() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id:number,name:string,race:string,age:number,cute:boolean 0,jean,bernese mountain,2.5,true @@ -348,18 +353,18 @@ async fn add_csv_document_with_types() { snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" { "taskUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -411,8 +416,8 @@ async fn add_csv_document_with_types() { #[actix_rt::test] async fn add_csv_document_with_custom_delimiter() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id|name|race 0|jean|bernese mountain @@ -424,18 +429,18 @@ async fn add_csv_document_with_custom_delimiter() { snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" { "taskUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => 
"[date]", ".duration" => "[duration]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -476,8 +481,8 @@ async fn add_csv_document_with_custom_delimiter() { #[actix_rt::test] async fn add_csv_document_with_types_error() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id:number,a:boolean,b:number 0,doggo,1"; @@ -520,10 +525,11 @@ async fn error_add_documents_test_bad_content_types() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/plain")) .to_request(); @@ -544,7 +550,7 @@ async fn error_add_documents_test_bad_content_types() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/plain")) .to_request(); @@ -576,10 +582,11 @@ async fn error_add_documents_test_no_content_type() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .to_request(); let res = test::call_service(&app, req).await; @@ -599,7 +606,7 @@ async fn error_add_documents_test_no_content_type() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .to_request(); let res = test::call_service(&app, req).await; @@ -624,10 +631,11 @@ async fn error_add_malformed_csv_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -648,7 +656,7 @@ async fn error_add_malformed_csv_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -674,10 +682,11 @@ async fn error_add_malformed_json_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -698,7 +707,7 @@ async fn error_add_malformed_json_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -724,7 +733,7 @@ async fn error_add_malformed_json_documents() { let 
document = format!("\"{}\"", long); let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document) .insert_header(("content-type", "application/json")) .to_request(); @@ -745,7 +754,7 @@ async fn error_add_malformed_json_documents() { // add one more char to the long string to test if the truncating works. let document = format!("\"{}m\"", long); let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document) .insert_header(("content-type", "application/json")) .to_request(); @@ -770,10 +779,11 @@ async fn error_add_malformed_ndjson_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -794,7 +804,7 @@ async fn error_add_malformed_ndjson_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -820,10 +830,11 @@ async fn error_add_missing_payload_csv_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -844,7 +855,7 @@ async fn error_add_missing_payload_csv_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -870,10 +881,11 @@ async fn error_add_missing_payload_json_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -894,7 +906,7 @@ async fn error_add_missing_payload_json_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -920,10 +932,11 @@ async fn error_add_missing_payload_ndjson_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -944,7 +957,7 @@ async fn error_add_missing_payload_ndjson_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) 
.set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -966,8 +979,8 @@ async fn error_add_missing_payload_ndjson_documents() { #[actix_rt::test] async fn add_documents_no_index_creation() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -989,7 +1002,7 @@ async fn add_documents_no_index_creation() { { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1037,8 +1050,8 @@ async fn error_document_add_create_index_bad_uid() { #[actix_rt::test] async fn document_addition_with_primary_key() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1052,7 +1065,7 @@ async fn document_addition_with_primary_key() { @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -1068,7 +1081,7 @@ async fn document_addition_with_primary_key() { { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1086,10 +1099,10 @@ async fn document_addition_with_primary_key() { let (response, code) = index.get().await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]" }), + snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]", ".uid" => "[uuid]" }), @r###" { - "uid": "test", + "uid": "[uuid]", "createdAt": "[date]", "updatedAt": "[date]", "primaryKey": "primary" @@ -1099,8 +1112,8 @@ async fn document_addition_with_primary_key() { #[actix_rt::test] async fn document_addition_with_huge_int_primary_key() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1117,7 +1130,7 @@ async fn document_addition_with_huge_int_primary_key() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1146,8 +1159,8 @@ async fn document_addition_with_huge_int_primary_key() { #[actix_rt::test] async fn replace_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1162,7 +1175,7 @@ async fn replace_document() { @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -1190,7 +1203,7 @@ async fn replace_document() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1219,17 +1232,17 @@ async fn replace_document() { #[actix_rt::test] async fn add_no_documents() { - let server = Server::new().await; - let index = server.index("kefir"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.add_documents(json!([]), None).await; snapshot!(code, @"202 Accepted"); - let task = server.wait_task(task.uid()).await; + let task = 
server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1246,13 +1259,13 @@ async fn add_no_documents() { "#); let (task, _code) = index.add_documents(json!([]), Some("kefkef")).await; - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1269,13 +1282,13 @@ async fn add_no_documents() { "#); let (task, _code) = index.add_documents(json!([{ "kefkef": 1 }]), None).await; - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1307,8 +1320,8 @@ async fn add_no_documents() { #[actix_rt::test] async fn add_larger_dataset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let update_id = index.load_test_set().await; let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); @@ -1319,12 +1332,11 @@ async fn add_larger_dataset() { let (response, code) = index .get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() }) .await; - assert_eq!(code, 200, "failed with `{}`", response); + assert_eq!(code, 200, "failed with `{response}`"); assert_eq!(response["results"].as_array().unwrap().len(), 77); // x-ndjson add large test - let server = Server::new().await; - let index = server.index("test"); + let index = server.unique_index(); let update_id = index.load_test_set_ndjson().await; let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); @@ -1341,8 +1353,8 @@ async fn add_larger_dataset() { #[actix_rt::test] async fn error_add_documents_bad_document_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("docid")).await; // unsupported characters @@ -1362,7 +1374,7 @@ async fn error_add_documents_bad_document_id() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1399,7 +1411,7 @@ async fn error_add_documents_bad_document_id() { { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1436,7 +1448,7 @@ async fn error_add_documents_bad_document_id() { { "uid": 3, "batchUid": 3, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1460,8 +1472,8 @@ async fn error_add_documents_bad_document_id() { #[actix_rt::test] async fn error_add_documents_missing_document_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("docid")).await; let documents = json!([ { @@ -1478,7 +1490,7 @@ 
async fn error_add_documents_missing_document_id() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1502,8 +1514,8 @@ async fn error_add_documents_missing_document_id() { #[actix_rt::test] async fn error_document_field_limit_reached_in_one_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1527,7 +1539,7 @@ async fn error_document_field_limit_reached_in_one_document() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1551,8 +1563,8 @@ async fn error_document_field_limit_reached_in_one_document() { #[actix_rt::test] async fn error_document_field_limit_reached_over_multiple_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1575,7 +1587,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1610,7 +1622,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1634,8 +1646,8 @@ async fn error_document_field_limit_reached_over_multiple_documents() { #[actix_rt::test] async fn error_document_field_limit_reached_in_one_nested_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1660,7 +1672,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1679,8 +1691,8 @@ async fn error_document_field_limit_reached_in_one_nested_document() { #[actix_rt::test] async fn error_document_field_limit_reached_over_multiple_documents_with_nested_fields() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1704,7 +1716,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1740,7 +1752,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1759,8 +1771,8 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ #[actix_rt::test] async fn add_documents_with_geo_field() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); 
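// Note on the pattern switched to just above (a short explanatory sketch, not lines from the
// original patch): `Server::new_shared()` hands back one Meilisearch test server that is reused
// by every test in the binary instead of booting a fresh instance per test, and
// `server.unique_index()` presumably derives the index uid from a freshly generated UUID,
// mirroring what the raw-HTTP tests in this file do with `Uuid::new_v4().to_string()`.
// Because the index uid is now random, the snapshots in these hunks expect "[uuid]" rather than
// a fixed name; the meili-snap redactions added in PATCH 03 rewrite UUID-shaped values behind
// the ".indexUid", ".message" and ".error.message" selectors before snapshots are compared.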
index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; let documents = json!([ @@ -1788,7 +1800,7 @@ async fn add_documents_with_geo_field() { { "uid": 1, "batchUid": 1, - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1883,8 +1895,8 @@ async fn add_documents_with_geo_field() { #[actix_rt::test] async fn update_documents_with_geo_field() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; let documents = json!([ @@ -1912,7 +1924,7 @@ async fn update_documents_with_geo_field() { { "uid": 1, "batchUid": 1, - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1981,7 +1993,7 @@ async fn update_documents_with_geo_field() { { "uid": 2, "batchUid": 2, - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2075,8 +2087,8 @@ async fn update_documents_with_geo_field() { #[actix_rt::test] async fn add_documents_invalid_geo_field() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; @@ -2092,12 +2104,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), @r###" { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2106,7 +2118,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", + "message": "Index `[uuid]`: The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2135,7 +2147,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 3, "batchUid": 3, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2144,7 +2156,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "message": "Index `[uuid]`: Could not find latitude nor longitude in the document with the id: `\"11\"`. 
Was expecting `_geo.lat` and `_geo.lng` fields.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2173,7 +2185,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 4, "batchUid": 4, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2182,7 +2194,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "message": "Index `[uuid]`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2211,7 +2223,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 5, "batchUid": 5, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2220,7 +2232,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2249,7 +2261,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 6, "batchUid": 6, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2258,7 +2270,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2287,7 +2299,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 7, "batchUid": 7, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2296,7 +2308,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2325,7 +2337,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 8, "batchUid": 8, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2334,7 +2346,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. 
Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2363,7 +2375,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 9, "batchUid": 9, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2372,7 +2384,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2401,7 +2413,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 10, "batchUid": 10, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2410,7 +2422,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2439,7 +2451,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 11, "batchUid": 11, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2448,7 +2460,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2477,7 +2489,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 12, "batchUid": 12, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2486,7 +2498,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. 
Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2515,7 +2527,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 13, "batchUid": 13, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2524,7 +2536,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", + "message": "Index `[uuid]`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2554,7 +2566,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 14, "batchUid": 14, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2563,7 +2575,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2591,7 +2603,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 15, "batchUid": 15, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2600,7 +2612,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2628,7 +2640,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 16, "batchUid": 16, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2637,7 +2649,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. Was expecting finite numbers but instead got `null` and `null`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. 
Was expecting finite numbers but instead got `null` and `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2653,8 +2665,8 @@ async fn add_documents_invalid_geo_field() { // Related to #4333 #[actix_rt::test] async fn add_invalid_geo_and_then_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; // _geo is not a correct object @@ -2671,7 +2683,7 @@ async fn add_invalid_geo_and_then_settings() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2694,7 +2706,7 @@ async fn add_invalid_geo_and_then_settings() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "settingsUpdate", "canceledBy": null, @@ -2704,7 +2716,7 @@ async fn add_invalid_geo_and_then_settings() { ] }, "error": { - "message": "Index `test`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2719,8 +2731,8 @@ async fn add_invalid_geo_and_then_settings() { #[actix_rt::test] async fn error_add_documents_payload_size() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; let document = json!( { @@ -2746,8 +2758,8 @@ async fn error_add_documents_payload_size() { #[actix_rt::test] async fn error_primary_key_inference() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -2766,7 +2778,7 @@ async fn error_primary_key_inference() { { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2807,7 +2819,7 @@ async fn error_primary_key_inference() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2846,7 +2858,7 @@ async fn error_primary_key_inference() { { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2865,8 +2877,8 @@ async fn error_primary_key_inference() { #[actix_rt::test] async fn add_documents_with_primary_key_twice() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -2888,8 +2900,8 @@ async fn add_documents_with_primary_key_twice() { #[actix_rt::test] async fn batch_several_documents_addition() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let mut documents: Vec<_> = (0..150usize) .map(|id| { From ce9c930d1070dc60f172db146e074bb9099125b8 Mon Sep 17 00:00:00 2001 From: Martin 
Tzvetanov Grigorov Date: Mon, 26 May 2025 11:33:59 +0300 Subject: [PATCH 04/11] Fix clippy and fmt Signed-off-by: Martin Tzvetanov Grigorov --- crates/index-scheduler/src/error.rs | 3 +-- crates/meili-snap/src/lib.rs | 1 - crates/meilisearch/tests/documents/add_documents.rs | 6 +++--- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs index a0945d8a3..cbedf827d 100644 --- a/crates/index-scheduler/src/error.rs +++ b/crates/index-scheduler/src/error.rs @@ -178,10 +178,9 @@ pub enum Error { } impl Error { - #[inline] fn index_name(index_name: &str) -> &str { - if let Ok(_) = uuid::Uuid::parse_str(index_name) { + if uuid::Uuid::parse_str(index_name).is_ok() { "[uuid]" } else { index_name diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 17a57c082..30769c7c1 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -33,7 +33,6 @@ pub fn default_snapshot_settings_for_test<'a>( let filename = path.file_name().unwrap().to_str().unwrap(); settings.set_omit_expression(true); -<<<<<<< HEAD fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content { match &content { Content::String(s) => { diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index e8ef43b40..efe1bfbf0 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -1,12 +1,12 @@ +use crate::common::encoder::Encoder; +use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value}; +use crate::json; use actix_web::test; use meili_snap::{json_string, snapshot}; use meilisearch::Opt; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; use uuid::Uuid; -use crate::common::encoder::Encoder; -use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value}; -use crate::json; /// This is the basic usage of our API and every other tests uses the content-type application/json #[actix_rt::test] From f3d691667d47985347766f134b294a2a8354bbe2 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 26 May 2025 13:39:15 +0300 Subject: [PATCH 05/11] Use a Regex in insta dynamic redaction to replace Uuids with [uuid] Signed-off-by: Martin Tzvetanov Grigorov --- crates/index-scheduler/src/error.rs | 13 +------------ crates/meili-snap/src/lib.rs | 8 ++++++++ 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs index cbedf827d..cb798b385 100644 --- a/crates/index-scheduler/src/error.rs +++ b/crates/index-scheduler/src/error.rs @@ -126,7 +126,7 @@ pub enum Error { #[error(transparent)] Heed(#[from] heed::Error), #[error("{}", match .index_uid { - Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", Error::index_name(uid)), + Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", uid), _ => format!("{error}") })] Milli { error: milli::Error, index_uid: Option }, @@ -177,17 +177,6 @@ pub enum Error { PlannedFailure, } -impl Error { - #[inline] - fn index_name(index_name: &str) -> &str { - if uuid::Uuid::parse_str(index_name).is_ok() { - "[uuid]" - } else { - index_name - } - } -} - #[derive(Debug, thiserror::Error)] #[error( "{disabled_action} requires enabling the `{feature}` experimental feature. 
See {issue_link}" diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 30769c7c1..0f709b3de 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -55,6 +55,14 @@ pub fn default_snapshot_settings_for_test<'a>( } }); + settings.add_dynamic_redaction(".error.message", |content, _content_path| match &content { + Content::String(s) => { + let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "$before[uuid]$after"); + Content::String(uuid_replaced.to_string()) + } + _ => content, + }); + let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name); let test_name = test_name.rsplit("::").next().unwrap().to_owned(); From 3e0de6cb83a219a38428a4c68b5b6217c4b8fb91 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 14:39:45 +0300 Subject: [PATCH 06/11] Wait for the batched tasks bu their real uid. Some of them succeed, others fail. Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/documents/add_documents.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index efe1bfbf0..35f3d793a 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -2924,8 +2924,10 @@ async fn batch_several_documents_addition() { } // wait first batch of documents to finish - futures::future::join_all(waiter).await; - index.wait_task(4).await.succeeded(); + let finished_tasks = futures::future::join_all(waiter).await; + for (task, _code) in finished_tasks { + index.wait_task(task.uid()).await; + } // run a second completely failing batch documents[40] = json!({"title": "error", "desc": "error"}); @@ -2936,8 +2938,10 @@ async fn batch_several_documents_addition() { waiter.push(index.add_documents(json!(chunk), Some("id"))); } // wait second batch of documents to finish - futures::future::join_all(waiter).await; - index.wait_task(9).await.failed(); + let finished_tasks = futures::future::join_all(waiter).await; + for (task, _code) in finished_tasks { + index.wait_task(task.uid()).await; + } let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await; From ce65ad213bf85fcc8ec1fb1d7534f30688160eb1 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 16:27:08 +0300 Subject: [PATCH 07/11] Add dynamic redactions for `uid`, `batchUid` and `taskUid` Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/add_documents.rs | 286 +++++++++--------- 1 file changed, 142 insertions(+), 144 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 35f3d793a..99aa566d7 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -34,16 +34,16 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": 
"[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() @@ -56,10 +56,10 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", @@ -92,16 +92,16 @@ async fn add_single_document_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() @@ -114,10 +114,10 @@ async fn add_single_document_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", @@ -152,16 +152,16 @@ async fn add_single_document_gzip_encoded() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() @@ -175,10 +175,10 @@ async fn add_single_document_gzip_encoded() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => 
"[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", @@ -255,7 +255,7 @@ async fn add_single_document_with_every_encoding() { // post let document = serde_json::to_string(&document).unwrap(); - for (task_uid, encoder) in Encoder::iterator().enumerate() { + for encoder in Encoder::iterator() { let mut req = test::TestRequest::post() .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) @@ -268,9 +268,8 @@ async fn add_single_document_with_every_encoding() { let res = test::call_service(&app, req).await; let status_code = res.status(); let body = test::read_body(res).await; - let response: Value = serde_json::from_slice(&body).unwrap_or_default(); + let _response: Value = serde_json::from_slice(&body).unwrap_or_default(); assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], task_uid); } } @@ -285,20 +284,20 @@ async fn add_csv_document() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -350,20 +349,20 @@ async fn add_csv_document_with_types() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -426,20 +425,20 @@ async fn add_csv_document_with_custom_delimiter() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "?csvDelimiter=|").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => 
"[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -991,17 +990,16 @@ async fn add_documents_no_index_creation() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); - assert_eq!(response["taskUid"], 0); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_task(0).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1061,26 +1059,26 @@ async fn document_addition_with_primary_key() { ]); let (response, code) = index.add_documents(documents, Some("primary")).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_task(response.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1171,16 +1169,16 @@ async fn replace_document() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code,@"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": 
"[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); index.wait_task(response.uid()).await.succeeded(); @@ -1198,11 +1196,11 @@ async fn replace_document() { let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1369,11 +1367,11 @@ async fn error_add_documents_bad_document_id() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1393,7 +1391,7 @@ async fn error_add_documents_bad_document_id() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); // More than 512 bytes let documents = json!([ @@ -1406,11 +1404,11 @@ async fn error_add_documents_bad_document_id() { index.wait_task(value.uid()).await.failed(); let (response, code) = index.get_task(value.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1443,11 +1441,11 @@ async fn error_add_documents_bad_document_id() { index.wait_task(value.uid()).await.failed(); let (response, code) = index.get_task(value.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1485,11 +1483,11 @@ async fn error_add_documents_missing_document_id() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => 
"[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1509,7 +1507,7 @@ async fn error_add_documents_missing_document_id() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); } #[actix_rt::test] @@ -1795,11 +1793,11 @@ async fn add_documents_with_geo_field() { let (task, _status_code) = index.add_documents(documents, None).await; let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1814,7 +1812,7 @@ async fn add_documents_with_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; @@ -1919,11 +1917,11 @@ async fn update_documents_with_geo_field() { let (task, _status_code) = index.add_documents(documents, None).await; let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1938,7 +1936,7 @@ async fn update_documents_with_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await; snapshot!(code, @"200 OK"); @@ -1988,11 +1986,11 @@ async fn update_documents_with_geo_field() { ]); let (task, _status_code) = index.update_documents(updated_documents, None).await; let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -2104,11 +2102,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", 
".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), + @r#" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2128,7 +2126,7 @@ async fn add_documents_invalid_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); // _geo is an object but is missing both the lat and lng let documents = json!([ @@ -2142,11 +2140,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2180,11 +2178,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 4, - "batchUid": 4, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2218,11 +2216,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 5, - "batchUid": 5, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2256,11 +2254,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 6, - "batchUid": 6, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2294,11 +2292,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; 
snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 7, - "batchUid": 7, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2332,11 +2330,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 8, - "batchUid": 8, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2370,11 +2368,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 9, - "batchUid": 9, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2408,11 +2406,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 10, - "batchUid": 10, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2446,11 +2444,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 11, - "batchUid": 11, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2484,11 +2482,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; 
snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 12, - "batchUid": 12, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2522,11 +2520,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 13, - "batchUid": 13, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2561,11 +2559,11 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 14, - "batchUid": 14, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2598,11 +2596,11 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 15, - "batchUid": 15, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2635,11 +2633,11 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 16, - "batchUid": 16, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2773,11 +2771,11 @@ async fn error_primary_key_inference() { let 
(response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2797,7 +2795,7 @@ async fn error_primary_key_inference() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let documents = json!([ { @@ -2814,11 +2812,11 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2853,11 +2851,11 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", From 36f0a1492ce7f4185381c99b286d03f919d8a0e9 Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Wed, 28 May 2025 14:17:54 +0300 Subject: [PATCH 08/11] Apply suggestions from code review Co-authored-by: Tamo --- crates/meilisearch/tests/documents/add_documents.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 99aa566d7..522fc530b 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -991,11 +991,9 @@ async fn add_documents_no_index_creation() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); - index.wait_task(response.uid()).await.succeeded(); - - let (response, code) = index.get_task(0).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(response, @r###" { "uid": "[uid]", From c13efde04241d496b2eada49c8da39c0f24cbb0b Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 28 May 2025 14:35:50 +0300 Subject: [PATCH 09/11] uuid is a production dependency of meili-snap Signed-off-by: Martin Tzvetanov Grigorov --- crates/meili-snap/Cargo.toml | 
2 -- 1 file changed, 2 deletions(-) diff --git a/crates/meili-snap/Cargo.toml b/crates/meili-snap/Cargo.toml index 9dba56256..be96769ab 100644 --- a/crates/meili-snap/Cargo.toml +++ b/crates/meili-snap/Cargo.toml @@ -16,6 +16,4 @@ insta = { version = "=1.39.0", features = ["json", "redactions"] } md5 = "0.7.0" once_cell = "1.20" regex-lite = "0.1.6" - -[dev-dependencies] uuid = { version = "1.17.0", features = ["v4"] } From 02929e241b8af615f149ea44e0738ab99d33c37d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 28 May 2025 14:36:13 +0300 Subject: [PATCH 10/11] Update the status code Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/documents/add_documents.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 522fc530b..1cf492fc0 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -992,7 +992,7 @@ async fn add_documents_no_index_creation() { snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(code, @"200 OK"); + snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" { From 43ec97fe457be2e01fde6a502044037e9bdf4c02 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 28 May 2025 15:01:04 +0300 Subject: [PATCH 11/11] format the code Signed-off-by: Martin Tzvetanov Grigorov --- crates/meili-snap/src/lib.rs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 0f709b3de..1641a6335 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -45,14 +45,12 @@ pub fn default_snapshot_settings_for_test<'a>( settings.add_dynamic_redaction(".message", uuid_in_message_redaction); settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction); - settings.add_dynamic_redaction(".indexUid", |content, _content_path| { - match &content { - Content::String(s) => match uuid::Uuid::parse_str(s) { - Ok(_) => Content::String("[uuid]".to_owned()), - Err(_) => content, - }, - _ => content, - } + settings.add_dynamic_redaction(".indexUid", |content, _content_path| match &content { + Content::String(s) => match uuid::Uuid::parse_str(s) { + Ok(_) => Content::String("[uuid]".to_owned()), + Err(_) => content, + }, + _ => content, }); settings.add_dynamic_redaction(".error.message", |content, _content_path| match &content {
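
Note on the UUID redaction used throughout this series: the `replace_all(s, "$before[uuid]$after")` call introduced in PATCH 05 (and kept through PATCH 11) presupposes a `UUID_IN_MESSAGE_RE` regex with named `before`/`after` capture groups wrapped around a hyphenated UUID. Its definition is not visible in the hunks above, so the following is only a minimal sketch of what such a pattern could look like, built from the crates meili-snap already depends on (regex-lite, once_cell); it is an illustration, not necessarily the definition in the tree.

use once_cell::sync::Lazy;
use regex_lite::Regex;

// Hypothetical definition, for illustration only: the real UUID_IN_MESSAGE_RE is
// referenced in crates/meili-snap/src/lib.rs but its declaration is not part of
// this excerpt. The idea is to capture whatever surrounds a UUID so the
// replacement string "$before[uuid]$after" keeps the prose and swaps the id.
static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(
        r"(?P<before>.*)[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}(?P<after>.*)",
    )
    .unwrap()
});

fn redact_uuids_in_message(message: &str) -> String {
    // e.g. "Index `550e8400-e29b-41d4-a716-446655440000`: Could not find latitude ..."
    //   -> "Index `[uuid]`: Could not find latitude ..."
    UUID_IN_MESSAGE_RE.replace_all(message, "$before[uuid]$after").to_string()
}

With a pattern along these lines, the dynamic redactions on `.message`, `.error.message`, `.indexUid`, `.uid`, `.batchUid` and `.taskUid` keep the insta snapshots stable even though the shared server and `server.unique_index()` give every test a freshly generated UUID-named index.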