diff --git a/.github/workflows/db-change-missing.yml b/.github/workflows/db-change-missing.yml index 12a616f88..6d15afd69 100644 --- a/.github/workflows/db-change-missing.yml +++ b/.github/workflows/db-change-missing.yml @@ -4,22 +4,22 @@ on: pull_request: types: [opened, synchronize, reopened, labeled, unlabeled] -env: - GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }} - jobs: check-labels: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Check db change labels id: check_labels + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels echo ${{ github.event.pull_request.number }} echo $URL - LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name) + LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels -q .[].name) + echo "Labels: $LABELS" if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then echo "::error::Pull request must contain either the 'db change' or 'no db change' label." exit 1 diff --git a/Cargo.lock b/Cargo.lock index 9975e5ed0..c487b6ac4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3722,6 +3722,8 @@ dependencies = [ "insta", "md5", "once_cell", + "regex-lite", + "uuid", ] [[package]] diff --git a/crates/index-scheduler/src/scheduler/process_index_operation.rs b/crates/index-scheduler/src/scheduler/process_index_operation.rs index 9b12d61cf..093c6209d 100644 --- a/crates/index-scheduler/src/scheduler/process_index_operation.rs +++ b/crates/index-scheduler/src/scheduler/process_index_operation.rs @@ -5,7 +5,7 @@ use meilisearch_types::milli::documents::PrimaryKey; use meilisearch_types::milli::progress::Progress; use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction}; use meilisearch_types::milli::update::DocumentAdditionResult; -use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder}; +use meilisearch_types::milli::{self, ChannelCongestion, Filter}; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::{Details, KindWithContent, Status, Task}; use meilisearch_types::Index; @@ -113,18 +113,8 @@ impl IndexScheduler { } } - let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|i| format!("indexing-thread-{i}")) - .build() - .unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges); let (document_changes, operation_stats, primary_key) = indexer @@ -266,18 +256,8 @@ impl IndexScheduler { let mut congestion = None; if task.error.is_none() { - let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|i| format!("indexing-thread-{i}")) - .build() - .unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let candidates_count = candidates.len(); progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges); @@ -429,18 +409,8 @@ 
impl IndexScheduler { let mut congestion = None; if !tasks.iter().all(|res| res.error.is_some()) { - let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|i| format!("indexing-thread-{i}")) - .build() - .unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; progress.update_progress(DocumentDeletionProgress::DeleteDocuments); let mut indexer = indexer::DocumentDeletion::new(); diff --git a/crates/meili-snap/Cargo.toml b/crates/meili-snap/Cargo.toml index 0c48ff824..be96769ab 100644 --- a/crates/meili-snap/Cargo.toml +++ b/crates/meili-snap/Cargo.toml @@ -15,3 +15,5 @@ license.workspace = true insta = { version = "=1.39.0", features = ["json", "redactions"] } md5 = "0.7.0" once_cell = "1.20" +regex-lite = "0.1.6" +uuid = { version = "1.17.0", features = ["v4"] } diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index c467aef49..1641a6335 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -4,9 +4,16 @@ use std::path::{Path, PathBuf}; use std::sync::Mutex; pub use insta; +use insta::internals::{Content, ContentPath}; use once_cell::sync::Lazy; +use regex_lite::Regex; static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default); +/// A regex to match UUIDs in messages (any hyphenated 8-4-4-4-12 hex string, not just v4) +static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| { + Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}") + .unwrap() +}); /// Return the md5 hash of the given string pub fn hash_snapshot(snap: &str) -> String { @@ -26,6 +33,34 @@ pub fn default_snapshot_settings_for_test<'a>( let filename = path.file_name().unwrap().to_str().unwrap(); settings.set_omit_expression(true); + fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content { + match &content { + Content::String(s) => { + let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]"); + Content::String(uuid_replaced.to_string()) + } + _ => content, + } + } + + settings.add_dynamic_redaction(".message", uuid_in_message_redaction); + settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction); + settings.add_dynamic_redaction(".indexUid", |content, _content_path| match &content { + Content::String(s) => match uuid::Uuid::parse_str(s) { + Ok(_) => Content::String("[uuid]".to_owned()), + Err(_) => content, + }, + _ => content, + }); + let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name); let test_name = test_name.rsplit("::").next().unwrap().to_owned(); @@ -232,6 +267,9 @@ macro_rules!
json_string { #[cfg(test)] mod tests { use crate as meili_snap; + use crate::UUID_IN_MESSAGE_RE; + use uuid::Uuid; + #[test] fn snap() { snapshot_hash!(10, @"d3d9446802a44259755d38e6d163e820"); @@ -279,4 +317,14 @@ mod tests { // snapshot_hash!("", name: "", @"d41d8cd98f00b204e9800998ecf8427e"); } } + + #[test] + fn uuid_in_message_regex() { + let uuid1 = Uuid::new_v4(); + let uuid2 = Uuid::new_v4(); + let uuid3 = Uuid::new_v4(); + let to_replace = format!("1 {uuid1} 2 {uuid2} 3 {uuid3} 4"); + let replaced = UUID_IN_MESSAGE_RE.replace_all(to_replace.as_str(), "[uuid]"); + assert_eq!(replaced, "1 [uuid] 2 [uuid] 3 [uuid] 4"); + } } diff --git a/crates/meilisearch-types/src/settings.rs b/crates/meilisearch-types/src/settings.rs index d26c575d6..1c225b355 100644 --- a/crates/meilisearch-types/src/settings.rs +++ b/crates/meilisearch-types/src/settings.rs @@ -697,7 +697,7 @@ pub fn apply_settings_to_builder( match typo_tolerance { Setting::Set(ref value) => { match value.enabled { - Setting::Set(val) => builder.set_autorize_typos(val), + Setting::Set(val) => builder.set_authorize_typos(val), Setting::Reset => builder.reset_authorize_typos(), Setting::NotSet => (), } diff --git a/crates/meilisearch/Cargo.toml b/crates/meilisearch/Cargo.toml index c75b55bc2..931d91d51 100644 --- a/crates/meilisearch/Cargo.toml +++ b/crates/meilisearch/Cargo.toml @@ -120,7 +120,7 @@ actix-web-lab = { version = "0.24.1", default-features = false } actix-rt = "2.10.0" brotli = "6.0.0" # fixed version due to format breakages in v1.40 -insta = "=1.39.0" +insta = { version = "=1.39.0", features = ["redactions"] } manifest-dir-macros = "0.1.18" maplit = "1.0.2" meili-snap = { path = "../meili-snap" } diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index f4df7efa0..1e0c205d0 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -37,7 +37,9 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions}; use meilisearch_auth::{open_auth_store_env, AuthController}; use meilisearch_types::milli::constants::VERSION_MAJOR; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; -use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod}; +use meilisearch_types::milli::update::{ + default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig, +}; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::KindWithContent; use meilisearch_types::versioning::{ @@ -501,7 +503,19 @@ fn import_dump( let network = dump_reader.network()?.cloned().unwrap_or_default(); index_scheduler.put_network(network)?; - let indexer_config = index_scheduler.indexer_config(); + // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured + let backup_config; + let base_config = index_scheduler.indexer_config(); + + let indexer_config = if base_config.max_threads.is_none() { + let (thread_pool, _) = default_thread_pool_and_threads(); + + let _config = IndexerConfig { thread_pool, ..*base_config }; + backup_config = _config; + &backup_config + } else { + base_config + }; // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might // try to process tasks while we're trying to import the indexes. 
diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index 6e06b161d..35ce71cf4 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -761,10 +761,12 @@ impl IndexerOpts { max_indexing_memory.to_string(), ); } - export_to_env_if_not_present( - MEILI_MAX_INDEXING_THREADS, - max_indexing_threads.0.to_string(), - ); + if let Some(max_indexing_threads) = max_indexing_threads.0 { + export_to_env_if_not_present( + MEILI_MAX_INDEXING_THREADS, + max_indexing_threads.to_string(), + ); + } } } @@ -772,15 +774,15 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { type Error = anyhow::Error; fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> { - let thread_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|index| format!("indexing-thread:{index}")) - .num_threads(*other.max_indexing_threads) + let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing() + .num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2)) .build()?; Ok(Self { + thread_pool, log_every_n: Some(DEFAULT_LOG_EVERY_N), max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize), - thread_pool: Some(thread_pool), + max_threads: *other.max_indexing_threads, max_positions_per_attributes: None, skip_index_budget: other.skip_index_budget, ..Default::default() @@ -843,31 +845,31 @@ fn total_memory_bytes() -> Option<u64> { } } -#[derive(Debug, Clone, Copy, Deserialize, Serialize)] -pub struct MaxThreads(usize); +#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)] +pub struct MaxThreads(Option<usize>); impl FromStr for MaxThreads { type Err = ParseIntError; - fn from_str(s: &str) -> Result<Self, Self::Err> { - usize::from_str(s).map(Self) - } -} - -impl Default for MaxThreads { - fn default() -> Self { - MaxThreads(num_cpus::get() / 2) + fn from_str(s: &str) -> Result<Self, Self::Err> { + if s.is_empty() || s == "unlimited" { + return Ok(MaxThreads::default()); + } + usize::from_str(s).map(Some).map(MaxThreads) } } impl fmt::Display for MaxThreads { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) + match self.0 { + Some(threads) => write!(f, "{}", threads), + None => write!(f, "unlimited"), + } } } impl Deref for MaxThreads { - type Target = usize; + type Target = Option<usize>; fn deref(&self) -> &Self::Target { &self.0 diff --git a/crates/meilisearch/src/search_queue.rs b/crates/meilisearch/src/search_queue.rs index 6ab910164..9ff8da36c 100644 --- a/crates/meilisearch/src/search_queue.rs +++ b/crates/meilisearch/src/search_queue.rs @@ -1,5 +1,5 @@ //! This file implements a queue of searches to process and the ability to control how many searches can be run in parallel. -//! We need this because we don't want to process more search requests than we have cores. +//! We need this because we don't want to process more search requests than the available CPU cores. //! That slows down everything and consumes RAM for no reason. //! The steps to do a search are to get the `SearchQueue` data structure and try to get a search permit. //! This can fail if the queue is full, and we need to drop your search request to register a new one. @@ -8,7 +8,7 @@ //! //! In order to do a search request you should try to get a search permit. //! Retrieve the `SearchQueue` structure from actix-web (`search_queue: Data<SearchQueue>`) -//! and right before processing the search, calls the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;` +//!
and right before processing the search, call the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;` //! //! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel. //! Then, the queue/scheduler is going to either: @@ -121,12 +121,12 @@ impl SearchQueue { let mut queue: Vec<oneshot::Sender<Permit>> = Default::default(); let mut rng: StdRng = StdRng::from_entropy(); let mut searches_running: usize = 0; - // By having a capacity of parallelism we ensures that every time a search finish it can release its RAM asap + // By having a capacity of parallelism we ensure that every time a search finishes it can release its RAM asap let (sender, mut search_finished) = mpsc::channel(parallelism.into()); loop { tokio::select! { - // biased select because we wants to free up space before trying to register new tasks + // biased select because we want to free up space before trying to register new tasks biased; _ = search_finished.recv() => { searches_running = searches_running.saturating_sub(1); @@ -148,11 +148,11 @@ impl SearchQueue { if searches_running < usize::from(parallelism) && queue.is_empty() { searches_running += 1; - // if the search requests die it's not a hard error on our side + // if the search requests die, it's not a hard error on our side let _ = search_request.send(Permit { sender: sender.clone() }); continue; } else if capacity == 0 { - // in the very specific case where we have a capacity of zero + // in the very specific case where we have a capacity of zero, // we must refuse the request straight away without going through // the queue stuff. drop(search_request); @@ -183,7 +183,7 @@ impl SearchQueue { .map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?; // If we've been for more than one minute to get a search permit, it's better to simply - // abort the search request than spending time processing something were the client + // abort the search request than to spend time processing something where the client // most certainly exited or got a timeout a long time ago. // We may find a better solution in https://github.com/actix/actix-web/issues/3462.
if now.elapsed() > self.time_to_abort { diff --git a/crates/meilisearch/tests/auth/api_keys.rs b/crates/meilisearch/tests/auth/api_keys.rs index 84ed24217..5a18b4dbf 100644 --- a/crates/meilisearch/tests/auth/api_keys.rs +++ b/crates/meilisearch/tests/auth/api_keys.rs @@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() { let (response, code) = server.add_api_key(content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.", + "message": "`uid` field value `[uuid]` is already an existing API key.", "code": "api_key_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#api_key_already_exists" diff --git a/crates/meilisearch/tests/common/index.rs b/crates/meilisearch/tests/common/index.rs index 09a7d623c..e324d2ff5 100644 --- a/crates/meilisearch/tests/common/index.rs +++ b/crates/meilisearch/tests/common/index.rs @@ -29,6 +29,10 @@ impl<'a> Index<'a, Owned> { } } + pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> { + Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData } + } + pub async fn load_test_set(&self) -> u64 { let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref())); let (response, code) = self @@ -290,6 +294,20 @@ impl Index<'_, Shared> { } (task, code) } + + pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) { + let (mut task, code) = self._update(primary_key).await; + if code.is_success() { + task = self.wait_task(task.uid()).await; + if task.is_success() { + panic!( + "`update_index_fail` succeeded: {}", + serde_json::to_string_pretty(&task).unwrap() + ); + } + } + (task, code) + } } #[allow(dead_code)] @@ -333,6 +351,14 @@ impl<State> Index<'_, State> { self.service.post_encoded("/indexes", body, self.encoder).await } + pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) { + let body = json!({ + "primaryKey": primary_key, + }); + let url = format!("/indexes/{}", urlencode(self.uid.as_ref())); + self.service.patch_encoded(url, body, self.encoder).await + } + pub(super) async fn _delete(&self) -> (Value, StatusCode) { let url = format!("/indexes/{}", urlencode(self.uid.as_ref())); self.service.delete(url).await diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs index 4d57a6163..373f89f78 100644 --- a/crates/meilisearch/tests/common/mod.rs +++ b/crates/meilisearch/tests/common/mod.rs @@ -128,7 +128,8 @@ impl Display for Value { ".finishedAt" => "[date]", ".duration" => "[duration]", ".processingTimeMs" => "[duration]", - ".details.embedders.*.url" => "[url]" + ".details.embedders.*.url" => "[url]", + ".details.dumpUid" => "[dump_uid]", }) ) } @@ -264,6 +265,24 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| { ]) }); +pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> { + static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new(); + INDEX.get_or_init(|| async { + let server = Server::new_shared(); + let index = server._index("SCORE_DOCUMENTS").to_shared(); + let documents = SCORE_DOCUMENTS.clone(); + let (response, _code) = index._add_documents(documents, None).await; + index.wait_task(response.uid()).await.succeeded(); + let (response, _code) = index + ._update_settings( + json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id",
"title"]}), + ) + .await; + index.wait_task(response.uid()).await.succeeded(); + index + }).await +} + pub static NESTED_DOCUMENTS: Lazy = Lazy::new(|| { json!([ { @@ -333,7 +352,7 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Sha index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index ._update_settings( - json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}), + json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}), ) .await; index.wait_task(response.uid()).await.succeeded(); @@ -435,3 +454,57 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> { }) .await } + +pub static GEO_DOCUMENTS: Lazy = Lazy::new(|| { + json!([ + { + "id": 1, + "name": "Taco Truck", + "address": "444 Salsa Street, Burritoville", + "type": "Mexican", + "rating": 9, + "_geo": { + "lat": 34.0522, + "lng": -118.2437 + } + }, + { + "id": 2, + "name": "La Bella Italia", + "address": "456 Elm Street, Townsville", + "type": "Italian", + "rating": 9, + "_geo": { + "lat": "45.4777599", + "lng": "9.1967508" + } + }, + { + "id": 3, + "name": "Crêpe Truck", + "address": "2 Billig Avenue, Rouenville", + "type": "French", + "rating": 10 + } + ]) +}); + +pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared> { + static INDEX: OnceCell> = OnceCell::const_new(); + INDEX + .get_or_init(|| async { + let server = Server::new_shared(); + let index = server._index("SHARED_GEO_DOCUMENTS").to_shared(); + let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await; + index.wait_task(response.uid()).await.succeeded(); + + let (response, _code) = index + ._update_settings( + json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}), + ) + .await; + index.wait_task(response.uid()).await.succeeded(); + index + }) + .await +} diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs index 7e30c5d17..431972983 100644 --- a/crates/meilisearch/tests/common/server.rs +++ b/crates/meilisearch/tests/common/server.rs @@ -399,18 +399,9 @@ impl Server { pub async fn wait_task(&self, update_id: u64) -> Value { // try several times to get status, or panic to not wait forever let url = format!("/tasks/{}", update_id); - // Increase timeout for vector-related tests - let max_attempts = if url.contains("/tasks/") { - if update_id > 1000 { - 400 // 200 seconds for vector tests - } else { - 100 // 50 seconds for other tests - } - } else { - 100 // 50 seconds for other tests - }; + let max_attempts = 400; // 200 seconds total, 0.5s per attempt - for _ in 0..max_attempts { + for i in 0..max_attempts { let (response, status_code) = self.service.get(&url).await; assert_eq!(200, status_code, "response: {}", response); @@ -420,6 +411,10 @@ impl Server { // wait 0.5 second. 
sleep(Duration::from_millis(500)).await; + + if i == max_attempts - 1 { + dbg!(response); + } } panic!("Timeout waiting for update id"); } diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 6569bb9a5..1cf492fc0 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -1,12 +1,12 @@ +use crate::common::encoder::Encoder; +use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value}; +use crate::json; use actix_web::test; use meili_snap::{json_string, snapshot}; use meilisearch::Opt; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; - -use crate::common::encoder::Encoder; -use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value}; -use crate::json; +use uuid::Uuid; /// This is the basic usage of our API and every other tests uses the content-type application/json #[actix_rt::test] @@ -18,13 +18,14 @@ async fn add_documents_test_json_content_types() { } ]); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -33,20 +34,20 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, - "indexUid": "dog", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -55,11 +56,11 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, - "indexUid": "dog", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -75,13 +76,14 @@ async fn add_single_document_test_json_content_types() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = 
Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -90,20 +92,20 @@ async fn add_single_document_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, - "indexUid": "dog", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -112,11 +114,11 @@ async fn add_single_document_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, - "indexUid": "dog", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -132,14 +134,15 @@ async fn add_single_document_gzip_encoded() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); let encoder = Encoder::Gzip; let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) .insert_header(("content-type", "application/json")) .insert_header(encoder.header().unwrap()) @@ -149,20 +152,20 @@ async fn add_single_document_gzip_encoded() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, - "indexUid": "dog", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + 
"#); // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document)) .insert_header(("content-type", "application/json")) .insert_header(encoder.header().unwrap()) @@ -172,11 +175,11 @@ async fn add_single_document_gzip_encoded() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, - "indexUid": "dog", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -187,13 +190,14 @@ async fn add_single_document_gzip_encoded() { async fn add_single_document_gzip_encoded_with_incomplete_error() { let document = json!("kefir"); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .insert_header(("content-encoding", "gzip")) @@ -215,7 +219,7 @@ async fn add_single_document_gzip_encoded_with_incomplete_error() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .insert_header(("content-encoding", "gzip")) @@ -244,15 +248,16 @@ async fn add_single_document_with_every_encoding() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); - for (task_uid, encoder) in Encoder::iterator().enumerate() { + for encoder in Encoder::iterator() { let mut req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) .insert_header(("content-type", "application/json")); req = match encoder.header() { @@ -263,16 +268,15 @@ async fn add_single_document_with_every_encoding() { let res = test::call_service(&app, req).await; let status_code = res.status(); let body = test::read_body(res).await; - let response: Value = serde_json::from_slice(&body).unwrap_or_default(); + let _response: Value = serde_json::from_slice(&body).unwrap_or_default(); assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], task_uid); } } #[actix_rt::test] async fn add_csv_document() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id,name,race 
0,jean,bernese mountain @@ -280,21 +284,21 @@ async fn add_csv_document() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, - "indexUid": "pets", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + "#); + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "pets", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -335,8 +339,8 @@ async fn add_csv_document() { #[actix_rt::test] async fn add_csv_document_with_types() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id:number,name:string,race:string,age:number,cute:boolean 0,jean,bernese mountain,2.5,true @@ -345,21 +349,21 @@ async fn add_csv_document_with_types() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, - "indexUid": "pets", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + "#); + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "pets", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -411,8 +415,8 @@ async fn add_csv_document_with_types() { #[actix_rt::test] async fn add_csv_document_with_custom_delimiter() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id|name|race 0|jean|bernese mountain @@ -421,21 +425,21 @@ async fn add_csv_document_with_custom_delimiter() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "?csvDelimiter=|").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { 
".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, - "indexUid": "pets", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + "#); + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "pets", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -476,8 +480,8 @@ async fn add_csv_document_with_custom_delimiter() { #[actix_rt::test] async fn add_csv_document_with_types_error() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id:number,a:boolean,b:number 0,doggo,1"; @@ -518,12 +522,13 @@ async fn error_add_documents_test_bad_content_types() { } ]); - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/plain")) .to_request(); @@ -544,7 +549,7 @@ async fn error_add_documents_test_bad_content_types() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/plain")) .to_request(); @@ -574,12 +579,13 @@ async fn error_add_documents_test_no_content_type() { } ]); - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .to_request(); let res = test::call_service(&app, req).await; @@ -599,7 +605,7 @@ async fn error_add_documents_test_no_content_type() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .to_request(); let res = test::call_service(&app, req).await; @@ -622,12 +628,13 @@ async fn error_add_documents_test_no_content_type() { async fn error_add_malformed_csv_documents() { let document = "id, content\n1234, hello, world\n12, hello world"; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -648,7 +655,7 @@ async fn error_add_malformed_csv_documents() { // put let req = 
test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -672,12 +679,13 @@ async fn error_add_malformed_csv_documents() { async fn error_add_malformed_json_documents() { let document = r#"[{"id": 1}, {id: 2}]"#; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -698,7 +706,7 @@ async fn error_add_malformed_json_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -724,7 +732,7 @@ async fn error_add_malformed_json_documents() { let document = format!("\"{}\"", long); let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document) .insert_header(("content-type", "application/json")) .to_request(); @@ -745,7 +753,7 @@ async fn error_add_malformed_json_documents() { // add one more char to the long string to test if the truncating works. let document = format!("\"{}m\"", long); let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document) .insert_header(("content-type", "application/json")) .to_request(); @@ -768,12 +776,13 @@ async fn error_add_malformed_json_documents() { async fn error_add_malformed_ndjson_documents() { let document = "{\"id\": 1}\n{id: 2}"; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -794,7 +803,7 @@ async fn error_add_malformed_ndjson_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -818,12 +827,13 @@ async fn error_add_malformed_ndjson_documents() { async fn error_add_missing_payload_csv_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -844,7 +854,7 @@ async fn error_add_missing_payload_csv_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -868,12 +878,13 @@ async fn 
error_add_missing_payload_csv_documents() { async fn error_add_missing_payload_json_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -894,7 +905,7 @@ async fn error_add_missing_payload_json_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -918,12 +929,13 @@ async fn error_add_missing_payload_json_documents() { async fn error_add_missing_payload_ndjson_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -944,7 +956,7 @@ async fn error_add_missing_payload_ndjson_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -966,8 +978,8 @@ async fn error_add_missing_payload_ndjson_documents() { #[actix_rt::test] async fn add_documents_no_index_creation() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -978,18 +990,15 @@ async fn add_documents_no_index_creation() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); - assert_eq!(response["taskUid"], 0); - index.wait_task(response.uid()).await.succeeded(); - - let (response, code) = index.get_task(0).await; - snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(code, @"202 Accepted"); + snapshot!(response, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1037,8 +1046,8 @@ async fn error_document_add_create_index_bad_uid() { #[actix_rt::test] async fn document_addition_with_primary_key() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1048,27 +1057,27 @@ async fn document_addition_with_primary_key() { ]); let (response, code) = index.add_documents(documents, Some("primary")).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => 
"[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_task(response.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1086,10 +1095,10 @@ async fn document_addition_with_primary_key() { let (response, code) = index.get().await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]" }), + snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]", ".uid" => "[uuid]" }), @r###" { - "uid": "test", + "uid": "[uuid]", "createdAt": "[date]", "updatedAt": "[date]", "primaryKey": "primary" @@ -1099,8 +1108,8 @@ async fn document_addition_with_primary_key() { #[actix_rt::test] async fn document_addition_with_huge_int_primary_key() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1111,13 +1120,13 @@ async fn document_addition_with_huge_int_primary_key() { let (response, code) = index.add_documents(documents, Some("primary")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(response, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1146,8 +1155,8 @@ async fn document_addition_with_huge_int_primary_key() { #[actix_rt::test] async fn replace_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1158,16 +1167,16 @@ async fn replace_document() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code,@"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); index.wait_task(response.uid()).await.succeeded(); @@ -1185,12 +1194,12 @@ async fn replace_document() { let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", 
".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1219,17 +1228,17 @@ async fn replace_document() { #[actix_rt::test] async fn add_no_documents() { - let server = Server::new().await; - let index = server.index("kefir"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.add_documents(json!([]), None).await; snapshot!(code, @"202 Accepted"); - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1246,13 +1255,13 @@ async fn add_no_documents() { "#); let (task, _code) = index.add_documents(json!([]), Some("kefkef")).await; - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1269,13 +1278,13 @@ async fn add_no_documents() { "#); let (task, _code) = index.add_documents(json!([{ "kefkef": 1 }]), None).await; - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1307,8 +1316,8 @@ async fn add_no_documents() { #[actix_rt::test] async fn add_larger_dataset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let update_id = index.load_test_set().await; let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); @@ -1319,12 +1328,11 @@ async fn add_larger_dataset() { let (response, code) = index .get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() }) .await; - assert_eq!(code, 200, "failed with `{}`", response); + assert_eq!(code, 200, "failed with `{response}`"); assert_eq!(response["results"].as_array().unwrap().len(), 77); // x-ndjson add large test - let server = Server::new().await; - let index = server.index("test"); + let index = server.unique_index(); let update_id = index.load_test_set_ndjson().await; let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); @@ -1341,8 +1349,8 @@ async fn add_larger_dataset() { #[actix_rt::test] async fn error_add_documents_bad_document_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("docid")).await; // unsupported characters @@ -1357,12 +1365,12 @@ async fn error_add_documents_bad_document_id() { index.wait_task(task.uid()).await.failed(); let (response, 
code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1381,7 +1389,7 @@ async fn error_add_documents_bad_document_id() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); // More than 512 bytes let documents = json!([ @@ -1394,12 +1402,12 @@ async fn error_add_documents_bad_document_id() { index.wait_task(value.uid()).await.failed(); let (response, code) = index.get_task(value.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1431,12 +1439,12 @@ async fn error_add_documents_bad_document_id() { index.wait_task(value.uid()).await.failed(); let (response, code) = index.get_task(value.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1460,8 +1468,8 @@ async fn error_add_documents_bad_document_id() { #[actix_rt::test] async fn error_add_documents_missing_document_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("docid")).await; let documents = json!([ { @@ -1473,12 +1481,12 @@ async fn error_add_documents_missing_document_id() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1497,13 +1505,13 @@ async fn error_add_documents_missing_document_id() { "startedAt": "[date]", 
"finishedAt": "[date]" } - "###); + "#); } #[actix_rt::test] async fn error_document_field_limit_reached_in_one_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1527,7 +1535,7 @@ async fn error_document_field_limit_reached_in_one_document() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1551,8 +1559,8 @@ async fn error_document_field_limit_reached_in_one_document() { #[actix_rt::test] async fn error_document_field_limit_reached_over_multiple_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1568,14 +1576,14 @@ async fn error_document_field_limit_reached_over_multiple_documents() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1603,14 +1611,14 @@ async fn error_document_field_limit_reached_over_multiple_documents() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.failed(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1634,8 +1642,8 @@ async fn error_document_field_limit_reached_over_multiple_documents() { #[actix_rt::test] async fn error_document_field_limit_reached_in_one_nested_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1652,7 +1660,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); // Documents without a primary key are not accepted. 
snapshot!(response, @@ -1660,7 +1668,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1679,8 +1687,8 @@ async fn error_document_field_limit_reached_in_one_nested_document() { #[actix_rt::test] async fn error_document_field_limit_reached_over_multiple_documents_with_nested_fields() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1697,14 +1705,14 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1733,14 +1741,14 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1759,8 +1767,8 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ #[actix_rt::test] async fn add_documents_with_geo_field() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; let documents = json!([ @@ -1782,13 +1790,13 @@ async fn add_documents_with_geo_field() { ]); let (task, _status_code) = index.add_documents(documents, None).await; - let response = index.wait_task(task.uid()).await; - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + let response = index.wait_task(task.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1802,7 +1810,7 @@ async fn add_documents_with_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; @@ -1883,8 +1891,8 @@ async fn add_documents_with_geo_field() { #[actix_rt::test] async fn update_documents_with_geo_field() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); 
index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; let documents = json!([ @@ -1906,13 +1914,13 @@ async fn update_documents_with_geo_field() { ]); let (task, _status_code) = index.add_documents(documents, None).await; - let response = index.wait_task(task.uid()).await; - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + let response = index.wait_task(task.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1926,7 +1934,7 @@ async fn update_documents_with_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await; snapshot!(code, @"200 OK"); @@ -1975,13 +1983,13 @@ async fn update_documents_with_geo_field() { } ]); let (task, _status_code) = index.update_documents(updated_documents, None).await; - let response = index.wait_task(task.uid()).await; - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + let response = index.wait_task(task.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2075,8 +2083,8 @@ async fn update_documents_with_geo_field() { #[actix_rt::test] async fn add_documents_invalid_geo_field() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; @@ -2092,12 +2100,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), + @r#" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2106,7 +2114,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` is not an object. 
Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", + "message": "Index `[uuid]`: The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2116,7 +2124,7 @@ async fn add_documents_invalid_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); // _geo is an object but is missing both the lat and lng let documents = json!([ @@ -2130,12 +2138,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2144,7 +2152,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "message": "Index `[uuid]`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2168,12 +2176,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 4, - "batchUid": 4, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2182,7 +2190,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "message": "Index `[uuid]`: Could not find latitude nor longitude in the document with the id: `\"11\"`. 
Was expecting `_geo.lat` and `_geo.lng` fields.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2206,12 +2214,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 5, - "batchUid": 5, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2220,7 +2228,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2244,12 +2252,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 6, - "batchUid": 6, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2258,7 +2266,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. 
Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2282,12 +2290,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 7, - "batchUid": 7, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2296,7 +2304,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2320,12 +2328,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 8, - "batchUid": 8, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2334,7 +2342,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. 
Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2358,12 +2366,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 9, - "batchUid": 9, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2372,7 +2380,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2396,12 +2404,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 10, - "batchUid": 10, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2410,7 +2418,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. 
Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2434,12 +2442,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 11, - "batchUid": 11, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2448,7 +2456,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2472,12 +2480,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 12, - "batchUid": 12, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2486,7 +2494,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. 
Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2510,12 +2518,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 13, - "batchUid": 13, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2524,7 +2532,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", + "message": "Index `[uuid]`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2549,12 +2557,12 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 14, - "batchUid": 14, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2563,7 +2571,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse longitude in the document with the id: `\"12\"`. 
Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2586,12 +2594,12 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 15, - "batchUid": 15, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2600,7 +2608,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2623,12 +2631,12 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 16, - "batchUid": 16, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2637,7 +2645,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. Was expecting finite numbers but instead got `null` and `null`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. 
Was expecting finite numbers but instead got `null` and `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2653,8 +2661,8 @@ async fn add_documents_invalid_geo_field() { // Related to #4333 #[actix_rt::test] async fn add_invalid_geo_and_then_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; // _geo is not a correct object @@ -2671,7 +2679,7 @@ async fn add_invalid_geo_and_then_settings() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2694,7 +2702,7 @@ async fn add_invalid_geo_and_then_settings() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "settingsUpdate", "canceledBy": null, @@ -2704,7 +2712,7 @@ async fn add_invalid_geo_and_then_settings() { ] }, "error": { - "message": "Index `test`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2719,8 +2727,8 @@ async fn add_invalid_geo_and_then_settings() { #[actix_rt::test] async fn error_add_documents_payload_size() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; let document = json!( { @@ -2746,8 +2754,8 @@ async fn error_add_documents_payload_size() { #[actix_rt::test] async fn error_primary_key_inference() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -2761,12 +2769,12 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2785,7 +2793,7 @@ async fn error_primary_key_inference() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let documents = json!([ { @@ -2802,12 +2810,12 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - 
"uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2841,12 +2849,12 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2865,8 +2873,8 @@ async fn error_primary_key_inference() { #[actix_rt::test] async fn add_documents_with_primary_key_twice() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -2888,8 +2896,8 @@ async fn add_documents_with_primary_key_twice() { #[actix_rt::test] async fn batch_several_documents_addition() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let mut documents: Vec<_> = (0..150usize) .map(|id| { @@ -2912,8 +2920,10 @@ async fn batch_several_documents_addition() { } // wait first batch of documents to finish - futures::future::join_all(waiter).await; - index.wait_task(4).await; + let finished_tasks = futures::future::join_all(waiter).await; + for (task, _code) in finished_tasks { + index.wait_task(task.uid()).await; + } // run a second completely failing batch documents[40] = json!({"title": "error", "desc": "error"}); @@ -2924,8 +2934,10 @@ async fn batch_several_documents_addition() { waiter.push(index.add_documents(json!(chunk), Some("id"))); } // wait second batch of documents to finish - futures::future::join_all(waiter).await; - index.wait_task(9).await; + let finished_tasks = futures::future::join_all(waiter).await; + for (task, _code) in finished_tasks { + index.wait_task(task.uid()).await; + } let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await; diff --git a/crates/meilisearch/tests/documents/delete_documents.rs b/crates/meilisearch/tests/documents/delete_documents.rs index 060f17958..5ea122bd0 100644 --- a/crates/meilisearch/tests/documents/delete_documents.rs +++ b/crates/meilisearch/tests/documents/delete_documents.rs @@ -1,39 +1,35 @@ use meili_snap::{json_string, snapshot}; -use crate::common::{GetAllDocumentsOptions, Server}; +use crate::common::{shared_does_not_exists_index, GetAllDocumentsOptions, Server}; use crate::json; #[actix_rt::test] async fn delete_one_document_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); - let (task, code) = index.delete_document(0).await; + let index = shared_does_not_exists_index().await; + let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + index.wait_task(task.uid()).await.failed(); } #[actix_rt::test] async fn delete_one_unexisting_document() { - let server = Server::new().await; - let index = 
server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(None).await; let (response, code) = index.delete_document(0).await; - assert_eq!(code, 202, "{}", response); - let update = index.wait_task(response.uid()).await; - assert_eq!(update["status"], "succeeded"); + assert_eq!(code, 202, "{response}"); + index.wait_task(response.uid()).await.succeeded(); } #[actix_rt::test] async fn delete_one_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await; index.wait_task(task.uid()).await.succeeded(); - let (task, status_code) = server.index("test").delete_document(0).await; + let (task, status_code) = index.delete_document(0).await; assert_eq!(status_code, 202); index.wait_task(task.uid()).await.succeeded(); @@ -43,20 +39,18 @@ async fn delete_one_document() { #[actix_rt::test] async fn clear_all_documents_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.clear_all_documents().await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + index.wait_task(task.uid()).await.failed(); } #[actix_rt::test] async fn clear_all_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index .add_documents( json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]), @@ -67,7 +61,7 @@ async fn clear_all_documents() { let (task, code) = index.clear_all_documents().await; assert_eq!(code, 202); - let _update = index.wait_task(task.uid()).await; + let _update = index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert!(response["results"].as_array().unwrap().is_empty()); @@ -75,14 +69,14 @@ async fn clear_all_documents() { #[actix_rt::test] async fn clear_all_documents_empty_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.clear_all_documents().await; assert_eq!(code, 202); - let _update = index.wait_task(task.uid()).await; + let _update = index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert!(response["results"].as_array().unwrap().is_empty()); @@ -90,33 +84,31 @@ async fn clear_all_documents_empty_index() { #[actix_rt::test] async fn error_delete_batch_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.delete_batch(vec![]).await; let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(code, 202); - let response = 
index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + let response = index.wait_task(task.uid()).await.failed(); assert_eq!(response["error"], expected_response); } #[actix_rt::test] async fn delete_batch() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await; index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.delete_batch(vec![1, 0]).await; assert_eq!(code, 202); - let _update = index.wait_task(task.uid()).await; + let _update = index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 1); @@ -125,14 +117,14 @@ async fn delete_batch() { #[actix_rt::test] async fn delete_no_document_batch() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await; index.wait_task(task.uid()).await.succeeded(); - let (_response, code) = index.delete_batch(vec![]).await; - assert_eq!(code, 202, "{}", _response); + let (response, code) = index.delete_batch(vec![]).await; + assert_eq!(code, 202, "{response}"); - let _update = index.wait_task(_response.uid()).await; + let _update = index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 3); @@ -140,8 +132,8 @@ async fn delete_no_document_batch() { #[actix_rt::test] async fn delete_document_by_filter() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings_filterable_attributes(json!(["color"])).await; let (task, _status_code) = index .add_documents( @@ -178,22 +170,22 @@ async fn delete_document_by_filter() { let (response, code) = index.delete_document_by_filter(json!({ "filter": "color = blue"})).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 2, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ 
-251,22 +243,22 @@ async fn delete_document_by_filter() { let (response, code) = index.delete_document_by_filter(json!({ "filter": "color NOT EXISTS"})).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "taskUid": 3, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -321,8 +313,8 @@ async fn delete_document_by_filter() { #[actix_rt::test] async fn delete_document_by_complex_filter() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings_filterable_attributes(json!(["color"])).await; let (task, _status_code) = index .add_documents( @@ -343,22 +335,22 @@ async fn delete_document_by_complex_filter() { ) .await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 2, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -402,22 +394,22 @@ async fn delete_document_by_complex_filter() { .delete_document_by_filter(json!({ "filter": [["color = green", "color NOT EXISTS"]] })) .await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "taskUid": 3, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", 
"status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, diff --git a/crates/meilisearch/tests/documents/get_documents.rs b/crates/meilisearch/tests/documents/get_documents.rs index f87a18b9f..4f82faf99 100644 --- a/crates/meilisearch/tests/documents/get_documents.rs +++ b/crates/meilisearch/tests/documents/get_documents.rs @@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() { #[actix_rt::test] async fn get_document_with_vectors() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ diff --git a/crates/meilisearch/tests/index/delete_index.rs b/crates/meilisearch/tests/index/delete_index.rs index 03185d21a..713891420 100644 --- a/crates/meilisearch/tests/index/delete_index.rs +++ b/crates/meilisearch/tests/index/delete_index.rs @@ -28,6 +28,7 @@ async fn error_delete_unexisting_index() { let (task, code) = index.delete_index_fail().await; assert_eq!(code, 202); + index.wait_task(task.uid()).await.failed(); let expected_response = json!({ "message": "Index `DOES_NOT_EXISTS` not found.", @@ -57,7 +58,7 @@ async fn loop_delete_add_documents() { } for task in tasks { - let response = index.wait_task(task).await; + let response = index.wait_task(task).await.succeeded(); assert_eq!(response["status"], "succeeded", "{}", response); } } diff --git a/crates/meilisearch/tests/index/get_index.rs b/crates/meilisearch/tests/index/get_index.rs index b07b7821b..11b1817f0 100644 --- a/crates/meilisearch/tests/index/get_index.rs +++ b/crates/meilisearch/tests/index/get_index.rs @@ -52,19 +52,28 @@ async fn no_index_return_empty_list() { #[actix_rt::test] async fn list_multiple_indexes() { - let server = Server::new().await; - server.index("test").create(None).await; - let (task, _status_code) = server.index("test1").create(Some("key")).await; + let server = Server::new_shared(); - server.index("test").wait_task(task.uid()).await.succeeded(); + let index_without_key = server.unique_index(); + let (response_without_key, _status_code) = index_without_key.create(None).await; - let (response, code) = server.list_indexes(None, None).await; + let index_with_key = server.unique_index(); + let (response_with_key, _status_code) = index_with_key.create(Some("key")).await; + + index_without_key.wait_task(response_without_key.uid()).await.succeeded(); + index_with_key.wait_task(response_with_key.uid()).await.succeeded(); + + let (response, code) = server.list_indexes(None, Some(1000)).await; assert_eq!(code, 200); assert!(response["results"].is_array()); let arr = response["results"].as_array().unwrap(); - assert_eq!(arr.len(), 2); - assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null)); - assert!(arr.iter().any(|entry| 
entry["uid"] == "test1" && entry["primaryKey"] == "key")); + assert!(arr.len() >= 2, "Expected at least 2 indexes."); + assert!(arr + .iter() + .any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null)); + assert!(arr + .iter() + .any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key")); } #[actix_rt::test] diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index 291cb0ce0..90c77cec8 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -1,10 +1,11 @@ -use crate::common::Server; +use crate::common::{shared_does_not_exists_index, Server}; + use crate::json; #[actix_rt::test] async fn stats() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.create(Some("id")).await; assert_eq!(code, 202); @@ -15,7 +16,7 @@ async fn stats() { assert_eq!(code, 200); assert_eq!(response["numberOfDocuments"], 0); - assert!(response["isIndexing"] == false); + assert_eq!(response["isIndexing"], false); assert!(response["fieldDistribution"].as_object().unwrap().is_empty()); let documents = json!([ @@ -31,7 +32,6 @@ async fn stats() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["taskUid"], 1); index.wait_task(response.uid()).await.succeeded(); @@ -39,7 +39,7 @@ async fn stats() { assert_eq!(code, 200); assert_eq!(response["numberOfDocuments"], 2); - assert!(response["isIndexing"] == false); + assert_eq!(response["isIndexing"], false); assert_eq!(response["fieldDistribution"]["id"], 2); assert_eq!(response["fieldDistribution"]["name"], 1); assert_eq!(response["fieldDistribution"]["age"], 1); @@ -47,11 +47,11 @@ async fn stats() { #[actix_rt::test] async fn error_get_stats_unexisting_index() { - let server = Server::new().await; - let (response, code) = server.index("test").stats().await; + let index = shared_does_not_exists_index().await; + let (response, code) = index.stats().await; let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index a9b02e7d4..291700728 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -2,28 +2,26 @@ use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; use crate::common::encoder::Encoder; -use crate::common::Server; +use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server}; use crate::json; #[actix_rt::test] async fn update_primary_key() { - let server = Server::new().await; - let index = server.index("test"); - let (_, code) = index.create(None).await; + let server = Server::new_shared(); + let index = server.unique_index(); + let (task, code) = index.create(None).await; assert_eq!(code, 202); + index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = index.update(Some("primary")).await; - - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "succeeded"); + index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get().await; assert_eq!(code, 200); - 
assert_eq!(response["uid"], "test"); + assert_eq!(response["uid"], index.uid); assert!(response.get("createdAt").is_some()); assert!(response.get("updatedAt").is_some()); @@ -39,24 +37,23 @@ async fn update_primary_key() { #[actix_rt::test] async fn create_and_update_with_different_encoding() { - let server = Server::new().await; - let index = server.index_with_encoder("test", Encoder::Gzip); - let (_, code) = index.create(None).await; + let server = Server::new_shared(); + let index = server.unique_index_with_encoder(Encoder::Gzip); + let (create_task, code) = index.create(None).await; assert_eq!(code, 202); + index.wait_task(create_task.uid()).await.succeeded(); - let index = server.index_with_encoder("test", Encoder::Brotli); + let index = index.with_encoder(Encoder::Brotli); let (task, _status_code) = index.update(Some("primary")).await; - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "succeeded"); + index.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] async fn update_nothing() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task1, code) = index.create(None).await; assert_eq!(code, 202); @@ -67,35 +64,20 @@ async fn update_nothing() { assert_eq!(code, 202); - let response = index.wait_task(task2.uid()).await; - - assert_eq!(response["status"], "succeeded"); + index.wait_task(task2.uid()).await.succeeded(); } #[actix_rt::test] async fn error_update_existing_primary_key() { - let server = Server::new().await; - let index = server.index("test"); - let (_response, code) = index.create(Some("id")).await; + let index = shared_index_with_documents().await; + + let (update_task, code) = index.update_index_fail(Some("primary")).await; assert_eq!(code, 202); - - let documents = json!([ - { - "id": "11", - "content": "foobar" - } - ]); - index.add_documents(documents, None).await; - - let (task, code) = index.update(Some("primary")).await; - - assert_eq!(code, 202); - - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(update_task.uid()).await.failed(); let expected_response = json!({ - "message": "Index `test`: Index already has a primary key: `id`.", + "message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid), "code": "index_primary_key_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists" @@ -106,15 +88,15 @@ async fn error_update_existing_primary_key() { #[actix_rt::test] async fn error_update_unexisting_index() { - let server = Server::new().await; - let (task, code) = server.index("test").update(None).await; + let index = shared_does_not_exists_index().await; + let (task, code) = index.update_index_fail(Some("my-primary-key")).await; assert_eq!(code, 202); - let response = server.index("test").wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.failed(); let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" diff --git a/crates/meilisearch/tests/search/distinct.rs b/crates/meilisearch/tests/search/distinct.rs index 094ef7bbf..bdc5875e0 100644 --- a/crates/meilisearch/tests/search/distinct.rs +++ b/crates/meilisearch/tests/search/distinct.rs @@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str 
= "product_id"; /// testing: https://github.com/meilisearch/meilisearch/issues/4078 #[actix_rt::test] async fn distinct_search_with_offset_no_ranking() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await; @@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#); + snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#); snapshot!(response["estimatedTotalHits"] , @"11"); let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#); + snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#); snapshot!(response["estimatedTotalHits"], @"10"); let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#); snapshot!(response["estimatedTotalHits"], @"6"); let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"1"); - snapshot!(format!("{:?}", hits), @r#"["345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["345678"]"#); snapshot!(response["estimatedTotalHits"], @"6"); let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["estimatedTotalHits"], @"6"); let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["estimatedTotalHits"], @"6"); } /// testing: https://github.com/meilisearch/meilisearch/issues/4130 #[actix_rt::test] async fn distinct_search_with_pagination_no_ranking() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await; @@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["page"], @"0"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#); + snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#); snapshot!(response["page"], @"1"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ 
-240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#); + snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#); snapshot!(response["page"], @"2"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#); snapshot!(response["page"], @"3"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["page"], @"4"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"3"); - snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#); snapshot!(response["page"], @"2"); snapshot!(response["totalPages"], @"2"); snapshot!(response["totalHits"], @"6"); @@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() { #[actix_rt::test] async fn distinct_at_search_time() { - let server = Server::new().await; - let index = server.index("tamo"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = NESTED_DOCUMENTS.clone(); index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await; let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await; - let task = index.wait_task(task.uid()).await; + let task = index.wait_task(task.uid()).await.succeeded(); snapshot!(task, name: "succeed"); fn get_hits(response: &Value) -> Vec { @@ -299,7 +299,7 @@ async fn distinct_at_search_time() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"3"); - snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###); + snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###); snapshot!(response["page"], @"1"); snapshot!(response["totalPages"], @"1"); snapshot!(response["totalHits"], @"3"); diff --git a/crates/meilisearch/tests/search/errors.rs b/crates/meilisearch/tests/search/errors.rs index 7490544bf..eca4a8cfb 100644 --- a/crates/meilisearch/tests/search/errors.rs +++ b/crates/meilisearch/tests/search/errors.rs @@ -707,7 +707,7 @@ async fn filter_invalid_attribute_array() { |response, code| { snapshot!(response, @r###" { - "message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass", + "message": "Index `[uuid]`: Attribute `many` is not filterable. 
Available filterable attribute patterns are: `title`.\n1:5 many = Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -728,7 +728,7 @@ async fn filter_invalid_attribute_string() { |response, code| { snapshot!(response, @r###" { - "message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass", + "message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -885,7 +885,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -911,7 +911,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -932,7 +932,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule 
#0", + "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -958,7 +958,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -984,7 +984,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1143,7 +1143,7 @@ async fn search_on_unknown_field() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `[uuid]`: Attribute `unknown` is not searchable. 
Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1164,7 +1164,7 @@ async fn search_on_unknown_field_plus_joker() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1182,7 +1182,7 @@ async fn search_on_unknown_field_plus_joker() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1195,10 +1195,8 @@ async fn search_on_unknown_field_plus_joker() { #[actix_rt::test] async fn distinct_at_search_time() { - let server = Server::new().await; - let index = server.index("test"); - let (task, _) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, _code) = index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await; index.wait_task(response.uid()).await.succeeded(); @@ -1208,7 +1206,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", + "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" @@ -1223,7 +1221,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.", + "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" @@ -1238,7 +1236,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.", + "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. 
Available filterable attributes patterns are: `color, <..hidden-attributes>`.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" diff --git a/crates/meilisearch/tests/search/facet_search.rs b/crates/meilisearch/tests/search/facet_search.rs index 65e204702..57d2cfcd2 100644 --- a/crates/meilisearch/tests/search/facet_search.rs +++ b/crates/meilisearch/tests/search/facet_search.rs @@ -50,13 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.facet_search(query.clone()).await; insta::allow_duplicates! { @@ -65,21 +63,18 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = server.delete_index("test").await; assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + server.wait_task(task.uid()).await.succeeded(); eprintln!("Settings -> Documents -> test"); let index = server.index("test"); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.facet_search(query.clone()).await; insta::allow_duplicates! 
{ @@ -88,14 +83,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = server.delete_index("test").await; assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + server.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] async fn simple_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -105,20 +99,20 @@ async fn simple_facet_search() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 2); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["facetHits"].as_array().unwrap().len(), 1); } #[actix_rt::test] async fn simple_facet_search_on_movies() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -212,23 +206,23 @@ async fn simple_facet_search_on_movies() { ]); let (response, code) = index.update_settings_filterable_attributes(json!(["genres", "color"])).await; - assert_eq!(202, code, "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(response["facetHits"], @r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###); } #[actix_rt::test] async fn advanced_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -251,8 +245,8 @@ async fn advanced_facet_search() { #[actix_rt::test] async fn more_advanced_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -275,8 +269,8 @@ async fn more_advanced_facet_search() { #[actix_rt::test] async fn simple_facet_search_with_max_values() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await; @@ -287,14 +281,14 @@ async fn simple_facet_search_with_max_values() { 
let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 1); } #[actix_rt::test] async fn simple_facet_search_by_count_with_max_values() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index @@ -309,14 +303,14 @@ async fn simple_facet_search_by_count_with_max_values() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 1); } #[actix_rt::test] async fn non_filterable_facet_search_error() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -324,17 +318,17 @@ async fn non_filterable_facet_search_error() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); } #[actix_rt::test] async fn facet_search_dont_support_words() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -344,14 +338,14 @@ async fn facet_search_dont_support_words() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["facetHits"].as_array().unwrap().len(), 0); } #[actix_rt::test] async fn simple_facet_search_with_sort_by_count() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await; @@ -362,7 +356,7 @@ async fn simple_facet_search_with_sort_by_count() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["facetHits"].as_array().unwrap(); assert_eq!(hits.len(), 2); assert_eq!(hits[0], json!({ "value": "Action", "count": 3 })); @@ -371,25 +365,25 @@ async fn simple_facet_search_with_sort_by_count() { #[actix_rt::test] async fn add_documents_and_deactivate_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + 
index.wait_task(response.uid()).await.succeeded(); let (response, code) = index .update_settings(json!({ "facetSearch": false, "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(response, @r###" { "message": "The facet search is disabled for this index", @@ -402,8 +396,8 @@ async fn add_documents_and_deactivate_facet_search() { #[actix_rt::test] async fn deactivate_facet_search_and_add_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -411,16 +405,16 @@ async fn deactivate_facet_search_and_add_documents() { "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(response, @r###" { "message": "The facet search is disabled for this index", @@ -433,8 +427,8 @@ async fn deactivate_facet_search_and_add_documents() { #[actix_rt::test] async fn deactivate_facet_search_add_documents_and_activate_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -442,31 +436,31 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() { "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index .update_settings(json!({ "facetSearch": true, })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn deactivate_facet_search_add_documents_and_reset_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, 
code) = index .update_settings(json!({ @@ -474,25 +468,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() { "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index .update_settings(json!({ "facetSearch": serde_json::Value::Null, })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 2); } #[actix_rt::test] @@ -618,8 +612,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() { #[actix_rt::test] async fn distinct_facet_search_on_movies() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -925,26 +919,26 @@ async fn distinct_facet_search_on_movies() { ]); let (response, code) = index.update_settings_filterable_attributes(json!(["genres", "color"])).await; - assert_eq!(202, code, "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.update_settings_distinct_attribute(json!("color")).await; - assert_eq!(202, code, "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await; // non-exhaustive facet count is counting 27 documents with the facet query "blob" but there are only 23 documents with a distinct color. - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###); let (response, code) = index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await; // exhaustive facet count is counting 23 documents with the facet query "blob" which is the number of distinct colors. 
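// Illustrative sketch only, not part of this patch: the pair of requests
// asserted just above and below differ only in `exhaustiveFacetCount`.
// Assuming the settings configured earlier in this test
// (`filterableAttributes: ["genres", "color"]`, `distinctAttribute: "color"`):
//
//     // non-exhaustive (default): every matching document is counted
//     let (response, _) = index
//         .facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": ""}))
//         .await;
//     // response["facetHits"] == [{"value": "Blob", "count": 27}]
//
//     // exhaustive: documents sharing the same distinct `color` count once
//     let (response, _) = index
//         .facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "",
//             "exhaustiveFacetCount": true}))
//         .await;
//     // response["facetHits"] == [{"value": "Blob", "count": 23}]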
- assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###); } diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs index ec80eb63e..564cae5a5 100644 --- a/crates/meilisearch/tests/search/filters.rs +++ b/crates/meilisearch/tests/search/filters.rs @@ -4,23 +4,14 @@ use tempfile::TempDir; use super::test_settings_documents_indexing_swapping_and_search; use crate::common::{ - default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS, + default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server, + DOCUMENTS, NESTED_DOCUMENTS, }; use crate::json; #[actix_rt::test] async fn search_with_filter_string_notation() { - let server = Server::new().await; - let index = server.index("test"); - - let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await; - meili_snap::snapshot!(code, @"202 Accepted"); - - let documents = DOCUMENTS.clone(); - let (task, code) = index.add_documents(documents, None).await; - meili_snap::snapshot!(code, @"202 Accepted"); - let res = index.wait_task(task.uid()).await; - meili_snap::snapshot!(res["status"], @r###""succeeded""###); + let index = shared_index_with_documents().await; index .search( @@ -28,44 +19,34 @@ async fn search_with_filter_string_notation() { "filter": "title = Gläss" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }, ) .await; - let index = server.index("nested"); + let nested_index = shared_index_with_nested_documents().await; - let (_, code) = - index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await; - meili_snap::snapshot!(code, @"202 Accepted"); - - let documents = NESTED_DOCUMENTS.clone(); - let (task, code) = index.add_documents(documents, None).await; - meili_snap::snapshot!(code, @"202 Accepted"); - let res = index.wait_task(task.uid()).await; - meili_snap::snapshot!(res["status"], @r###""succeeded""###); - - index + nested_index .search( json!({ "filter": "cattos = pésti" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); assert_eq!(response["hits"][0]["id"], json!(852)); }, ) .await; - index + nested_index .search( json!({ "filter": "doggos.age > 5" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); assert_eq!(response["hits"][0]["id"], json!(654)); assert_eq!(response["hits"][1]["id"], json!(951)); @@ -82,7 +63,7 @@ async fn search_with_filter_array_notation() { "filter": ["title = Gläss"] })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); let (response, code) = index @@ -90,7 +71,7 @@ async fn search_with_filter_array_notation() { "filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]] })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 3); } @@ -116,7 +97,7 @@ async fn search_with_contains_filter() { "filter": "title CONTAINS cap" })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); 
assert_eq!(response["hits"].as_array().unwrap().len(), 2); } @@ -269,16 +250,14 @@ async fn search_with_pattern_filter_settings() { #[actix_rt::test] async fn search_with_pattern_filter_settings_scenario_1() { - let temp = TempDir::new().unwrap(); - let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap(); + let server = Server::new_shared(); eprintln!("Documents -> Settings -> test"); - let index = server.index("test"); + let index = server.unique_index(); let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - snapshot!(response["status"], @r###""succeeded""###); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index .update_settings(json!({"filterableAttributes": [{ @@ -289,9 +268,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - snapshot!(response["status"], @r###""succeeded""###); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter works index @@ -335,7 +313,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -355,9 +333,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - snapshot!(response["status"], @r###""succeeded""###); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter works index @@ -467,9 +444,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - snapshot!(response["status"], @r###""succeeded""###); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter returns an error index @@ -481,7 +457,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in 
`filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -567,9 +543,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - snapshot!(response["status"], @r###""succeeded""###); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter works index @@ -613,7 +588,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -720,7 +695,7 @@ async fn test_filterable_attributes_priority() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2", + "message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -746,7 +721,7 @@ async fn test_filterable_attributes_priority() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS", + "message": "Index `[uuid]`: Attribute `doggos` is not filterable. 
Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" diff --git a/crates/meilisearch/tests/search/geo.rs b/crates/meilisearch/tests/search/geo.rs index eda02e440..b241386ed 100644 --- a/crates/meilisearch/tests/search/geo.rs +++ b/crates/meilisearch/tests/search/geo.rs @@ -1,55 +1,13 @@ use meili_snap::{json_string, snapshot}; use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME; -use once_cell::sync::Lazy; use super::test_settings_documents_indexing_swapping_and_search; -use crate::common::{Server, Value}; +use crate::common::shared_index_with_geo_documents; use crate::json; -static DOCUMENTS: Lazy = Lazy::new(|| { - json!([ - { - "id": 1, - "name": "Taco Truck", - "address": "444 Salsa Street, Burritoville", - "type": "Mexican", - "rating": 9, - "_geo": { - "lat": 34.0522, - "lng": -118.2437 - } - }, - { - "id": 2, - "name": "La Bella Italia", - "address": "456 Elm Street, Townsville", - "type": "Italian", - "rating": 9, - "_geo": { - "lat": "45.4777599", - "lng": "9.1967508" - } - }, - { - "id": 3, - "name": "Crêpe Truck", - "address": "2 Billig Avenue, Rouenville", - "type": "French", - "rating": 10 - } - ]) -}); - #[actix_rt::test] async fn geo_sort_with_geo_strings() { - let server = Server::new().await; - let index = server.index("test"); - - let documents = DOCUMENTS.clone(); - index.update_settings_filterable_attributes(json!(["_geo"])).await; - index.update_settings_sortable_attributes(json!(["_geo"])).await; - let (task, _status_code) = index.add_documents(documents, None).await; - index.wait_task(task.uid()).await.succeeded(); + let index = shared_index_with_geo_documents().await; index .search( @@ -58,7 +16,7 @@ async fn geo_sort_with_geo_strings() { "sort": ["_geoPoint(0.0, 0.0):asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); }, ) .await; @@ -66,14 +24,7 @@ async fn geo_sort_with_geo_strings() { #[actix_rt::test] async fn geo_bounding_box_with_string_and_number() { - let server = Server::new().await; - let index = server.index("test"); - - let documents = DOCUMENTS.clone(); - index.update_settings_filterable_attributes(json!(["_geo"])).await; - index.update_settings_sortable_attributes(json!(["_geo"])).await; - let (ret, _code) = index.add_documents(documents, None).await; - index.wait_task(ret.uid()).await.succeeded(); + let index = shared_index_with_geo_documents().await; index .search( @@ -81,7 +32,7 @@ async fn geo_bounding_box_with_string_and_number() { "filter": "_geoBoundingBox([89, 179], [-89, -179])", }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -123,14 +74,7 @@ async fn geo_bounding_box_with_string_and_number() { #[actix_rt::test] async fn bug_4640() { // https://github.com/meilisearch/meilisearch/issues/4640 - let server = Server::new().await; - let index = server.index("test"); - - let documents = DOCUMENTS.clone(); - index.add_documents(documents, None).await; - index.update_settings_filterable_attributes(json!(["_geo"])).await; - let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await; - index.wait_task(ret.uid()).await.succeeded(); + let index = shared_index_with_geo_documents().await; // Sort the document with the second one first index @@ -139,7 +83,7 @@ async 
fn bug_4640() { "sort": ["_geoPoint(45.4777599, 9.1967508):asc"], }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -202,7 +146,7 @@ async fn geo_asc_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}), &json!({"q": "jean"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -247,7 +191,7 @@ async fn geo_asc_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}), &json!({"q": "bob"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -284,7 +228,7 @@ async fn geo_asc_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}), &json!({"q": "intel"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -324,7 +268,7 @@ async fn geo_sort_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "sort"], "sortableAttributes": [RESERVED_GEO_FIELD_NAME]}), &json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ diff --git a/crates/meilisearch/tests/search/hybrid.rs b/crates/meilisearch/tests/search/hybrid.rs index 3282a357a..c6eb39a3a 100644 --- a/crates/meilisearch/tests/search/hybrid.rs +++ b/crates/meilisearch/tests/search/hybrid.rs @@ -2,31 +2,31 @@ use meili_snap::snapshot; use once_cell::sync::Lazy; use crate::common::index::Index; -use crate::common::{Server, Value}; +use crate::common::{Server, Shared, Value}; use crate::json; async fn index_with_documents_user_provided<'a>( - server: &'a Server, + server: &'a Server<Shared>, documents: &Value, ) -> Index<'a> { - let index = server.index("test"); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ "embedders": {"default": { "source": "userProvided", "dimensions": 2}}} )) .await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); index } -async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> { - let index = server.index("test"); +async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Value) -> I "documentTemplate": "{{doc.title}}, {{doc.desc}}" }}} )) .await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = 
index.add_documents(documents.clone(), None).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); index } @@ -76,6 +76,48 @@ static SINGLE_DOCUMENT_VEC: Lazy<Value> = Lazy::new(|| { }]) }); +static TEST_DISTINCT_DOCUMENTS: Lazy<Value> = Lazy::new(|| { + // for query "Captain Marvel" and vector [1.0, 1.0] + json!([ + { + "id": 0, + "search": "Captain Planet", + "desc": "#2 for keyword search, #3 for hybrid search", + "_vectors": { + "default": [-1.0, 0.0], + }, + "distinct": 0 + }, + { + "id": 1, + "search": "Captain Marvel", + "desc": "#1 for keyword search, #4 for hybrid search", + "_vectors": { + "default": [-1.0, -1.0], + }, + "distinct": 1 + }, + { + "id": 2, + "search": "Some Captain at least", + "desc": "#3 for keyword search, #1 for hybrid search", + "_vectors": { + "default": [1.0, 1.0], + }, + "distinct": 0 + }, + { + "id": 3, + "search": "Irrelevant Capitaine", + "desc": "#4 for keyword search, #2 for hybrid search", + "_vectors": { + "default": [1.0, 0.0], + }, + "distinct": 1 + }, + ]) +}); + static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| { json!([ { @@ -97,8 +139,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| { #[actix_rt::test] async fn simple_search() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -130,8 +172,8 @@ async fn simple_search() { #[actix_rt::test] async fn limit_offset() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -143,8 +185,8 @@ async fn limit_offset() { snapshot!(response["semanticHitCount"], @"0"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -159,8 +201,8 @@ async fn limit_offset() { #[actix_rt::test] async fn simple_search_hf() { - let server = Server::new().await; - let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (response, code) = index .search_post( @@ -211,8 +253,8 @@ async fn simple_search_hf() { #[actix_rt::test] async fn distribution_shift() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true}); let (response, code) = index.search_post(search.clone()).await; @@ -233,7 +275,7 @@ async fn distribution_shift() { .await; snapshot!(code, @"202 Accepted"); - let response = server.wait_task(response.uid()).await; + let 
response = server.wait_task(response.uid()).await.succeeded(); snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#); let (response, code) = index.search_post(search).await; @@ -243,8 +285,8 @@ #[actix_rt::test] async fn highlighter() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0], @@ -298,8 +340,8 @@ #[actix_rt::test] async fn invalid_semantic_ratio() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -370,8 +412,8 @@ #[actix_rt::test] async fn single_document() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SINGLE_DOCUMENT_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SINGLE_DOCUMENT_VEC).await; let (response, code) = index .search_post( @@ -386,8 +428,8 @@ #[actix_rt::test] async fn query_combination() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; // search without query and vector, but with hybrid => still placeholder let (response, code) = index @@ -493,10 +535,54 @@ snapshot!(response["semanticHitCount"], @"0"); } +// see +#[actix_rt::test] +async fn distinct_is_applied() { + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &TEST_DISTINCT_DOCUMENTS).await; + + let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); + + // pure keyword + let (response, code) = index + .search_post( + json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}), + ) + .await; + snapshot!(code, @"200 OK"); + snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###); + snapshot!(response["semanticHitCount"], @"null"); + snapshot!(response["estimatedTotalHits"], @"2"); + + // pure semantic + let (response, code) = index + .search_post( + json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}), + ) + .await; + snapshot!(code, @"200 OK"); + snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###); + snapshot!(response["semanticHitCount"], @"2"); + 
snapshot!(response["estimatedTotalHits"], @"2"); + + // hybrid + let (response, code) = index + .search_post( + json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}), + ) + .await; + snapshot!(code, @"200 OK"); + snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###); + snapshot!(response["semanticHitCount"], @"1"); + snapshot!(response["estimatedTotalHits"], @"2"); +} + #[actix_rt::test] async fn retrieve_vectors() { - let server = Server::new().await; - let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (response, code) = index .search_post( @@ -546,7 +632,7 @@ async fn retrieve_vectors() { let (response, code) = index .update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} )) .await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index @@ -596,7 +682,7 @@ async fn retrieve_vectors() { // remove `_vectors` from displayed attributes let (response, code) = index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index diff --git a/crates/meilisearch/tests/search/locales.rs b/crates/meilisearch/tests/search/locales.rs index 282589d6a..b1c9b2bc2 100644 --- a/crates/meilisearch/tests/search/locales.rs +++ b/crates/meilisearch/tests/search/locales.rs @@ -89,9 +89,9 @@ static DOCUMENTS: Lazy = Lazy::new(|| { #[actix_rt::test] async fn simple_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); index .update_settings( @@ -196,9 +196,9 @@ async fn simple_search() { #[actix_rt::test] async fn force_locales() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -211,10 +211,10 @@ async fn force_locales() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -274,9 +274,9 @@ async fn force_locales() { #[actix_rt::test] async fn force_locales_with_pattern() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -289,10 +289,10 @@ async fn force_locales_with_pattern() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", 
"enqueuedAt": "[date]" @@ -352,9 +352,9 @@ async fn force_locales_with_pattern() { #[actix_rt::test] async fn force_locales_with_pattern_nested() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = NESTED_DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -365,10 +365,10 @@ async fn force_locales_with_pattern_nested() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -423,9 +423,9 @@ async fn force_locales_with_pattern_nested() { } #[actix_rt::test] async fn force_different_locales_with_pattern() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -440,10 +440,10 @@ async fn force_different_locales_with_pattern() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -499,9 +499,9 @@ async fn force_different_locales_with_pattern() { #[actix_rt::test] async fn auto_infer_locales_at_search_with_attributes_to_search_on() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -518,10 +518,10 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -577,9 +577,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() { #[actix_rt::test] async fn auto_infer_locales_at_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -592,10 +592,10 @@ async fn auto_infer_locales_at_search() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -676,9 +676,9 @@ async fn auto_infer_locales_at_search() { #[actix_rt::test] async fn force_different_locales_with_pattern_nested() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = NESTED_DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -691,10 +691,10 @@ async fn force_different_locales_with_pattern_nested() { ] })) .await; - snapshot!(response, @r###" + 
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -774,9 +774,9 @@ async fn force_different_locales_with_pattern_nested() { #[actix_rt::test] async fn settings_change() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = NESTED_DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; index.wait_task(task.uid()).await.succeeded(); @@ -789,10 +789,10 @@ async fn settings_change() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 1, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -852,10 +852,10 @@ async fn settings_change() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 2, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -906,9 +906,9 @@ async fn settings_change() { #[actix_rt::test] async fn invalid_locales() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); index .update_settings( @@ -945,9 +945,9 @@ async fn invalid_locales() { #[actix_rt::test] async fn invalid_localized_attributes_rules() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let (response, _) = index .update_settings(json!({ "localizedAttributes": [ @@ -1015,19 +1015,19 @@ async fn invalid_localized_attributes_rules() { #[actix_rt::test] async fn simple_facet_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ "filterableAttributes": ["name_en", "name_ja", "name_zh"], })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -1073,9 +1073,9 @@ async fn simple_facet_search() { #[actix_rt::test] async fn facet_search_with_localized_attributes() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -1086,10 +1086,10 @@ async fn facet_search_with_localized_attributes() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -1146,9 +1146,9 @@ async fn 
facet_search_with_localized_attributes() { #[actix_rt::test] async fn swedish_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = json!([ {"id": "tra1-1", "product": "trä"}, {"id": "tra2-1", "product": "traktor"}, @@ -1269,9 +1269,9 @@ async fn swedish_search() { #[actix_rt::test] async fn german_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = json!([ {"id": 1, "product": "Interkulturalität"}, {"id": 2, "product": "Wissensorganisation"}, diff --git a/crates/meilisearch/tests/search/matching_strategy.rs b/crates/meilisearch/tests/search/matching_strategy.rs index 3b4325c10..ece320b2a 100644 --- a/crates/meilisearch/tests/search/matching_strategy.rs +++ b/crates/meilisearch/tests/search/matching_strategy.rs @@ -2,11 +2,11 @@ use meili_snap::snapshot; use once_cell::sync::Lazy; use crate::common::index::Index; -use crate::common::{Server, Value}; +use crate::common::{Server, Shared, Value}; use crate::json; -async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> { - let index = server.index("test"); +async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> { + let index = server.unique_index(); let (task, _status_code) = index.add_documents(documents.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| { #[actix_rt::test] async fn simple_search() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| { @@ -75,8 +75,8 @@ async fn simple_search() { #[actix_rt::test] async fn search_with_typo() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| { @@ -102,8 +102,8 @@ async fn search_with_typo() { #[actix_rt::test] async fn search_with_unknown_word() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| { diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index ff6d3e2b8..be476da35 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -1,4 +1,4 @@ -// This modules contains all the test concerning search. Each particular feature of the search +// This module contains all the tests concerning search. Each particular feature of the search should be tested in its own module to isolate tests and keep the tests readable.
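// A minimal sketch of the shared-server pattern the tests below are migrated to.
// The helper names (`Server::new_shared`, `unique_index`, `wait_task`, `succeeded`)
// all come from the `common` test module as used throughout this diff; the document
// payload is illustrative only:
//
//     let server = Server::new_shared();  // one server instance reused across tests
//     let index = server.unique_index();  // per-test index with a unique uid, so tests don't collide
//     let (task, _code) = index.add_documents(json!([{ "id": 1 }]), None).await;
//     index.wait_task(task.uid()).await.succeeded();  // wait for indexing and assert success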
mod distinct; @@ -17,12 +17,11 @@ mod restrict_searchable; mod search_queue; use meili_snap::{json_string, snapshot}; -use meilisearch::Opt; -use tempfile::TempDir; use crate::common::{ - default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server, - Value, DOCUMENTS, FRUITS_DOCUMENTS, NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS, + shared_index_with_documents, shared_index_with_nested_documents, + shared_index_with_score_documents, Server, Value, DOCUMENTS, FRUITS_DOCUMENTS, + NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS, }; use crate::json; @@ -32,46 +31,33 @@ async fn test_settings_documents_indexing_swapping_and_search( query: &Value, test: impl Fn(Value, actix_http::StatusCode) + std::panic::UnwindSafe + Clone, ) { - let temp = TempDir::new().unwrap(); - let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap(); + let server = Server::new_shared(); eprintln!("Documents -> Settings -> test"); - let index = server.index("test"); + let index = server.unique_index(); let (task, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.update_settings(settings.clone()).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); index.search(query.clone(), test.clone()).await; - let (task, code) = server.delete_index("test").await; - assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); eprintln!("Settings -> Documents -> test"); - let index = server.index("test"); + let index = server.unique_index(); let (task, code) = index.update_settings(settings.clone()).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); index.search(query.clone(), test.clone()).await; - let (task, code) = server.delete_index("test").await; - assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); } #[actix_rt::test] @@ -79,7 +65,7 @@ async fn simple_placeholder_search() { let index = shared_index_with_documents().await; index .search(json!({}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); }) .await; @@ -87,7 +73,7 @@ async fn simple_placeholder_search() { let index = shared_index_with_nested_documents().await; index .search(json!({}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 4); }) .await; @@ -98,7 +84,7 @@ async fn simple_search() { let index = 
shared_index_with_documents().await; index .search(json!({"q": "glass"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }) .await; @@ -106,7 +92,7 @@ async fn simple_search() { let index = shared_index_with_nested_documents().await; index .search(json!({"q": "pésti"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); }) .await; @@ -115,8 +101,8 @@ async fn simple_search() { /// See #[actix_rt::test] async fn bug_5547() { - let server = Server::new().await; - let index = server.index("big_fst"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, _code) = index.create(None).await; index.wait_task(response.uid()).await.succeeded(); @@ -135,22 +121,22 @@ async fn bug_5547() { #[actix_rt::test] async fn search_with_stop_word() { // related to https://github.com/meilisearch/meilisearch/issues/4984 - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_, code) = index .update_settings(json!({"stopWords": ["the", "The", "a", "an", "to", "in", "of"]})) .await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); - index.add_documents(documents, None).await; - index.wait_task(1).await; + let (task, _code) = index.add_documents(documents, None).await; + index.wait_task(task.uid()).await.succeeded(); // prefix search index .search(json!({"q": "to the", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @"[]"); }) .await; @@ -158,7 +144,7 @@ async fn search_with_stop_word() { // non-prefix search index .search(json!({"q": "to the ", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -200,13 +186,13 @@ async fn search_with_stop_word() { #[actix_rt::test] async fn search_with_typo_settings() { // related to https://github.com/meilisearch/meilisearch/issues/5240 - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_, code) = index .update_settings(json!({"typoTolerance": { "disableOnAttributes": ["title", "id"]}})) .await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -214,7 +200,7 @@ async fn search_with_typo_settings() { index .search(json!({"q": "287947" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -234,11 +220,11 @@ async fn search_with_typo_settings() { #[actix_rt::test] async fn phrase_search_with_stop_word() { // related to https://github.com/meilisearch/meilisearch/issues/3521 - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_, code) = index.update_settings(json!({"stopWords": ["the", 
"of"]})).await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -246,7 +232,7 @@ async fn phrase_search_with_stop_word() { index .search(json!({"q": "how \"to\" train \"the" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }) .await; @@ -257,7 +243,7 @@ async fn negative_phrase_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "-\"train your dragon\"" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 4); assert_eq!(hits[0]["id"], "287947"); @@ -273,7 +259,7 @@ async fn negative_word_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "-escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 4); assert_eq!(hits[0]["id"], "287947"); @@ -286,7 +272,7 @@ async fn negative_word_search() { // Everything that contains derivates of escape but not escape: nothing index .search(json!({"q": "-escape escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 0); }) @@ -298,7 +284,7 @@ async fn non_negative_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "- escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 1); assert_eq!(hits[0]["id"], "522681"); @@ -307,7 +293,7 @@ async fn non_negative_search() { index .search(json!({"q": "- \"train your dragon\"" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 1); assert_eq!(hits[0]["id"], "166428"); @@ -317,8 +303,8 @@ async fn non_negative_search() { #[actix_rt::test] async fn negative_special_cases_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -331,7 +317,7 @@ async fn negative_special_cases_search() { // There is a synonym for escape -> glass but we don't want "escape", only the derivates: glass index .search(json!({"q": "-escape escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 1); assert_eq!(hits[0]["id"], "450465"); @@ -343,8 +329,8 @@ async fn negative_special_cases_search() { #[cfg(not(feature = "chinese-pinyin"))] #[actix_rt::test] async fn test_kanji_language_detection() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { "id": 0, "title": "The quick (\"brown\") fox can't jump 32.3 feet, right? Brr, it's 29.3°F!" 
}, @@ -356,7 +342,7 @@ async fn test_kanji_language_detection() { index .search(json!({"q": "東京"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }) .await; @@ -365,8 +351,8 @@ async fn test_kanji_language_detection() { #[cfg(feature = "default")] #[actix_rt::test] async fn test_thai_language() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // We don't need documents, the issue is on the query side only. let documents = json!([ @@ -382,7 +368,7 @@ async fn test_thai_language() { index .search(json!({"q": "สบู"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); }) .await; } @@ -400,7 +386,7 @@ async fn search_multiple_params() { "offset": 0, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }, ) @@ -417,7 +403,7 @@ async fn search_multiple_params() { "offset": 0, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); }, ) @@ -433,7 +419,7 @@ async fn search_with_sort_on_numbers() { "sort": ["id:asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); }, ) @@ -446,7 +432,7 @@ async fn search_with_sort_on_numbers() { "sort": ["doggos.age:asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 4); }, ) @@ -462,7 +448,7 @@ async fn search_with_sort_on_strings() { "sort": ["title:desc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); }, ) @@ -475,7 +461,7 @@ async fn search_with_sort_on_strings() { "sort": ["doggos.name:asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 4); }, ) @@ -490,7 +476,7 @@ async fn search_with_multiple_sort() { "sort": ["id:asc", "title:desc"] })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); } @@ -503,7 +489,7 @@ async fn search_facet_distribution() { "facets": ["title"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert!(dist.get("title").is_some()); @@ -521,7 +507,7 @@ async fn search_facet_distribution() { "facets": ["father"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert_eq!( @@ -544,9 +530,9 @@ async fn search_facet_distribution() { "facets": ["doggos.name"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); - assert_eq!(dist.len(), 1, "{:?}", dist); + assert_eq!(dist.len(), 1, "{dist:?}"); assert_eq!( dist["doggos.name"], json!({ "bobby": 1, 
"buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1}) @@ -561,9 +547,9 @@ async fn search_facet_distribution() { "facets": ["doggos"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); - assert_eq!(dist.len(), 3, "{:?}", dist); + assert_eq!(dist.len(), 3, "{dist:?}"); assert_eq!( dist["doggos.name"], json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1}) @@ -579,7 +565,7 @@ async fn search_facet_distribution() { "facets": ["doggos.name"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert_eq!( @@ -593,8 +579,8 @@ async fn search_facet_distribution() { #[actix_rt::test] async fn displayed_attributes() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({ "displayedAttributes": ["title"] })).await; @@ -604,14 +590,14 @@ async fn displayed_attributes() { let (response, code) = index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert!(response["hits"][0].get("title").is_some()); } #[actix_rt::test] async fn placeholder_search_is_hard_limited() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect(); let (task, _status_code) = index.add_documents(documents.into(), None).await; @@ -623,7 +609,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1000); }, ) @@ -636,7 +622,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -652,7 +638,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1200); }, ) @@ -665,7 +651,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -674,8 +660,8 @@ async fn placeholder_search_is_hard_limited() { #[actix_rt::test] async fn search_is_hard_limited() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" 
})).collect(); let (task, _status_code) = index.add_documents(documents.into(), None).await; @@ -688,7 +674,7 @@ async fn search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1000); }, ) @@ -702,7 +688,7 @@ async fn search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -719,7 +705,7 @@ async fn search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1200); }, ) @@ -733,7 +719,7 @@ async fn search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -742,8 +728,8 @@ async fn search_is_hard_limited() { #[actix_rt::test] async fn faceting_max_values_per_facet() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({ "filterableAttributes": ["number"] })).await; @@ -757,7 +743,7 @@ async fn faceting_max_values_per_facet() { "facets": ["number"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let numbers = response["facetDistribution"]["number"].as_object().unwrap(); assert_eq!(numbers.len(), 100); }, @@ -774,7 +760,7 @@ async fn faceting_max_values_per_facet() { "facets": ["number"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let numbers = &response["facetDistribution"]["number"].as_object().unwrap(); assert_eq!(numbers.len(), 10_000); }, @@ -784,13 +770,7 @@ async fn faceting_max_values_per_facet() { #[actix_rt::test] async fn test_score_details() { - let server = Server::new().await; - let index = server.index("test"); - - let documents = DOCUMENTS.clone(); - - let res = index.add_documents(json!(documents), None).await; - index.wait_task(res.0.uid()).await.succeeded(); + let index = shared_index_with_documents().await; index .search( @@ -799,8 +779,8 @@ async fn test_score_details() { "showRankingScoreDetails": true, }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -850,13 +830,7 @@ async fn test_score_details() { #[actix_rt::test] async fn test_score() { - let server = Server::new().await; - let index = server.index("test"); - - let documents = SCORE_DOCUMENTS.clone(); - - let res = index.add_documents(json!(documents), None).await; - index.wait_task(res.0.uid()).await.succeeded(); + let index = shared_index_with_score_documents().await; index .search( @@ -865,8 +839,8 @@ async fn test_score() { "showRankingScore": true, }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -903,13 +877,7 @@ async fn test_score() { 
#[actix_rt::test] async fn test_score_threshold() { let query = "Badman dark returns 1"; - let server = Server::new().await; - let index = server.index("test"); - - let documents = SCORE_DOCUMENTS.clone(); - - let res = index.add_documents(json!(documents), None).await; - index.wait_task(res.0.uid()).await.succeeded(); + let index = shared_index_with_score_documents().await; index .search( @@ -919,9 +887,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.0 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"5"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @"5"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -962,9 +930,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.2 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"3"###); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"3"###); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -995,9 +963,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.5 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"2"###); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"2"###); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -1023,9 +991,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.8 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"1"###); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"1"###); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -1046,10 +1014,10 @@ async fn test_score_threshold() { "rankingScoreThreshold": 1.0 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"0"###); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"0"###); // nobody is perfect - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @"[]"); + snapshot!(json_string!(response["hits"]), @"[]"); }, ) .await; @@ -1057,8 +1025,8 @@ async fn test_score_threshold() { #[actix_rt::test] async fn test_degraded_score_details() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = NESTED_DOCUMENTS.clone(); @@ -1075,8 +1043,8 @@ async fn test_degraded_score_details() { "showRankingScoreDetails": true, }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response, { ".processingTimeMs" => 
"[duration]" }), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###" { "hits": [ { @@ -1146,8 +1114,8 @@ async fn test_degraded_score_details() { #[cfg(feature = "default")] #[actix_rt::test] async fn camelcased_words() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // related to https://github.com/meilisearch/meilisearch/issues/3818 let documents = json!([ @@ -1162,8 +1130,8 @@ async fn camelcased_words() { index .search(json!({"q": "deLonghi"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 0, @@ -1180,8 +1148,8 @@ async fn camelcased_words() { index .search(json!({"q": "dellonghi"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 0, @@ -1198,8 +1166,8 @@ async fn camelcased_words() { index .search(json!({"q": "testa"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1220,8 +1188,8 @@ async fn camelcased_words() { index .search(json!({"q": "testab"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1242,8 +1210,8 @@ async fn camelcased_words() { index .search(json!({"q": "TestaB"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1264,8 +1232,8 @@ async fn camelcased_words() { index .search(json!({"q": "Testab"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1286,8 +1254,8 @@ async fn camelcased_words() { index .search(json!({"q": "TestAb"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1309,8 +1277,8 @@ async fn camelcased_words() { // with Typos index .search(json!({"q": "dellonghi"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 0, @@ -1327,8 +1295,8 @@ async fn camelcased_words() { index .search(json!({"q": "TetsAB"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1349,8 +1317,8 @@ async fn camelcased_words() { index .search(json!({"q": "TetsAB"}), |response, code| { - 
meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1372,13 +1340,13 @@ async fn camelcased_words() { #[actix_rt::test] async fn simple_search_with_strange_synonyms() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await; - let r = index.wait_task(task.uid()).await; - meili_snap::snapshot!(r["status"], @r###""succeeded""###); + let r = index.wait_task(task.uid()).await.succeeded(); + snapshot!(r["status"], @r###""succeeded""###); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -1386,8 +1354,8 @@ async fn simple_search_with_strange_synonyms() { index .search(json!({"q": "How to train"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -1404,8 +1372,8 @@ async fn simple_search_with_strange_synonyms() { index .search(json!({"q": "How & train"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -1422,8 +1390,8 @@ async fn simple_search_with_strange_synonyms() { index .search(json!({"q": "to"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -1441,8 +1409,8 @@ async fn simple_search_with_strange_synonyms() { #[actix_rt::test] async fn change_attributes_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({ "searchableAttributes": ["father", "mother"] })).await; @@ -1462,8 +1430,8 @@ async fn change_attributes_settings() { "attributesToRetrieve": ["id", "doggos"] }), |response, code| { - assert_eq!(code, 200, "{}", response); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + assert_eq!(code, 200, "{response}"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 852, @@ -1493,8 +1461,8 @@ async fn change_attributes_settings() { "attributesToRetrieve": ["id", "doggos"] }), |response, code| { - assert_eq!(code, 200, "{}", response); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + assert_eq!(code, 200, "{response}"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 852, @@ -1563,7 +1531,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "document"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1609,7 +1577,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "zeroth"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); 
snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1627,7 +1595,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "first"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1650,7 +1618,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "field"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1686,7 +1654,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "array"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); // nested is not searchable snapshot!(json_string!(response["hits"]), @"[]"); }, @@ -1698,7 +1666,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "lied"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); // nested is not searchable snapshot!(json_string!(response["hits"]), @"[]"); }, @@ -1711,7 +1679,7 @@ async fn test_nested_fields() { &settings, &json!({"filter": "nested.object = field"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1747,7 +1715,7 @@ async fn test_nested_fields() { &settings, &json!({"filter": "nested.machin = bidule"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1770,10 +1738,10 @@ async fn test_nested_fields() { &settings, &json!({"filter": "nested = array"}), |response, code| { - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array", + "message": "Index `[uuid]`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1789,10 +1757,10 @@ async fn test_nested_fields() { &settings, &json!({"filter": r#"nested = "I lied""#}), |response, code| { - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"", + "message": "Index `[uuid]`: Attribute `nested` is not filterable. 
Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1850,7 +1818,7 @@ async fn test_typo_settings() { }), &json!({"q": "document"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1902,7 +1870,7 @@ async fn test_typo_settings() { }), &json!({"q": "docume"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1946,8 +1914,8 @@ async fn test_typo_settings() { /// Modifying facets with different casing should work correctly #[actix_rt::test] async fn change_facet_casing() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -1955,7 +1923,7 @@ async fn change_facet_casing() { })) .await; assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index .add_documents( @@ -1968,7 +1936,7 @@ async fn change_facet_casing() { None, ) .await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index .add_documents( @@ -1981,12 +1949,12 @@ async fn change_facet_casing() { None, ) .await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); index .search(json!({ "facets": ["dog"] }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["facetDistribution"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["facetDistribution"]), @r###" { "dog": { "bouvier bernois": 1 diff --git a/crates/meilisearch/tests/search/multi/proxy.rs b/crates/meilisearch/tests/search/multi/proxy.rs index d267ee153..55736d058 100644 --- a/crates/meilisearch/tests/search/multi/proxy.rs +++ b/crates/meilisearch/tests/search/multi/proxy.rs @@ -2296,6 +2296,7 @@ async fn error_remote_500_once() { } #[actix_rt::test] +#[ignore] async fn error_remote_timeout() { let ms0 = Server::new().await; let ms1 = Server::new().await; diff --git a/crates/meilisearch/tests/search/pagination.rs b/crates/meilisearch/tests/search/pagination.rs index ff601bd5b..f8b698a95 100644 --- a/crates/meilisearch/tests/search/pagination.rs +++ b/crates/meilisearch/tests/search/pagination.rs @@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() { let index = shared_index_with_documents().await; index .search(json!({}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert!(response.get("estimatedTotalHits").is_some()); assert!(response.get("limit").is_some()); assert!(response.get("offset").is_some()); @@ -25,7 +25,7 @@ async fn simple_search() { let index = shared_index_with_documents().await; index .search(json!({"page": 1}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); assert!(response.get("totalHits").is_some()); assert_eq!(response["page"], 1); @@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() { let index 
= shared_index_with_documents().await; index .search(json!({"page": 0}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 0); assert!(response.get("totalHits").is_some()); assert_eq!(response["page"], 0); @@ -58,7 +58,7 @@ async fn hits_per_page_1() { let index = shared_index_with_documents().await; index .search(json!({"hitsPerPage": 1}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); assert_eq!(response["totalHits"], 5); assert_eq!(response["page"], 1); @@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() { let index = shared_index_with_documents().await; index .search(json!({"hitsPerPage": 0}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 0); assert_eq!(response["totalHits"], 5); assert_eq!(response["page"], 1); @@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() { for page in 0..=4 { index .search(json!({"page": page, "hitsPerPage": 1}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["totalHits"], 4); assert_eq!(response["totalPages"], 4); }) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs index ce99c4047..e5408a210 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot}; use once_cell::sync::Lazy; use crate::common::index::Index; -use crate::common::{Server, Value}; +use crate::common::{Server, Shared, Value}; use crate::json; -async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> { - let index = server.index("test"); +async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> { + let index = server.unique_index(); let (task, _code) = index.add_documents(documents.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| { #[actix_rt::test] async fn simple_search_on_title() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // simple search should return 2 documents (ids: 2 and 3).
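// (a note on the restriction being tested: judging from the test name, the search
// request limits `attributesToSearchOn` to the title field, so documents matching
// only in other fields are not returned)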
index @@ -51,8 +51,8 @@ async fn simple_search_on_title() { #[actix_rt::test] async fn search_no_searchable_attribute_set() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search( @@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() { #[actix_rt::test] async fn search_on_all_attributes() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| { @@ -106,8 +106,8 @@ async fn search_on_all_attributes() { #[actix_rt::test] async fn search_on_all_attributes_restricted_set() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await; index.wait_task(task.uid()).await.succeeded(); @@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() { #[actix_rt::test] async fn simple_prefix_search_on_title() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // simple search should return 2 documents (ids: 2 and 3). index @@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() { #[actix_rt::test] async fn simple_search_on_title_matching_strategy_all() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // simple search matching strategy all should only return 1 document (ids: 2). index .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| { @@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() { #[actix_rt::test] async fn simple_search_on_no_field() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // simple search on no field shouldn't return any document. index .search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| { @@ -161,8 +161,8 @@ async fn simple_search_on_no_field() { #[actix_rt::test] async fn word_ranking_rule_order() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // Document 3 should appear before document 2. 
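// (expected because the default `words` ranking rule favors documents that match
// more of the query terms; a hedged note, since the ranking-rule settings are not
// shown in this diff)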
index @@ -189,8 +189,8 @@ async fn word_ranking_rule_order() { #[actix_rt::test] async fn word_ranking_rule_order_exact_words() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (task, _status_code) = index .update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]})) .await; @@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() { #[actix_rt::test] async fn typo_ranking_rule_order() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Capitain Marivel", @@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() { #[actix_rt::test] async fn attributes_ranking_rule_order() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Captain Marvel", @@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() { #[actix_rt::test] async fn exactness_ranking_rule_order() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Captain Marvel", @@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() { #[actix_rt::test] async fn search_on_exact_field() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Captain Marvel", @@ -359,7 +359,7 @@ async fn search_on_exact_field() { let (response, code) = index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); // Searching on an exact attribute should only return the document matching without typo. 
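// (this follows from the `disableOnAttributes: ["exact"]` typo-tolerance setting
// applied a few lines above)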
index @@ -372,7 +372,7 @@ #[actix_rt::test] async fn phrase_search_on_title() { - let server = Server::new().await; + let server = Server::new_shared(); let documents = json!([ { "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" }, { "id": 5, "desc": "Document Review", "title": "Document Review Attorney" }, @@ -383,7 +383,7 @@ async fn phrase_search_on_title() { { "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" }, { "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" } ]); - let index = index_with_documents(&server, &documents).await; + let index = index_with_documents(server, &documents).await; index .search( @@ -416,3 +416,381 @@ async fn phrase_search_on_title() { ) .await; } + +static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| { + json!([ + { + "details": { + "title": "Shazam!", + "desc": "a Captain Marvel ersatz", + "weaknesses": ["magic", "requires transformation"], + "outfit": { + "has_cape": true, + "colors": { + "primary": "red", + "secondary": "gold" + } + } + }, + "id": "1", + }, + { + "details": { + "title": "Captain Planet", + "desc": "He's not part of the Marvel Cinematic Universe", + "blue_skin": true, + "outfit": { + "has_cape": false + } + }, + "id": "2", + }, + { + "details": { + "title": "Captain Marvel", + "desc": "a Shazam ersatz", + "weaknesses": ["magic", "power instability"], + "outfit": { + "has_cape": false + } + }, + "id": "3", + }]) +}); + +#[actix_rt::test] +async fn nested_search_on_title_with_prefix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Wildcard should match to 'details.' attribute + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_with_suffix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Wildcard should match to any attribute inside 'details.' 
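+ // (e.g. 'details.title', 'details.desc', or deeper paths such as 'details.outfit.colors.secondary')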
+ // It's worth noting the difference between 'details.*' and '*.title' + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; + + // Should return 1 document (ids: 1) + index + .search( + json!({"q": "gold", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "1" + } + ]"###); + }, + ) + .await; + + // Should return 2 documents (ids: 1 and 2) + index + .search( + json!({"q": "true", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "1" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_on_title_restricted_set_with_suffix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + let (task, _status_code) = + index.update_settings_searchable_attributes(json!(["details.title"])).await; + index.wait_task(task.uid()).await.succeeded(); + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_no_searchable_attribute_set_with_any_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"0"); + }, + ) + .await; + + let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await; + index.wait_task(task.uid()).await.succeeded(); + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"0"); + }, + ) + .await; + + let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await; + index.wait_task(task.uid()).await.succeeded(); + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_prefix_search_on_title_with_prefix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3). 
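+ // How the two wildcard forms behave, as inferred from the expectations in
+ // this file (a reading of the tests above, not of the resolver itself):
+ //   "*.title"   selects any searchable path whose last segment is `title`,
+ //               i.e. only `details.title` in NESTED_SEARCH_DOCUMENTS;
+ //   "details.*" selects every searchable path nested under `details`,
+ //               including `details.desc`, `details.weaknesses`, and the
+ //               deeper `details.outfit.colors.*` values (hence the "gold"
+ //               hit above).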
+ index + .search( + json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_prefix_search_on_details_with_suffix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + index + .search( + json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3) + index + .search( + json!({"q": "mag", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "1" + }, + { + "id": "3" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_on_title_matching_strategy_all() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Nested search matching strategy all should only return 1 document (ids: 3) + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all", "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + } + ]"###); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Document 3 should appear before documents 1 and 2 + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.desc", "*.title"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ] + "### + ); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() { + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; + + // Document 3 should appear before documents 1 and 2 + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ] + "### + ); + }, + ) + .await; +} diff --git a/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap b/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap index ea55d9c61..c1b01a5e2 100644 --- a/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap +++ 
b/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap @@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "tamo", + "indexUid": "[uuid]", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, diff --git a/crates/meilisearch/tests/settings/distinct.rs b/crates/meilisearch/tests/settings/distinct.rs index 2c5b7517f..a3b1b5276 100644 --- a/crates/meilisearch/tests/settings/distinct.rs +++ b/crates/meilisearch/tests/settings/distinct.rs @@ -3,8 +3,8 @@ use crate::json; #[actix_rt::test] async fn set_and_reset_distinct_attribute() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await; index.wait_task(task1.uid()).await.succeeded(); @@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() { #[actix_rt::test] async fn set_and_reset_distinct_attribute_with_dedicated_route() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await; index.wait_task(update_task1.uid()).await.succeeded(); diff --git a/crates/meilisearch/tests/settings/get_settings.rs b/crates/meilisearch/tests/settings/get_settings.rs index e4c58cbcd..941533dda 100644 --- a/crates/meilisearch/tests/settings/get_settings.rs +++ b/crates/meilisearch/tests/settings/get_settings.rs @@ -11,59 +11,62 @@ macro_rules! test_setting_routes { #[actix_rt::test] async fn get_unexisting_index() { - let server = Server::new().await; - let url = format!("/indexes/test/settings/{}", - stringify!($setting) - .chars() - .map(|c| if c == '_' { '-' } else { c }) - .collect::<String>()); - let (_response, code) = server.service.get(url).await; - assert_eq!(code, 404); - } - - #[actix_rt::test] - async fn update_unexisting_index() { - let server = Server::new().await; - let url = format!("/indexes/test/settings/{}", - stringify!($setting) - .chars() - .map(|c| if c == '_' { '-' } else { c }) - .collect::<String>()); - let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await; - assert_eq!(code, 202, "{}", response); - server.index("").wait_task(0).await; - let (response, code) = server.index("test").get().await; - assert_eq!(code, 200, "{}", response); - } - - #[actix_rt::test] - async fn delete_unexisting_index() { - let server = Server::new().await; - let url = format!("/indexes/test/settings/{}", - stringify!($setting) - .chars() - .map(|c| if c == '_' { '-' } else { c }) - .collect::<String>()); - let (_, code) = server.service.delete(url).await; - assert_eq!(code, 202); - let response = server.index("").wait_task(0).await; - assert_eq!(response["status"], "failed"); - } - - #[actix_rt::test] - async fn get_default() { - let server = Server::new().await; - let index = server.index("test"); - let (response, code) = index.create(None).await; - assert_eq!(code, 202, "{}", response); - index.wait_task(0).await; - let url = format!("/indexes/test/settings/{}", + let server = Server::new_shared(); + let index_name = uuid::Uuid::new_v4().to_string(); + let url = format!("/indexes/{index_name}/settings/{}", stringify!($setting) .chars() .map(|c| if c == '_' { '-' } else { c }) .collect::<String>()); let (response, code) = server.service.get(url).await; - assert_eq!(code, 200,
"{}", response); + assert_eq!(code, 404, "{response}"); + } + + #[actix_rt::test] + async fn update_unexisting_index() { + let server = Server::new_shared(); + let index_name = uuid::Uuid::new_v4().to_string(); + let url = format!("/indexes/{index_name}/settings/{}", + stringify!($setting) + .chars() + .map(|c| if c == '_' { '-' } else { c }) + .collect::()); + let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await; + assert_eq!(code, 202, "{response}"); + let (response, code) = server.service.get(format!("/indixes/{index_name}")).await; + assert_eq!(code, 404, "{response}"); + } + + #[actix_rt::test] + async fn delete_unexisting_index() { + let server = Server::new_shared(); + let index_name = uuid::Uuid::new_v4().to_string(); + let url = format!("/indexes/{index_name}/settings/{}", + stringify!($setting) + .chars() + .map(|c| if c == '_' { '-' } else { c }) + .collect::()); + let (response, code) = server.service.delete(url).await; + assert_eq!(code, 202, "{response}"); + let (response, code) = server.service.get(format!("/indixes/{index_name}")).await; + assert_eq!(code, 404, "{response}"); + } + + #[actix_rt::test] + async fn get_default() { + let server = Server::new_shared(); + let index = server.unique_index(); + let (response, code) = index.create(None).await; + assert_eq!(code, 202, "{response}"); + index.wait_task(response.uid()).await.succeeded(); + let url = format!("/indexes/{}/settings/{}", + index.uid, + stringify!($setting) + .chars() + .map(|c| if c == '_' { '-' } else { c }) + .collect::()); + let (response, code) = server.service.get(url).await; + assert_eq!(code, 200, "{response}"); let expected = crate::json!($default_value); assert_eq!(expected, response); } @@ -195,15 +198,16 @@ test_setting_routes!( #[actix_rt::test] async fn get_settings_unexisting_index() { - let server = Server::new().await; - let (response, code) = server.index("test").settings().await; - assert_eq!(code, 404, "{}", response) + let server = Server::new_shared(); + let index = server.unique_index(); + let (response, code) = index.settings().await; + assert_eq!(code, 404, "{response}") } #[actix_rt::test] async fn get_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, _code) = index.create(None).await; index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.settings().await; @@ -247,9 +251,8 @@ async fn get_settings() { #[actix_rt::test] async fn secrets_are_hidden_in_settings() { - let server = Server::new().await; - - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, _code) = index.create(None).await; index.wait_task(response.uid()).await.succeeded(); @@ -269,11 +272,11 @@ async fn secrets_are_hidden_in_settings() { .await; meili_snap::snapshot!(code, @"202 Accepted"); - meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, - "indexUid": "test", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -282,7 +285,7 @@ async fn 
secrets_are_hidden_in_settings() { let settings_update_uid = response.uid(); - index.wait_task(settings_update_uid).await; + index.wait_task(settings_update_uid).await.succeeded(); let (response, code) = index.settings().await; meili_snap::snapshot!(code, @"200 OK"); @@ -370,16 +373,16 @@ async fn secrets_are_hidden_in_settings() { #[actix_rt::test] async fn error_update_settings_unknown_field() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_response, code) = index.update_settings(json!({"foo": 12})).await; assert_eq!(code, 400); } #[actix_rt::test] async fn test_partial_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await; index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.settings().await; @@ -398,20 +401,18 @@ async fn test_partial_update() { #[actix_rt::test] async fn error_delete_settings_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.delete_settings().await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + index.wait_task(task.uid()).await.failed(); } #[actix_rt::test] async fn reset_all_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -423,7 +424,6 @@ async fn reset_all_settings() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["taskUid"], 0); index.wait_task(response.uid()).await.succeeded(); let (update_task,_status_code) = index @@ -456,17 +456,15 @@ async fn reset_all_settings() { #[actix_rt::test] async fn update_setting_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.update_settings(json!({})).await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - assert_eq!(response["status"], "succeeded"); + index.wait_task(task.uid()).await.succeeded(); let (_response, code) = index.get().await; assert_eq!(code, 200); let (task, _status_code) = index.delete_settings().await; - let response = index.wait_task(task.uid()).await; - assert_eq!(response["status"], "succeeded"); + index.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] @@ -487,8 +485,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() { #[actix_rt::test] async fn error_set_invalid_ranking_rules() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(None).await; let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await; @@ -505,8 +503,8 @@ async fn error_set_invalid_ranking_rules() { #[actix_rt::test] async fn set_and_reset_distinct_attribute_with_dedicated_route() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _code) = 
index.update_distinct_attribute(json!("test")).await; index.wait_task(task.uid()).await.succeeded(); @@ -526,8 +524,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() { #[actix_rt::test] async fn granular_filterable_attributes() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(None).await; let (response, code) = @@ -545,7 +543,7 @@ async fn granular_filterable_attributes() { index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.settings().await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["filterableAttributes"]), @r###" [ { diff --git a/crates/meilisearch/tests/settings/proximity_settings.rs b/crates/meilisearch/tests/settings/proximity_settings.rs index c5897bc51..6de1ffe0e 100644 --- a/crates/meilisearch/tests/settings/proximity_settings.rs +++ b/crates/meilisearch/tests/settings/proximity_settings.rs @@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| { #[actix_rt::test] async fn attribute_scale_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -38,7 +38,7 @@ async fn attribute_scale_search() { "rankingRules": ["words", "typo", "proximity"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); + assert_eq!("202", code.as_str(), "{response:?}"); index.wait_task(response.uid()).await.succeeded(); // the expected order is [1, 3, 2] instead of [3, 1, 2] @@ -99,8 +99,8 @@ async fn attribute_scale_search() { #[actix_rt::test] async fn attribute_scale_phrase_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -167,8 +167,8 @@ async fn attribute_scale_phrase_search() { #[actix_rt::test] async fn word_scale_set_and_reset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -282,8 +282,8 @@ async fn word_scale_set_and_reset() { #[actix_rt::test] async fn attribute_scale_default_ranking_rules() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() { "proximityPrecision": "byAttribute" })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); + assert_eq!("202", code.as_str(), "{response:?}"); index.wait_task(response.uid()).await.succeeded(); // the expected order is [3, 1, 2] diff --git a/crates/meilisearch/tests/settings/tokenizer_customization.rs b/crates/meilisearch/tests/settings/tokenizer_customization.rs index 190918b34..7c58368f7 100644 --- a/crates/meilisearch/tests/settings/tokenizer_customization.rs +++ b/crates/meilisearch/tests/settings/tokenizer_customization.rs @@ -5,8
+5,8 @@ use crate::json; #[actix_rt::test] async fn set_and_reset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _code) = index .update_settings(json!({ @@ -70,8 +70,8 @@ async fn set_and_search() { }, ]); - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (add_task, _status_code) = index.add_documents(documents, None).await; index.wait_task(add_task.uid()).await.succeeded(); @@ -224,8 +224,8 @@ async fn advanced_synergies() { }, ]); - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (add_task, _status_code) = index.add_documents(documents, None).await; index.wait_task(add_task.uid()).await.succeeded(); diff --git a/crates/meilisearch/tests/similar/errors.rs b/crates/meilisearch/tests/similar/errors.rs index 30ff5b145..fa4118fe3 100644 --- a/crates/meilisearch/tests/similar/errors.rs +++ b/crates/meilisearch/tests/similar/errors.rs @@ -6,11 +6,11 @@ use crate::json; #[actix_rt::test] async fn similar_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -26,12 +26,12 @@ async fn similar_unexisting_index() { #[actix_rt::test] async fn similar_unexisting_parameter() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index .similar(json!({"id": 287947, "marin": "hello"}), |response, code| { - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); assert_eq!(response["code"], "bad_request"); }) .await; @@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() { #[actix_rt::test] async fn similar_bad_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -53,7 +53,7 @@ async fn similar_bad_id() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await; snapshot!(code, @"400 Bad Request"); @@ -69,8 +69,8 @@ async fn similar_bad_id() { #[actix_rt::test] async fn similar_bad_ranking_score_threshold() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await; snapshot!(code, @"400 Bad Request"); @@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() { #[actix_rt::test] async fn 
similar_invalid_ranking_score_threshold() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await; snapshot!(code, @"400 Bad Request"); @@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() { #[actix_rt::test] async fn similar_invalid_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -143,7 +143,7 @@ async fn similar_invalid_id() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await; @@ -160,8 +160,8 @@ async fn similar_invalid_id() { #[actix_rt::test] async fn similar_not_found_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -174,7 +174,7 @@ async fn similar_not_found_id() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await; @@ -191,8 +191,8 @@ async fn similar_not_found_id() { #[actix_rt::test] async fn similar_bad_offset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -205,7 +205,7 @@ async fn similar_bad_offset() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await; @@ -233,8 +233,8 @@ async fn similar_bad_offset() { #[actix_rt::test] async fn similar_bad_limit() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -247,7 +247,7 @@ async fn similar_bad_limit() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await; @@ -277,8 +277,8 @@ async fn similar_bad_limit() { async fn similar_bad_filter() { // Since a filter is deserialized as a json Value it will never fail to deserialize. // Thus the error message is not generated by deserr but written by us. 
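// For example, `json!({"id": 287947, "filter": true, "embedder": "manual"})`
// still parses into a valid `Value`, so deserr never reports a type error;
// the route has to inspect the filter itself and build the error response by
// hand (presumably the `invalid_similar_filter` message that the assertions
// later in this test pin down).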
- let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -291,7 +291,7 @@ async fn similar_bad_filter() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); @@ -316,8 +316,8 @@ async fn similar_bad_filter() { #[actix_rt::test] async fn filter_invalid_syntax_object() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() { #[actix_rt::test] async fn filter_invalid_syntax_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() { #[actix_rt::test] async fn filter_invalid_syntax_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() { #[actix_rt::test] async fn filter_invalid_attribute_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() { #[actix_rt::test] async fn filter_invalid_attribute_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); 
- server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() { #[actix_rt::test] async fn filter_reserved_geo_attribute_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() { #[actix_rt::test] async fn filter_reserved_geo_attribute_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() { #[actix_rt::test] async fn filter_reserved_attribute_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() { #[actix_rt::test] async fn filter_reserved_attribute_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() { #[actix_rt::test] async fn filter_reserved_geo_point_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, 
None).await; @@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() { #[actix_rt::test] async fn filter_reserved_geo_point_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() { #[actix_rt::test] async fn similar_bad_retrieve_vectors() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await; @@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() { #[actix_rt::test] async fn similar_bad_embedder() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -820,7 +820,7 @@ async fn similar_bad_embedder() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; diff --git a/crates/meilisearch/tests/stats/mod.rs b/crates/meilisearch/tests/stats/mod.rs index aee626460..f44812014 100644 --- a/crates/meilisearch/tests/stats/mod.rs +++ b/crates/meilisearch/tests/stats/mod.rs @@ -6,8 +6,8 @@ use crate::common::Server; use crate::json; #[actix_rt::test] -async fn get_settings_unexisting_index() { - let server = Server::new().await; +async fn get_version() { + let server = Server::new_shared(); let (response, code) = server.version().await; assert_eq!(code, 200); let version = response.as_object().unwrap(); @@ -18,7 +18,7 @@ async fn get_settings_unexisting_index() { #[actix_rt::test] async fn test_healthyness() { - let server = Server::new().await; + let server = Server::new_shared(); let (response, status_code) = server.service.get("/health").await; assert_eq!(status_code, 200); @@ -55,7 +55,7 @@ async fn stats() { ]); let (response, code) = index.add_documents(documents, None).await; - assert_eq!(code, 202, "{}", response); + assert_eq!(code, 202, "{response}"); assert_eq!(response["taskUid"], 1); index.wait_task(response.uid()).await.succeeded(); @@ -78,8 +78,8 @@ async fn stats() { #[actix_rt::test] async fn add_remove_embeddings() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -216,8 +216,8 @@ async fn add_remove_embeddings() { #[actix_rt::test] async fn add_remove_embedded_documents() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -293,8 +293,8 @@ async fn add_remove_embedded_documents() { #[actix_rt::test] async fn update_embedder_settings() { - let server = 
Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); // 2 embedded documents for 3 embeddings in total // but no embedders are added in the settings yet so we expect 0 embedded documents for 0 embeddings in total diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index f432ef7db..09700d3c5 100644 --- a/crates/meilisearch/tests/tasks/mod.rs +++ b/crates/meilisearch/tests/tasks/mod.rs @@ -1,8 +1,7 @@ mod errors; mod webhook; -use meili_snap::insta::assert_json_snapshot; -use meili_snap::snapshot; +use meili_snap::{json_string, snapshot}; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; @@ -11,14 +10,12 @@ use crate::json; #[actix_rt::test] async fn error_get_unexisting_task_status() { - let server = Server::new().await; - let index = server.index("test"); - let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); - let (response, code) = index.get_task(1).await; + let server = Server::new_shared(); + let index = server.unique_index(); + let (response, code) = index.get_task(u32::MAX as u64).await; let expected_response = json!({ - "message": "Task `1` not found.", + "message": "Task `4294967295` not found.", "code": "task_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#task_not_found" @@ -30,8 +27,8 @@ async fn error_get_unexisting_task_status() { #[actix_rt::test] async fn get_task_status() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (create_task, _status_code) = index.create(None).await; let (add_task, _status_code) = index .add_documents( @@ -42,7 +39,7 @@ async fn get_task_status() { None, ) .await; - index.wait_task(create_task.uid()).await.succeeded(); + server.wait_task(create_task.uid()).await.succeeded(); let (_response, code) = index.get_task(add_task.uid()).await; assert_eq!(code, 200); // TODO check response format, as per #48 @@ -50,10 +47,11 @@ async fn get_task_status() { #[actix_rt::test] async fn list_tasks() { + // Do not use a shared server because we want to assert stuff against the global list of tasks let server = Server::new().await; let index = server.index("test"); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; @@ -64,6 +62,7 @@ async fn list_tasks() { #[actix_rt::test] async fn list_tasks_pagination_and_reverse() { + // do not use a shared server here, as we want to assert tasks ids and we need them to be stable let server = Server::new().await; // First of all we want to create a lot of tasks very quickly. 
The fastest way is to delete a lot of unexisting indexes let mut last_task = None; @@ -71,7 +70,7 @@ async fn list_tasks_pagination_and_reverse() { let index = server.index(format!("test-{i}")); last_task = Some(index.create(None).await.0.uid()); } - server.wait_task(last_task.unwrap()).await; + server.wait_task(last_task.unwrap()).await.succeeded(); let (response, code) = server.tasks_filter("limit=3").await; assert_eq!(code, 200); @@ -103,13 +102,14 @@ async fn list_tasks_pagination_and_reverse() { #[actix_rt::test] async fn list_tasks_with_star_filters() { let server = Server::new().await; + // Do not use a unique index here, as we want to test the `indexUids=*` filter. let index = server.index("test"); let (task, _code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; - let (response, code) = index.service.get("/tasks?indexUids=test").await; + let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); @@ -127,93 +127,102 @@ async fn list_tasks_with_star_filters() { let (response, code) = index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await; - assert_eq!(code, 200, "{:?}", response); + assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = index .service - .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test") + .get(format!( + "/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}", + index.uid + )) .await; - assert_eq!(code, 200, "{:?}", response); + assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = index .service .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*") .await; - assert_eq!(code, 200, "{:?}", response); + assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn list_tasks_status_filtered() { + // Do not use a shared server because we want to assert stuff against the global list of tasks let server = Server::new().await; let index = server.index("test"); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn list_tasks_type_filtered() { + // Do not use a shared server because we want to assert stuff against the global list of tasks let server = 
Server::new().await; let index = server.index("test"); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn list_tasks_invalid_canceled_by_filter() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); - index + server.wait_task(task.uid()).await.succeeded(); + + let (task, _code) = index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; + server.wait_task(task.uid()).await.succeeded(); - let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await; - assert_eq!(code, 200, "{}", response); + let (response, code) = + index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await; + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 0); } #[actix_rt::test] async fn list_tasks_status_and_type_filtered() { + // Do not use a shared server because we want to assert stuff against the global list of tasks let server = Server::new().await; let index = server.index("test"); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 0); let (response, code) = index @@ -223,12 +232,12 @@ async fn list_tasks_status_and_type_filtered() { &[], ) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } macro_rules! assert_valid_summarized_task { - ($response:expr, $task_type:literal, $index:literal) => {{ + ($response:expr, $task_type:literal, $index:tt) => {{ assert_eq!($response.as_object().unwrap().len(), 5); assert!($response["taskUid"].as_u64().is_some()); assert_eq!($response["indexUid"], $index); @@ -242,49 +251,49 @@ macro_rules! 
assert_valid_summarized_task { #[actix_web::test] async fn test_summarized_task_view() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); + let index_uid = index.uid.clone(); let (response, _) = index.create(None).await; - assert_valid_summarized_task!(response, "indexCreation", "test"); + assert_valid_summarized_task!(response, "indexCreation", index_uid); let (response, _) = index.update(None).await; - assert_valid_summarized_task!(response, "indexUpdate", "test"); + assert_valid_summarized_task!(response, "indexUpdate", index_uid); let (response, _) = index.update_settings(json!({})).await; - assert_valid_summarized_task!(response, "settingsUpdate", "test"); + assert_valid_summarized_task!(response, "settingsUpdate", index_uid); let (response, _) = index.update_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test"); + assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid); let (response, _) = index.add_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test"); + assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid); let (response, _) = index.delete_document(1).await; - assert_valid_summarized_task!(response, "documentDeletion", "test"); + assert_valid_summarized_task!(response, "documentDeletion", index_uid); let (response, _) = index.clear_all_documents().await; - assert_valid_summarized_task!(response, "documentDeletion", "test"); + assert_valid_summarized_task!(response, "documentDeletion", index_uid); let (response, _) = index.delete().await; - assert_valid_summarized_task!(response, "indexDeletion", "test"); + assert_valid_summarized_task!(response, "indexDeletion", index_uid); } #[actix_web::test] async fn test_summarized_document_addition_or_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await; - index.wait_task(task.uid()).await.succeeded(); - let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + server.wait_task(task.uid()).await.succeeded(); + let (task, _) = index.get_task(task.uid()).await; + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -302,15 +311,14 @@ async fn test_summarized_document_addition_or_update() { let (task, _status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await; - index.wait_task(task.uid()).await.succeeded(); - let (task, _) = index.get_task(1).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + server.wait_task(task.uid()).await.succeeded(); + let (task, _) = index.get_task(task.uid()).await; + snapshot!(task, @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ 
-329,18 +337,22 @@ async fn test_summarized_document_addition_or_update() { #[actix_web::test] async fn test_summarized_delete_documents_by_batch() { - let server = Server::new().await; - let index = server.index("test"); - let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await; - index.wait_task(task.uid()).await.failed(); - let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + let server = Server::new_shared(); + let index = server.unique_index(); + let non_existing_task_id1 = u32::MAX as u64; + let non_existing_task_id2 = non_existing_task_id1 - 1; + let non_existing_task_id3 = non_existing_task_id1 - 2; + let (task, _status_code) = index + .delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3]) + .await; + server.wait_task(task.uid()).await.failed(); + let (task, _) = index.get_task(task.uid()).await; + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -350,7 +362,7 @@ async fn test_summarized_delete_documents_by_batch() { "originalFilter": null }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -364,15 +376,14 @@ async fn test_summarized_delete_documents_by_batch() { index.create(None).await; let (del_task, _status_code) = index.delete_batch(vec![42]).await; - index.wait_task(del_task.uid()).await.succeeded(); + server.wait_task(del_task.uid()).await.succeeded(); let (task, _) = index.get_task(del_task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -392,20 +403,19 @@ async fn test_summarized_delete_documents_by_batch() { #[actix_web::test] async fn test_summarized_delete_documents_by_filter() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -415,7 +425,7 @@ async fn test_summarized_delete_documents_by_filter() { "originalFilter": "\"doggo = bernese\"" }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -430,15 +440,14 @@ async fn test_summarized_delete_documents_by_filter() { 
index.create(None).await; let (task, _status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -448,7 +457,7 @@ async fn test_summarized_delete_documents_by_filter() { "originalFilter": "\"doggo = bernese\"" }, "error": { - "message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", + "message": "Index `[uuid]`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", "code": "invalid_document_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_filter" @@ -463,15 +472,14 @@ async fn test_summarized_delete_documents_by_filter() { index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await; let (task, _status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 4, - "batchUid": 4, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -491,18 +499,17 @@ async fn test_summarized_delete_documents_by_filter() { #[actix_web::test] async fn test_summarized_delete_document_by_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.delete_document(1).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -512,7 +519,7 @@ async fn test_summarized_delete_document_by_id() { "originalFilter": null }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -526,15 +533,14 @@ async fn test_summarized_delete_document_by_id() { index.create(None).await; let (task, _status_code) = index.delete_document(42).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => 
"[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -554,12 +560,12 @@ async fn test_summarized_delete_document_by_id() { #[actix_web::test] async fn test_summarized_settings_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // here we should find my payload even in the failed task. let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await; - meili_snap::snapshot!(code, @"400 Bad Request"); - meili_snap::snapshot!(meili_snap::json_string!(response), @r###" + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" { "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.", "code": "invalid_settings_ranking_rules", @@ -569,15 +575,14 @@ async fn test_summarized_settings_update() { "###); let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, @@ -605,18 +610,17 @@ async fn test_summarized_settings_update() { #[actix_web::test] async fn test_summarized_index_creation() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexCreation", "canceledBy": null, @@ -632,15 +636,14 @@ async fn test_summarized_index_creation() { "###); let (task, _status_code) = index.create(Some("doggos")).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "indexCreation", "canceledBy": null, @@ -648,7 +651,7 @@ async fn test_summarized_index_creation() { "primaryKey": "doggos" }, "error": { - "message": "Index `test` already exists.", + "message": "Index `[uuid]` 
already exists.", "code": "index_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_already_exists" @@ -663,16 +666,16 @@ async fn test_summarized_index_creation() { #[actix_web::test] async fn test_summarized_index_deletion() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (ret, _code) = index.delete().await; - let task = index.wait_task(ret.uid()).await; + let task = server.wait_task(ret.uid()).await; snapshot!(task, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "indexDeletion", "canceledBy": null, @@ -680,7 +683,7 @@ async fn test_summarized_index_deletion() { "deletedDocuments": 0 }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -697,13 +700,13 @@ async fn test_summarized_index_deletion() { // both tasks may get autobatched and the deleted documents count will be wrong. let (ret, _code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await; - let task = index.wait_task(ret.uid()).await; + let task = server.wait_task(ret.uid()).await; snapshot!(task, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -720,13 +723,13 @@ async fn test_summarized_index_deletion() { "###); let (ret, _code) = index.delete().await; - let task = index.wait_task(ret.uid()).await; + let task = server.wait_task(ret.uid()).await; snapshot!(task, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexDeletion", "canceledBy": null, @@ -743,13 +746,13 @@ async fn test_summarized_index_deletion() { // What happens when you delete an index that doesn't exists. let (ret, _code) = index.delete().await; - let task = index.wait_task(ret.uid()).await; + let task = server.wait_task(ret.uid()).await; snapshot!(task, @r###" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "indexDeletion", "canceledBy": null, @@ -757,7 +760,7 @@ async fn test_summarized_index_deletion() { "deletedDocuments": 0 }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -772,19 +775,18 @@ async fn test_summarized_index_deletion() { #[actix_web::test] async fn test_summarized_index_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // If the index doesn't exist yet, we should get errors with or without the primary key. 
let (task, _status_code) = index.update(None).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "indexUpdate", "canceledBy": null, @@ -792,7 +794,7 @@ async fn test_summarized_index_update() { "primaryKey": null }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -805,15 +807,14 @@ async fn test_summarized_index_update() { "###); let (task, _status_code) = index.update(Some("bones")).await; - index.wait_task(task.uid()).await.failed(); + server.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "indexUpdate", "canceledBy": null, @@ -821,7 +822,7 @@ async fn test_summarized_index_update() { "primaryKey": "bones" }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -837,15 +838,14 @@ async fn test_summarized_index_update() { index.create(None).await; let (task, _status_code) = index.update(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexUpdate", "canceledBy": null, @@ -861,15 +861,14 @@ async fn test_summarized_index_update() { "###); let (task, _status_code) = index.update(Some("bones")).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 4, - "batchUid": 4, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexUpdate", "canceledBy": null, @@ -887,7 +886,7 @@ async fn test_summarized_index_update() { #[actix_web::test] async fn test_summarized_index_swap() { - let server = Server::new().await; + let server = Server::new_shared(); let (task, _status_code) = server .index_swap(json!([ { "indexes": ["doggos", "cattos"] } @@ -895,12 +894,11 @@ async fn test_summarized_index_swap() { .await; server.wait_task(task.uid()).await.failed(); let (task, _) = server.get_task(task.uid()).await; - assert_json_snapshot!(task, - { 
".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "failed", "type": "indexSwap", @@ -928,23 +926,25 @@ async fn test_summarized_index_swap() { } "###); - let (task, _code) = server.index("doggos").create(None).await; + let doggos_index = server.unique_index(); + let (task, _code) = doggos_index.create(None).await; server.wait_task(task.uid()).await.succeeded(); - let (task, _code) = server.index("cattos").create(None).await; + let cattos_index = server.unique_index(); + let (task, _code) = cattos_index.create(None).await; server.wait_task(task.uid()).await.succeeded(); let (task, _code) = server .index_swap(json!([ - { "indexes": ["doggos", "cattos"] } + { "indexes": [doggos_index.uid, cattos_index.uid] } ])) .await; server.wait_task(task.uid()).await.succeeded(); let (task, _) = server.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "indexSwap", @@ -970,20 +970,21 @@ async fn test_summarized_index_swap() { #[actix_web::test] async fn test_summarized_task_cancelation() { - let server = Server::new().await; - let index = server.index("doggos"); + let server = Server::new_shared(); + let index = server.unique_index(); // to avoid being flaky we're only going to cancel an already finished task :( let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); - let (task, _status_code) = server.cancel_tasks("uids=0").await; - index.wait_task(task.uid()).await.succeeded(); + let task_uid = task.uid(); + server.wait_task(task.uid()).await.succeeded(); + let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await; + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "taskCancelation", @@ -991,7 +992,7 @@ async fn test_summarized_task_cancelation() { "details": { "matchedTasks": 1, "canceledTasks": 0, - "originalFilter": "?uids=0" + "originalFilter": "[of]" }, "error": null, "duration": "[duration]", @@ -1004,20 +1005,19 @@ async fn test_summarized_task_cancelation() { #[actix_web::test] async fn test_summarized_task_deletion() { - let server = Server::new().await; - let index = server.index("doggos"); + let server = Server::new_shared(); + let index = server.unique_index(); // to avoid being flaky we're only going to delete an already finished task :( let (task, _status_code) = 
index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = server.delete_tasks("uids=0").await; - index.wait_task(task.uid()).await.succeeded(); + server.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "taskDeletion", @@ -1038,22 +1038,22 @@ async fn test_summarized_task_deletion() { #[actix_web::test] async fn test_summarized_dump_creation() { + // Do not use a shared server because it takes too long to create a dump let server = Server::new().await; let (task, _status_code) = server.create_dump().await; server.wait_task(task.uid()).await; let (task, _) = server.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(task, @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "dumpCreation", "canceledBy": null, "details": { - "dumpUid": "[dumpUid]" + "dumpUid": "[dump_uid]" }, "error": null, "duration": "[duration]", diff --git a/crates/meilisearch/tests/vector/binary_quantized.rs b/crates/meilisearch/tests/vector/binary_quantized.rs index 96e32c1a3..89d32cc50 100644 --- a/crates/meilisearch/tests/vector/binary_quantized.rs +++ b/crates/meilisearch/tests/vector/binary_quantized.rs @@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents; #[actix_rt::test] async fn retrieve_binary_quantize_status_in_the_settings() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() { #[actix_rt::test] async fn binary_quantize_before_sending_documents() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() { #[actix_rt::test] async fn binary_quantize_after_sending_documents() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() { #[actix_rt::test] async fn try_to_disable_binary_quantization() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() { .await; snapshot!(code, @"202 Accepted"); let ret = server.wait_task(response.uid()).await; - snapshot!(ret, @r#" + snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#" { 
"uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "failed", "type": "settingsUpdate", "canceledBy": null, @@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() { } }, "error": { - "message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", + "message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", "code": "invalid_settings_embedders", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_settings_embedders" diff --git a/crates/milli/src/attribute_patterns.rs b/crates/milli/src/attribute_patterns.rs index 00caa2a6d..8da6942a3 100644 --- a/crates/milli/src/attribute_patterns.rs +++ b/crates/milli/src/attribute_patterns.rs @@ -50,7 +50,7 @@ impl AttributePatterns { /// /// * `pattern` - The pattern to match against. /// * `str` - The string to match against the pattern. -fn match_pattern(pattern: &str, str: &str) -> PatternMatch { +pub fn match_pattern(pattern: &str, str: &str) -> PatternMatch { // If the pattern is a wildcard, return Match if pattern == "*" { return PatternMatch::Match; diff --git a/crates/milli/src/search/hybrid.rs b/crates/milli/src/search/hybrid.rs index e07f886c9..b63f6288f 100644 --- a/crates/milli/src/search/hybrid.rs +++ b/crates/milli/src/search/hybrid.rs @@ -1,11 +1,13 @@ use std::cmp::Ordering; +use heed::RoTxn; use itertools::Itertools; use roaring::RoaringBitmap; use crate::score_details::{ScoreDetails, ScoreValue, ScoringStrategy}; +use crate::search::new::{distinct_fid, distinct_single_docid}; use crate::search::SemanticSearch; -use crate::{MatchingWords, Result, Search, SearchResult}; +use crate::{Index, MatchingWords, Result, Search, SearchResult}; struct ScoreWithRatioResult { matching_words: MatchingWords, @@ -91,7 +93,10 @@ impl ScoreWithRatioResult { keyword_results: Self, from: usize, length: usize, - ) -> (SearchResult, u32) { + distinct: Option<&str>, + index: &Index, + rtxn: &RoTxn<'_>, + ) -> Result<(SearchResult, u32)> { #[derive(Clone, Copy)] enum ResultSource { Semantic, @@ -106,8 +111,9 @@ impl ScoreWithRatioResult { vector_results.document_scores.len() + keyword_results.document_scores.len(), ); - let mut documents_seen = RoaringBitmap::new(); - for ((docid, (main_score, _sub_score)), source) in vector_results + let distinct_fid = distinct_fid(distinct, index, rtxn)?; + let mut excluded_documents = RoaringBitmap::new(); + for res in vector_results .document_scores .into_iter() .zip(std::iter::repeat(ResultSource::Semantic)) @@ -121,13 +127,33 @@ impl ScoreWithRatioResult { compare_scores(left, right).is_ge() }, ) - // remove documents we already saw - .filter(|((docid, _), _)| documents_seen.insert(*docid)) + // remove documents we already saw and apply distinct rule + .filter_map(|item @ ((docid, _), _)| { + if !excluded_documents.insert(docid) { + // the document was already added, or is indistinct from an already-added document. 
+ return None; + } + + if let Some(distinct_fid) = distinct_fid { + if let Err(error) = distinct_single_docid( + index, + rtxn, + distinct_fid, + docid, + &mut excluded_documents, + ) { + return Some(Err(error)); + } + } + + Some(Ok(item)) + }) // start skipping **after** the filter .skip(from) // take **after** skipping .take(length) { + let ((docid, (main_score, _sub_score)), source) = res?; if let ResultSource::Semantic = source { semantic_hit_count += 1; } @@ -136,10 +162,24 @@ impl ScoreWithRatioResult { document_scores.push(main_score); } - ( + // compute the set of candidates from both sets + let candidates = vector_results.candidates | keyword_results.candidates; + let must_remove_redundant_candidates = distinct_fid.is_some(); + let candidates = if must_remove_redundant_candidates { + // patch-up the candidates to remove the indistinct documents, then add back the actual hits + let mut candidates = candidates - excluded_documents; + for docid in &documents_ids { + candidates.insert(*docid); + } + candidates + } else { + candidates + }; + + Ok(( SearchResult { matching_words: keyword_results.matching_words, - candidates: vector_results.candidates | keyword_results.candidates, + candidates, documents_ids, document_scores, degraded: vector_results.degraded | keyword_results.degraded, @@ -147,7 +187,7 @@ impl ScoreWithRatioResult { | keyword_results.used_negative_operator, }, semantic_hit_count, - ) + )) } } @@ -226,8 +266,15 @@ impl Search<'_> { let keyword_results = ScoreWithRatioResult::new(keyword_results, 1.0 - semantic_ratio); let vector_results = ScoreWithRatioResult::new(vector_results, semantic_ratio); - let (merge_results, semantic_hit_count) = - ScoreWithRatioResult::merge(vector_results, keyword_results, self.offset, self.limit); + let (merge_results, semantic_hit_count) = ScoreWithRatioResult::merge( + vector_results, + keyword_results, + self.offset, + self.limit, + search.distinct.as_deref(), + search.index, + search.rtxn, + )?; assert!(merge_results.documents_ids.len() <= self.limit); Ok((merge_results, Some(semantic_hit_count))) } diff --git a/crates/milli/src/search/new/bucket_sort.rs b/crates/milli/src/search/new/bucket_sort.rs index ca7a4a986..3c26cad5c 100644 --- a/crates/milli/src/search/new/bucket_sort.rs +++ b/crates/milli/src/search/new/bucket_sort.rs @@ -4,7 +4,9 @@ use super::logger::SearchLogger; use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait}; use super::SearchContext; use crate::score_details::{ScoreDetails, ScoringStrategy}; -use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput}; +use crate::search::new::distinct::{ + apply_distinct_rule, distinct_fid, distinct_single_docid, DistinctOutput, +}; use crate::{Result, TimeBudget}; pub struct BucketSortOutput { @@ -35,16 +37,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( logger.ranking_rules(&ranking_rules); logger.initial_universe(universe); - let distinct_field = match distinct { - Some(distinct) => Some(distinct), - None => ctx.index.distinct_field(ctx.txn)?, - }; - - let distinct_fid = if let Some(field) = distinct_field { - ctx.index.fields_ids_map(ctx.txn)?.id(field) - } else { - None - }; + let distinct_fid = distinct_fid(distinct, ctx.index, ctx.txn)?; if universe.len() < from as u64 { return Ok(BucketSortOutput { diff --git a/crates/milli/src/search/new/distinct.rs b/crates/milli/src/search/new/distinct.rs index 17859b6f8..36172302a 100644 --- a/crates/milli/src/search/new/distinct.rs +++ b/crates/milli/src/search/new/distinct.rs @@ 
-9,7 +9,7 @@ use crate::heed_codec::facet::{ FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetCodec, }; use crate::heed_codec::BytesRefCodec; -use crate::{Index, Result, SearchContext}; +use crate::{FieldId, Index, Result, SearchContext}; pub struct DistinctOutput { pub remaining: RoaringBitmap, @@ -121,3 +121,18 @@ pub fn facet_string_values<'a>( fn facet_values_prefix_key(distinct: u16, id: u32) -> [u8; FID_SIZE + DOCID_SIZE] { concat_arrays::concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes()) } + +pub fn distinct_fid( + query_distinct_field: Option<&str>, + index: &Index, + rtxn: &RoTxn<'_>, +) -> Result<Option<FieldId>> { + let distinct_field = match query_distinct_field { + Some(distinct) => Some(distinct), + None => index.distinct_field(rtxn)?, + }; + + let distinct_fid = + if let Some(field) = distinct_field { index.fields_ids_map(rtxn)?.id(field) } else { None }; + Ok(distinct_fid) +} diff --git a/crates/milli/src/search/new/mod.rs b/crates/milli/src/search/new/mod.rs index 86d0816ba..a65b4076b 100644 --- a/crates/milli/src/search/new/mod.rs +++ b/crates/milli/src/search/new/mod.rs @@ -28,6 +28,7 @@ use std::time::Duration; use bucket_sort::{bucket_sort, BucketSortOutput}; use charabia::{Language, TokenizerBuilder}; use db_cache::DatabaseCache; +pub use distinct::{distinct_fid, distinct_single_docid}; use exact_attribute::ExactAttribute; use graph_based_ranking_rule::{Exactness, Fid, Position, Proximity, Typo}; use heed::RoTxn; @@ -51,6 +52,7 @@ pub use self::geo_sort::{Parameter as GeoSortParameter, Strategy as GeoSortStrategy}; use self::graph_based_ranking_rule::Words; use self::interner::Interned; use self::vector_sort::VectorSort; +use crate::attribute_patterns::{match_pattern, PatternMatch}; use crate::constants::RESERVED_GEO_FIELD_NAME; use crate::index::PrefixSearch; use crate::localized_attributes_rules::LocalizedFieldIds; @@ -119,17 +121,37 @@ impl<'ctx> SearchContext<'ctx> { let searchable_fields_weights = self.index.searchable_fields_and_weights(self.txn)?; let exact_attributes_ids = self.index.exact_attributes_ids(self.txn)?; - let mut wildcard = false; + let mut universal_wildcard = false; let mut restricted_fids = RestrictedFids::default(); for field_name in attributes_to_search_on { if field_name == "*" { - wildcard = true; + universal_wildcard = true; // we cannot early exit as we want to return an error in case of unknown fields continue; } let searchable_weight = searchable_fields_weights.iter().find(|(name, _, _)| name == field_name); + + // The field is not searchable but may contain a wildcard pattern + if searchable_weight.is_none() && field_name.contains("*") { + let matching_searchable_weights: Vec<_> = searchable_fields_weights + .iter() + .filter(|(name, _, _)| match_pattern(field_name, name) == PatternMatch::Match) + .collect(); + + if !matching_searchable_weights.is_empty() { + for (_name, fid, weight) in matching_searchable_weights { + if exact_attributes_ids.contains(fid) { + restricted_fids.exact.push((*fid, *weight)); + } else { + restricted_fids.tolerant.push((*fid, *weight)); + } + } + continue; + } + } + let (fid, weight) = match searchable_weight { // The field id exists and the field is searchable Some((_name, fid, weight)) => (*fid, *weight), @@ -159,7 +181,7 @@ impl<'ctx> SearchContext<'ctx> { }; } - if wildcard { + if universal_wildcard { self.restricted_fids = None; } else { self.restricted_fids = Some(restricted_fids); diff --git a/crates/milli/src/search/new/tests/ngram_split_words.rs
b/crates/milli/src/search/new/tests/ngram_split_words.rs index 8427dd65b..0a89899ed 100644 --- a/crates/milli/src/search/new/tests/ngram_split_words.rs +++ b/crates/milli/src/search/new/tests/ngram_split_words.rs @@ -72,7 +72,7 @@ fn test_2gram_simple() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -103,7 +103,7 @@ fn test_3gram_simple() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -153,7 +153,7 @@ fn test_no_disable_ngrams() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -179,7 +179,7 @@ fn test_2gram_prefix() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -208,7 +208,7 @@ fn test_3gram_prefix() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -260,7 +260,7 @@ fn test_disable_split_words() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); diff --git a/crates/milli/src/search/new/tests/typo.rs b/crates/milli/src/search/new/tests/typo.rs index 1bbe08977..8dd93b102 100644 --- a/crates/milli/src/search/new/tests/typo.rs +++ b/crates/milli/src/search/new/tests/typo.rs @@ -151,7 +151,7 @@ fn test_no_typo() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); diff --git a/crates/milli/src/test_index.rs b/crates/milli/src/test_index.rs index 7759b3e18..dfd570b96 100644 --- a/crates/milli/src/test_index.rs +++ b/crates/milli/src/test_index.rs @@ -19,10 +19,7 @@ use crate::update::{ }; use crate::vector::settings::{EmbedderSource, EmbeddingSettings}; use crate::vector::EmbeddingConfigs; -use crate::{ - db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult, - ThreadPoolNoAbortBuilder, -}; +use crate::{db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult}; pub(crate) struct TempIndex { pub inner: Index, @@ -62,15 +59,8 @@ impl TempIndex { wtxn: &mut RwTxn<'t>, documents: Mmap, ) -> Result<(), crate::error::Error> { - let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = self.inner.read_txn()?; let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?; @@ -153,15 +143,8 @@ impl TempIndex { wtxn: &mut RwTxn<'t>, external_document_ids: Vec<String>, ) -> Result<(), crate::error::Error> { - let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = self.inner.read_txn()?; let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?; @@ -231,15 +214,8 @@ fn aborting_indexation() { let mut wtxn = index.inner.write_txn().unwrap(); let should_abort = AtomicBool::new(false); - let local_pool; let indexer_config = &index.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool)
=> pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = index.inner.read_txn().unwrap(); let db_fields_ids_map = index.inner.fields_ids_map(&rtxn).unwrap(); diff --git a/crates/milli/src/thread_pool_no_abort.rs b/crates/milli/src/thread_pool_no_abort.rs index b57050a63..0c2fbb30d 100644 --- a/crates/milli/src/thread_pool_no_abort.rs +++ b/crates/milli/src/thread_pool_no_abort.rs @@ -54,6 +54,10 @@ impl ThreadPoolNoAbortBuilder { ThreadPoolNoAbortBuilder::default() } + pub fn new_for_indexing() -> ThreadPoolNoAbortBuilder { + ThreadPoolNoAbortBuilder::default().thread_name(|index| format!("indexing-thread:{index}")) + } + pub fn thread_name<F>(mut self, closure: F) -> Self where F: FnMut(usize) -> String + 'static, diff --git a/crates/milli/src/update/index_documents/mod.rs b/crates/milli/src/update/index_documents/mod.rs index e0f85ca2d..f547c68d4 100644 --- a/crates/milli/src/update/index_documents/mod.rs +++ b/crates/milli/src/update/index_documents/mod.rs @@ -33,7 +33,6 @@ use crate::documents::{obkv_to_object, DocumentsBatchReader}; use crate::error::{Error, InternalError}; use crate::index::{PrefixSearch, PrefixSettings}; use crate::progress::Progress; -use crate::thread_pool_no_abort::ThreadPoolNoAbortBuilder; pub use crate::update::index_documents::helpers::CursorClonableMmap; use crate::update::{ IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst, @@ -228,24 +227,7 @@ where let possible_embedding_mistakes = crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution); - let backup_pool; - let pool = match self.indexer_config.thread_pool { - Some(ref pool) => pool, - None => { - // We initialize a backup pool with the default - // settings if none have already been set.
- #[allow(unused_mut)] - let mut pool_builder = ThreadPoolNoAbortBuilder::new(); - - #[cfg(test)] - { - pool_builder = pool_builder.num_threads(1); - } - - backup_pool = pool_builder.build()?; - &backup_pool - } - }; + let pool = &self.indexer_config.thread_pool; // create LMDB writer channel let (lmdb_writer_sx, lmdb_writer_rx): ( diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index 6fb33ad78..eb7fbd4d5 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -1,7 +1,7 @@ use grenad::CompressionType; use super::GrenadParameters; -use crate::thread_pool_no_abort::ThreadPoolNoAbort; +use crate::{thread_pool_no_abort::ThreadPoolNoAbort, ThreadPoolNoAbortBuilder}; #[derive(Debug)] pub struct IndexerConfig { @@ -9,9 +9,10 @@ pub max_nb_chunks: Option<usize>, pub documents_chunk_size: Option<usize>, pub max_memory: Option<usize>, + pub max_threads: Option<usize>, pub chunk_compression_type: CompressionType, pub chunk_compression_level: Option<u32>, - pub thread_pool: Option<ThreadPoolNoAbort>, + pub thread_pool: ThreadPoolNoAbort, pub max_positions_per_attributes: Option<u32>, pub skip_index_budget: bool, } @@ -27,16 +28,39 @@ impl IndexerConfig { } } +/// By default use only 1 thread for indexing in tests +#[cfg(test)] +pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { + let pool = ThreadPoolNoAbortBuilder::new_for_indexing() + .num_threads(1) + .build() + .expect("failed to build default rayon thread pool"); + + (pool, Some(1)) +} + +#[cfg(not(test))] +pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { + let pool = ThreadPoolNoAbortBuilder::new_for_indexing() + .build() + .expect("failed to build default rayon thread pool"); + + (pool, None) +} impl Default for IndexerConfig { fn default() -> Self { + let (thread_pool, max_threads) = default_thread_pool_and_threads(); + Self { + max_threads, + thread_pool, log_every_n: None, max_nb_chunks: None, documents_chunk_size: None, max_memory: None, chunk_compression_type: CompressionType::None, chunk_compression_level: None, - thread_pool: None, max_positions_per_attributes: None, skip_index_budget: false, } diff --git a/crates/milli/src/update/mod.rs b/crates/milli/src/update/mod.rs index 5acb00113..04ce68fc7 100644 --- a/crates/milli/src/update/mod.rs +++ b/crates/milli/src/update/mod.rs @@ -5,7 +5,7 @@ pub use self::concurrent_available_ids::ConcurrentAvailableIds; pub use self::facet::bulk::FacetsUpdateBulk; pub use self::facet::incremental::FacetsUpdateIncrementalInner; pub use self::index_documents::*; -pub use self::indexer_config::IndexerConfig; +pub use self::indexer_config::{default_thread_pool_and_threads, IndexerConfig}; pub use self::new::ChannelCongestion; pub use self::settings::{validate_embedding_settings, Setting, Settings}; pub use self::update_step::UpdateIndexingStep; diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 01cfe338f..517ef3f2d 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -8,7 +8,7 @@ use hashbrown::HashMap; use serde_json::Value; use super::super::cache::BalancedCaches; -use super::facet_document::extract_document_facets; +use super::facet_document::{extract_document_facets, extract_geo_document}; use super::FacetKind; use crate::fields_ids_map::metadata::Metadata; use
crate::filterable_attributes_rules::match_faceted_field; @@ -90,17 +90,12 @@ impl FacetedDocidsExtractor { let mut cached_sorter = context.data.borrow_mut_or_yield(); let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc); let docid = document_change.docid(); - let res = match document_change { - DocumentChange::Deletion(inner) => extract_document_facets( - inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - is_geo_enabled, - &mut |fid, meta, depth, value| { + + // Using a macro avoids borrowing the parameters as mutable in both closures at + // the same time by postponing their creation + macro_rules! facet_fn { + (del) => { + |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { Self::facet_fn_with_options( &context.doc_alloc, cached_sorter.deref_mut(), @@ -114,91 +109,10 @@ impl FacetedDocidsExtractor { depth, value, ) - }, - ), - DocumentChange::Update(inner) => { - let has_changed = inner.has_changed_for_fields( - &mut |field_name| { - match_faceted_field( - field_name, - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - ) - }, - rtxn, - index, - context.db_fields_ids_map, - )?; - let has_changed_for_geo_fields = - inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; - if !has_changed && !has_changed_for_geo_fields { - return Ok(()); } - - extract_document_facets( - inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - is_geo_enabled, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_del_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_del, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, - )?; - - extract_document_facets( - inner.merged(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - is_geo_enabled, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_add_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_add, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, - ) - } - DocumentChange::Insertion(inner) => extract_document_facets( - inner.inserted(), - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - is_geo_enabled, - &mut |fid, meta, depth, value| { + }; + (add) => { + |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { Self::facet_fn_with_options( &context.doc_alloc, cached_sorter.deref_mut(), @@ -212,12 +126,116 @@ impl FacetedDocidsExtractor { depth, value, ) - }, - ), + } + }; + } + + match document_change { + DocumentChange::Deletion(inner) => { + let mut del = facet_fn!(del); + + extract_document_facets( + inner.current(rtxn, index, context.db_fields_ids_map)?, + new_fields_ids_map.deref_mut(), + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + &mut del, + )?; + + if is_geo_enabled { + extract_geo_document( + inner.current(rtxn, index,
context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut del, + )?; + } + } + DocumentChange::Update(inner) => { + let has_changed_for_facets = inner.has_changed_for_fields( + &mut |field_name| { + match_faceted_field( + field_name, + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + ) + }, + rtxn, + index, + context.db_fields_ids_map, + )?; + + // 1. Maybe update doc + if has_changed_for_facets { + extract_document_facets( + inner.current(rtxn, index, context.db_fields_ids_map)?, + new_fields_ids_map.deref_mut(), + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + &mut facet_fn!(del), + )?; + + extract_document_facets( + inner.merged(rtxn, index, context.db_fields_ids_map)?, + new_fields_ids_map.deref_mut(), + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + &mut facet_fn!(add), + )?; + } + + // 2. Maybe update geo + if is_geo_enabled + && inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)? + { + extract_geo_document( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut facet_fn!(del), + )?; + extract_geo_document( + inner.merged(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut facet_fn!(add), + )?; + } + } + DocumentChange::Insertion(inner) => { + let mut add = facet_fn!(add); + + extract_document_facets( + inner.inserted(), + new_fields_ids_map.deref_mut(), + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + &mut add, + )?; + + if is_geo_enabled { + extract_geo_document( + inner.inserted(), + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut add, + )?; + } + } }; del_add_facet_value.send_data(docid, sender, &context.doc_alloc).unwrap(); - res + Ok(()) } #[allow(clippy::too_many_arguments)] diff --git a/crates/milli/src/update/new/extract/faceted/facet_document.rs b/crates/milli/src/update/new/extract/faceted/facet_document.rs index ff15f146d..d0f088bad 100644 --- a/crates/milli/src/update/new/extract/faceted/facet_document.rs +++ b/crates/milli/src/update/new/extract/faceted/facet_document.rs @@ -15,13 +15,11 @@ use crate::{ #[allow(clippy::too_many_arguments)] pub fn extract_document_facets<'doc>( document: impl Document<'doc>, - external_document_id: &str, field_id_map: &mut GlobalFieldsIdsMap, filterable_attributes: &[FilterableAttributesRule], sortable_fields: &HashSet<String>, asc_desc_fields: &HashSet<String>, distinct_field: &Option<String>, - is_geo_enabled: bool, facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>, ) -> Result<()> { // return the match result for the given field name. @@ -101,17 +99,24 @@ pub fn extract_document_facets<'doc>( } } - if is_geo_enabled { - if let Some(geo_value) = document.geo_field()? { - if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)?
{ - let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map - .id_with_metadata_or_insert("_geo.lat") - .zip(field_id_map.id_with_metadata_or_insert("_geo.lng")) - .ok_or(UserError::AttributeLimitReached)?; + Ok(()) +} - facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?; - facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?; - } +pub fn extract_geo_document<'doc>( + document: impl Document<'doc>, + external_document_id: &str, + field_id_map: &mut GlobalFieldsIdsMap, + facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>, +) -> Result<()> { + if let Some(geo_value) = document.geo_field()? { + if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? { + let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map + .id_with_metadata_or_insert("_geo.lat") + .zip(field_id_map.id_with_metadata_or_insert("_geo.lng")) + .ok_or(UserError::AttributeLimitReached)?; + + facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?; + facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?; } } diff --git a/crates/milli/src/update/settings.rs b/crates/milli/src/update/settings.rs index 05607d7a5..f396cd079 100644 --- a/crates/milli/src/update/settings.rs +++ b/crates/milli/src/update/settings.rs @@ -336,7 +336,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { self.primary_key = Setting::Set(primary_key); } - pub fn set_autorize_typos(&mut self, val: bool) { + pub fn set_authorize_typos(&mut self, val: bool) { self.authorize_typos = Setting::Set(val); } diff --git a/crates/milli/src/update/test_settings.rs b/crates/milli/src/update/test_settings.rs index e78765cfb..59e8d9ff1 100644 --- a/crates/milli/src/update/test_settings.rs +++ b/crates/milli/src/update/test_settings.rs @@ -792,7 +792,7 @@ fn test_disable_typo() { index .update_settings_using_wtxn(&mut txn, |settings| { - settings.set_autorize_typos(false); + settings.set_authorize_typos(false); }) .unwrap();
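
A note on the `IndexerConfig` change above: `thread_pool` is now a plain `ThreadPoolNoAbort` rather than an `Option`, so the per-call-site fallback pools are gone and callers either rely on `Default` (which goes through `default_thread_pool_and_threads()`) or build a pool themselves. Below is a minimal sketch of a caller under the new API, assuming only the items this diff shows re-exported (`ThreadPoolNoAbortBuilder` at the milli crate root and `default_thread_pool_and_threads`/`IndexerConfig` from `milli::update`); the function name `indexer_config_with_threads` and its parameter are illustrative, not part of the patch:

```rust
use milli::update::{default_thread_pool_and_threads, IndexerConfig};
use milli::ThreadPoolNoAbortBuilder;

/// Sketch: build an `IndexerConfig` with an explicit thread count,
/// falling back to the defaults introduced in `indexer_config.rs` above.
fn indexer_config_with_threads(max_indexing_threads: Option<usize>) -> IndexerConfig {
    let (thread_pool, max_threads) = match max_indexing_threads {
        Some(count) => {
            // `new_for_indexing()` gives the workers an `indexing-thread:` name
            // prefix, matching the ad-hoc pools this patch removes.
            let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
                .num_threads(count)
                .build()
                .expect("failed to build indexing thread pool");
            (pool, Some(count))
        }
        None => default_thread_pool_and_threads(),
    };

    IndexerConfig { thread_pool, max_threads, ..Default::default() }
}
```

One caveat with this sketch: `..Default::default()` constructs (and immediately drops) a second default pool, so callers that care about startup cost may prefer to spell out the remaining fields instead.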