From 648b2876f65a5f11d12f3e69781c710630bdd46d Mon Sep 17 00:00:00 2001 From: nnethercott Date: Sun, 27 Apr 2025 00:51:26 +0200 Subject: [PATCH 001/131] Create temp threadpool with all CPUs in dump --- .../src/scheduler/process_index_operation.rs | 9 ++++++--- crates/meilisearch/src/lib.rs | 19 +++++++++++++++++++ crates/meilisearch/src/option.rs | 4 ++-- crates/milli/src/index.rs | 9 ++++++--- .../milli/src/update/index_documents/mod.rs | 4 +++- crates/milli/src/update/indexer_config.rs | 6 ++++-- 6 files changed, 40 insertions(+), 11 deletions(-) diff --git a/crates/index-scheduler/src/scheduler/process_index_operation.rs b/crates/index-scheduler/src/scheduler/process_index_operation.rs index 9b12d61cf..68a1de25a 100644 --- a/crates/index-scheduler/src/scheduler/process_index_operation.rs +++ b/crates/index-scheduler/src/scheduler/process_index_operation.rs @@ -115,7 +115,8 @@ impl IndexScheduler { let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { + let pool_guard = indexer_config.thread_pool.read().unwrap(); + let pool = match &*pool_guard { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new() @@ -268,7 +269,8 @@ impl IndexScheduler { if task.error.is_none() { let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { + let pool_guard = indexer_config.thread_pool.read().unwrap(); + let pool = match &*pool_guard { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new() @@ -431,7 +433,8 @@ impl IndexScheduler { if !tasks.iter().all(|res| res.error.is_some()) { let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { + let pool_guard = indexer_config.thread_pool.read().unwrap(); + let pool = match &*pool_guard { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new() diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 761726d83..4f31606f6 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -38,6 +38,7 @@ use meilisearch_auth::{open_auth_store_env, AuthController}; use meilisearch_types::milli::constants::VERSION_MAJOR; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod}; +use meilisearch_types::milli::ThreadPoolNoAbortBuilder; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::KindWithContent; use meilisearch_types::versioning::{ @@ -505,6 +506,18 @@ fn import_dump( let indexer_config = index_scheduler.indexer_config(); + // Use all cpus to index a dump + let pool_before = { + let all_cpus = num_cpus::get(); + + let temp_pool = ThreadPoolNoAbortBuilder::new() + .thread_name(|index| format!("indexing-thread:{index}")) + .num_threads(all_cpus) + .build()?; + + indexer_config.thread_pool.write().unwrap().replace(temp_pool) + }; + // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might // try to process tasks while we're trying to import the indexes. @@ -576,6 +589,12 @@ fn import_dump( index_scheduler.refresh_index_stats(&uid)?; } + // Restore original thread pool after dump + { + let mut guard = indexer_config.thread_pool.write().unwrap(); + *guard = pool_before; + } + // 5. Import the queue let mut index_scheduler_dump = index_scheduler.register_dumped_task()?; // 5.1. 
Import the batches diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index c71bf16c0..41dd05651 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -6,7 +6,7 @@ use std::num::{NonZeroUsize, ParseIntError}; use std::ops::Deref; use std::path::PathBuf; use std::str::FromStr; -use std::sync::Arc; +use std::sync::{Arc, RwLock}; use std::{env, fmt, fs}; use byte_unit::{Byte, ParseError, UnitType}; @@ -765,7 +765,7 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { Ok(Self { log_every_n: Some(DEFAULT_LOG_EVERY_N), max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize), - thread_pool: Some(thread_pool), + thread_pool: RwLock::new(Some(thread_pool)), max_positions_per_attributes: None, skip_index_budget: other.skip_index_budget, ..Default::default() diff --git a/crates/milli/src/index.rs b/crates/milli/src/index.rs index 1f006b316..779185ca2 100644 --- a/crates/milli/src/index.rs +++ b/crates/milli/src/index.rs @@ -1936,7 +1936,8 @@ pub(crate) mod tests { ) -> Result<(), crate::error::Error> { let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { + let pool_guard = indexer_config.thread_pool.read().unwrap(); + let pool = match &*pool_guard { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); @@ -2030,7 +2031,8 @@ pub(crate) mod tests { ) -> Result<(), crate::error::Error> { let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { + let pool_guard = indexer_config.thread_pool.read().unwrap(); + let pool = match &*pool_guard { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); @@ -2109,7 +2111,8 @@ pub(crate) mod tests { let local_pool; let indexer_config = &index.indexer_config; - let pool = match &indexer_config.thread_pool { + let pool_guard = indexer_config.thread_pool.read().unwrap(); + let pool = match &*pool_guard { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); diff --git a/crates/milli/src/update/index_documents/mod.rs b/crates/milli/src/update/index_documents/mod.rs index 4acb78b9a..1f962ae9f 100644 --- a/crates/milli/src/update/index_documents/mod.rs +++ b/crates/milli/src/update/index_documents/mod.rs @@ -228,8 +228,10 @@ where let possible_embedding_mistakes = crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution); + let pool_guard = self.indexer_config.thread_pool.read().unwrap(); + let backup_pool; - let pool = match self.indexer_config.thread_pool { + let pool = match &*pool_guard { Some(ref pool) => pool, None => { // We initialize a backup pool with the default diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index 6fb33ad78..b3559190f 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -1,3 +1,5 @@ +use std::sync::RwLock; + use grenad::CompressionType; use super::GrenadParameters; @@ -11,7 +13,7 @@ pub struct IndexerConfig { pub max_memory: Option, pub chunk_compression_type: CompressionType, pub chunk_compression_level: Option, - pub thread_pool: Option, + pub thread_pool: RwLock>, pub max_positions_per_attributes: Option, pub skip_index_budget: bool, } @@ -36,7 +38,7 @@ impl Default for IndexerConfig { max_memory: None, chunk_compression_type: CompressionType::None, chunk_compression_level: None, - thread_pool: None, + thread_pool: RwLock::new(None), 
max_positions_per_attributes: None, skip_index_budget: false, } From 3b773b3416d945c5f1676d6addfc2672cdbc1193 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 28 Apr 2025 11:45:21 +0200 Subject: [PATCH 002/131] Revert thread_pool type back to Option in config --- .../src/scheduler/process_index_operation.rs | 9 +++------ crates/meilisearch/src/lib.rs | 20 +++++++------------ crates/meilisearch/src/option.rs | 4 ++-- crates/milli/src/index.rs | 9 +++------ .../milli/src/update/index_documents/mod.rs | 4 +--- crates/milli/src/update/indexer_config.rs | 20 +++++++++++++++---- 6 files changed, 32 insertions(+), 34 deletions(-) diff --git a/crates/index-scheduler/src/scheduler/process_index_operation.rs b/crates/index-scheduler/src/scheduler/process_index_operation.rs index 68a1de25a..9b12d61cf 100644 --- a/crates/index-scheduler/src/scheduler/process_index_operation.rs +++ b/crates/index-scheduler/src/scheduler/process_index_operation.rs @@ -115,8 +115,7 @@ impl IndexScheduler { let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool_guard = indexer_config.thread_pool.read().unwrap(); - let pool = match &*pool_guard { + let pool = match &indexer_config.thread_pool { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new() @@ -269,8 +268,7 @@ impl IndexScheduler { if task.error.is_none() { let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool_guard = indexer_config.thread_pool.read().unwrap(); - let pool = match &*pool_guard { + let pool = match &indexer_config.thread_pool { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new() @@ -433,8 +431,7 @@ impl IndexScheduler { if !tasks.iter().all(|res| res.error.is_some()) { let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool_guard = indexer_config.thread_pool.read().unwrap(); - let pool = match &*pool_guard { + let pool = match &indexer_config.thread_pool { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new() diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 4f31606f6..df45dc63b 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -37,7 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions}; use meilisearch_auth::{open_auth_store_env, AuthController}; use meilisearch_types::milli::constants::VERSION_MAJOR; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; -use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod}; +use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig}; use meilisearch_types::milli::ThreadPoolNoAbortBuilder; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::KindWithContent; @@ -504,10 +504,10 @@ fn import_dump( let network = dump_reader.network()?.cloned().unwrap_or_default(); index_scheduler.put_network(network)?; - let indexer_config = index_scheduler.indexer_config(); + let mut indexer_config = IndexerConfig::clone_no_threadpool(index_scheduler.indexer_config()); - // Use all cpus to index a dump - let pool_before = { + // 3.1 Use all cpus to index the import dump + indexer_config.thread_pool = { let all_cpus = num_cpus::get(); let temp_pool = ThreadPoolNoAbortBuilder::new() @@ -515,7 +515,7 @@ fn import_dump( .num_threads(all_cpus) .build()?; - indexer_config.thread_pool.write().unwrap().replace(temp_pool) + Some(temp_pool) }; // /!\ The tasks must be imported 
AFTER importing the indexes or else the scheduler might @@ -533,7 +533,7 @@ fn import_dump( let mut wtxn = index.write_txn()?; - let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config); + let mut builder = milli::update::Settings::new(&mut wtxn, &index, &indexer_config); // 4.1 Import the primary key if there is one. if let Some(ref primary_key) = metadata.primary_key { builder.set_primary_key(primary_key.to_string()); @@ -568,7 +568,7 @@ fn import_dump( let builder = milli::update::IndexDocuments::new( &mut wtxn, &index, - indexer_config, + &indexer_config, IndexDocumentsConfig { update_method: IndexDocumentsMethod::ReplaceDocuments, ..Default::default() @@ -589,12 +589,6 @@ fn import_dump( index_scheduler.refresh_index_stats(&uid)?; } - // Restore original thread pool after dump - { - let mut guard = indexer_config.thread_pool.write().unwrap(); - *guard = pool_before; - } - // 5. Import the queue let mut index_scheduler_dump = index_scheduler.register_dumped_task()?; // 5.1. Import the batches diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index 41dd05651..c71bf16c0 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -6,7 +6,7 @@ use std::num::{NonZeroUsize, ParseIntError}; use std::ops::Deref; use std::path::PathBuf; use std::str::FromStr; -use std::sync::{Arc, RwLock}; +use std::sync::Arc; use std::{env, fmt, fs}; use byte_unit::{Byte, ParseError, UnitType}; @@ -765,7 +765,7 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { Ok(Self { log_every_n: Some(DEFAULT_LOG_EVERY_N), max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize), - thread_pool: RwLock::new(Some(thread_pool)), + thread_pool: Some(thread_pool), max_positions_per_attributes: None, skip_index_budget: other.skip_index_budget, ..Default::default() diff --git a/crates/milli/src/index.rs b/crates/milli/src/index.rs index 779185ca2..1f006b316 100644 --- a/crates/milli/src/index.rs +++ b/crates/milli/src/index.rs @@ -1936,8 +1936,7 @@ pub(crate) mod tests { ) -> Result<(), crate::error::Error> { let local_pool; let indexer_config = &self.indexer_config; - let pool_guard = indexer_config.thread_pool.read().unwrap(); - let pool = match &*pool_guard { + let pool = match &indexer_config.thread_pool { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); @@ -2031,8 +2030,7 @@ pub(crate) mod tests { ) -> Result<(), crate::error::Error> { let local_pool; let indexer_config = &self.indexer_config; - let pool_guard = indexer_config.thread_pool.read().unwrap(); - let pool = match &*pool_guard { + let pool = match &indexer_config.thread_pool { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); @@ -2111,8 +2109,7 @@ pub(crate) mod tests { let local_pool; let indexer_config = &index.indexer_config; - let pool_guard = indexer_config.thread_pool.read().unwrap(); - let pool = match &*pool_guard { + let pool = match &indexer_config.thread_pool { Some(pool) => pool, None => { local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); diff --git a/crates/milli/src/update/index_documents/mod.rs b/crates/milli/src/update/index_documents/mod.rs index 1f962ae9f..4acb78b9a 100644 --- a/crates/milli/src/update/index_documents/mod.rs +++ b/crates/milli/src/update/index_documents/mod.rs @@ -228,10 +228,8 @@ where let possible_embedding_mistakes = crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution); - let pool_guard = 
self.indexer_config.thread_pool.read().unwrap(); - let backup_pool; - let pool = match &*pool_guard { + let pool = match self.indexer_config.thread_pool { Some(ref pool) => pool, None => { // We initialize a backup pool with the default diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index b3559190f..a534a21e9 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -1,5 +1,3 @@ -use std::sync::RwLock; - use grenad::CompressionType; use super::GrenadParameters; @@ -13,7 +11,7 @@ pub struct IndexerConfig { pub max_memory: Option, pub chunk_compression_type: CompressionType, pub chunk_compression_level: Option, - pub thread_pool: RwLock>, + pub thread_pool: Option, pub max_positions_per_attributes: Option, pub skip_index_budget: bool, } @@ -27,6 +25,20 @@ impl IndexerConfig { max_nb_chunks: self.max_nb_chunks, } } + + pub fn clone_no_threadpool(other: &IndexerConfig) -> Self { + Self { + log_every_n: other.log_every_n.clone(), + max_nb_chunks: other.max_nb_chunks.clone(), + documents_chunk_size: other.documents_chunk_size.clone(), + max_memory: other.max_memory.clone(), + chunk_compression_type: other.chunk_compression_type.clone(), + chunk_compression_level: other.chunk_compression_level.clone(), + max_positions_per_attributes: other.max_positions_per_attributes.clone(), + skip_index_budget: other.skip_index_budget.clone(), + thread_pool: None, + } + } } impl Default for IndexerConfig { @@ -38,7 +50,7 @@ impl Default for IndexerConfig { max_memory: None, chunk_compression_type: CompressionType::None, chunk_compression_level: None, - thread_pool: RwLock::new(None), + thread_pool: None, max_positions_per_attributes: None, skip_index_budget: false, } From 89aff2081c81a2ed4e767228d4e19f073e5a698a Mon Sep 17 00:00:00 2001 From: nnethercott Date: Wed, 30 Apr 2025 14:17:32 +0200 Subject: [PATCH 003/131] Fix clippy warnings --- crates/milli/src/update/indexer_config.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index a534a21e9..433273fac 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -28,14 +28,14 @@ impl IndexerConfig { pub fn clone_no_threadpool(other: &IndexerConfig) -> Self { Self { - log_every_n: other.log_every_n.clone(), - max_nb_chunks: other.max_nb_chunks.clone(), - documents_chunk_size: other.documents_chunk_size.clone(), - max_memory: other.max_memory.clone(), - chunk_compression_type: other.chunk_compression_type.clone(), - chunk_compression_level: other.chunk_compression_level.clone(), - max_positions_per_attributes: other.max_positions_per_attributes.clone(), - skip_index_budget: other.skip_index_budget.clone(), + log_every_n: other.log_every_n, + max_nb_chunks: other.max_nb_chunks, + documents_chunk_size: other.documents_chunk_size, + max_memory: other.max_memory, + chunk_compression_type: other.chunk_compression_type, + chunk_compression_level: other.chunk_compression_level, + max_positions_per_attributes: other.max_positions_per_attributes, + skip_index_budget: other.skip_index_budget, thread_pool: None, } } From 2ac826edca956a71c7d4976a06474386a6baf395 Mon Sep 17 00:00:00 2001 From: Nate Nethercott <53127799+nnethercott@users.noreply.github.com> Date: Thu, 1 May 2025 14:48:59 +0200 Subject: [PATCH 004/131] Apply suggested changes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Co-authored-by: Clément Renault Update crates/meilisearch/src/lib.rs Co-authored-by: Clément Renault --- crates/meilisearch/src/lib.rs | 4 ++-- crates/milli/src/update/indexer_config.rs | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index df45dc63b..3b3c94230 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -37,7 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions}; use meilisearch_auth::{open_auth_store_env, AuthController}; use meilisearch_types::milli::constants::VERSION_MAJOR; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; -use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig}; +use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod}; use meilisearch_types::milli::ThreadPoolNoAbortBuilder; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::KindWithContent; @@ -504,7 +504,7 @@ fn import_dump( let network = dump_reader.network()?.cloned().unwrap_or_default(); index_scheduler.put_network(network)?; - let mut indexer_config = IndexerConfig::clone_no_threadpool(index_scheduler.indexer_config()); + let mut indexer_config = index_scheduler.indexer_config().clone_no_threadpool(); // 3.1 Use all cpus to index the import dump indexer_config.thread_pool = { diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index 433273fac..f9503c48e 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -26,16 +26,16 @@ impl IndexerConfig { } } - pub fn clone_no_threadpool(other: &IndexerConfig) -> Self { + pub fn clone_no_threadpool(&self) -> Self { Self { - log_every_n: other.log_every_n, - max_nb_chunks: other.max_nb_chunks, - documents_chunk_size: other.documents_chunk_size, - max_memory: other.max_memory, - chunk_compression_type: other.chunk_compression_type, - chunk_compression_level: other.chunk_compression_level, - max_positions_per_attributes: other.max_positions_per_attributes, - skip_index_budget: other.skip_index_budget, + log_every_n: self.log_every_n, + max_nb_chunks: self.max_nb_chunks, + documents_chunk_size: self.documents_chunk_size, + max_memory: self.max_memory, + chunk_compression_type: self.chunk_compression_type, + chunk_compression_level: self.chunk_compression_level, + max_positions_per_attributes: self.max_positions_per_attributes, + skip_index_budget: self.skip_index_budget, thread_pool: None, } } From 47a7ed93d334fd423619514c5e91d041995b8ea6 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Tue, 6 May 2025 09:10:09 +0200 Subject: [PATCH 005/131] feat: Make MaxThreads None by default --- crates/meilisearch/src/lib.rs | 32 ++++++++++-------- crates/meilisearch/src/option.rs | 40 +++++++++++++---------- crates/milli/src/update/indexer_config.rs | 3 ++ 3 files changed, 44 insertions(+), 31 deletions(-) diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 3b3c94230..7310260f6 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -504,18 +504,22 @@ fn import_dump( let network = dump_reader.network()?.cloned().unwrap_or_default(); index_scheduler.put_network(network)?; - let mut indexer_config = index_scheduler.indexer_config().clone_no_threadpool(); - - // 3.1 Use all cpus to index the import dump - 
indexer_config.thread_pool = { - let all_cpus = num_cpus::get(); - - let temp_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|index| format!("indexing-thread:{index}")) - .num_threads(all_cpus) - .build()?; - - Some(temp_pool) + // 3.1 Use all cpus to process dump if max_indexing_threads not configured + let backup_config; + let indexer_config = if index_scheduler.indexer_config().max_threads.is_none() { + let mut _config = index_scheduler.indexer_config().clone_no_threadpool(); + _config.thread_pool = { + Some( + ThreadPoolNoAbortBuilder::new() + .thread_name(|index| format!("indexing-thread:{index}")) + .num_threads(num_cpus::get()) + .build()?, + ) + }; + backup_config = _config; + &backup_config + } else { + index_scheduler.indexer_config() }; // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might @@ -533,7 +537,7 @@ fn import_dump( let mut wtxn = index.write_txn()?; - let mut builder = milli::update::Settings::new(&mut wtxn, &index, &indexer_config); + let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config); // 4.1 Import the primary key if there is one. if let Some(ref primary_key) = metadata.primary_key { builder.set_primary_key(primary_key.to_string()); @@ -568,7 +572,7 @@ fn import_dump( let builder = milli::update::IndexDocuments::new( &mut wtxn, &index, - &indexer_config, + indexer_config, IndexDocumentsConfig { update_method: IndexDocumentsMethod::ReplaceDocuments, ..Default::default() diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index c71bf16c0..259fd501f 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -746,10 +746,12 @@ impl IndexerOpts { max_indexing_memory.to_string(), ); } - export_to_env_if_not_present( - MEILI_MAX_INDEXING_THREADS, - max_indexing_threads.0.to_string(), - ); + if let Some(max_indexing_threads) = max_indexing_threads.0 { + export_to_env_if_not_present( + MEILI_MAX_INDEXING_THREADS, + max_indexing_threads.to_string(), + ); + } } } @@ -757,14 +759,18 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { type Error = anyhow::Error; fn try_from(other: &IndexerOpts) -> Result { + // use 1/2 cpu threads if no value specified + let max_indexing_threads = other.max_indexing_threads.unwrap_or(num_cpus::get() / 2); + let thread_pool = ThreadPoolNoAbortBuilder::new() .thread_name(|index| format!("indexing-thread:{index}")) - .num_threads(*other.max_indexing_threads) + .num_threads(max_indexing_threads) .build()?; Ok(Self { log_every_n: Some(DEFAULT_LOG_EVERY_N), max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize), + max_threads: *other.max_indexing_threads, thread_pool: Some(thread_pool), max_positions_per_attributes: None, skip_index_budget: other.skip_index_budget, @@ -828,31 +834,31 @@ fn total_memory_bytes() -> Option { } } -#[derive(Debug, Clone, Copy, Deserialize, Serialize)] -pub struct MaxThreads(usize); +#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)] +pub struct MaxThreads(Option); impl FromStr for MaxThreads { type Err = ParseIntError; - fn from_str(s: &str) -> Result { - usize::from_str(s).map(Self) - } -} - -impl Default for MaxThreads { - fn default() -> Self { - MaxThreads(num_cpus::get() / 2) + fn from_str(s: &str) -> Result { + if s.is_empty() { + return Ok(MaxThreads::default()); + } + usize::from_str(s).map(Some).map(MaxThreads) } } impl fmt::Display for MaxThreads { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) + match self.0 { + Some(threads) => 
write!(f, "{}", threads), + None => Ok(()), + } } } impl Deref for MaxThreads { - type Target = usize; + type Target = Option; fn deref(&self) -> &Self::Target { &self.0 diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index f9503c48e..e19649a0d 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -9,6 +9,7 @@ pub struct IndexerConfig { pub max_nb_chunks: Option, pub documents_chunk_size: Option, pub max_memory: Option, + pub max_threads: Option, pub chunk_compression_type: CompressionType, pub chunk_compression_level: Option, pub thread_pool: Option, @@ -32,6 +33,7 @@ impl IndexerConfig { max_nb_chunks: self.max_nb_chunks, documents_chunk_size: self.documents_chunk_size, max_memory: self.max_memory, + max_threads: self.max_threads, chunk_compression_type: self.chunk_compression_type, chunk_compression_level: self.chunk_compression_level, max_positions_per_attributes: self.max_positions_per_attributes, @@ -48,6 +50,7 @@ impl Default for IndexerConfig { max_nb_chunks: None, documents_chunk_size: None, max_memory: None, + max_threads: None, chunk_compression_type: CompressionType::None, chunk_compression_level: None, thread_pool: None, From 53f32a7dd78f945d7c126b3696ace419da1ba7af Mon Sep 17 00:00:00 2001 From: nnethercott Date: Wed, 7 May 2025 17:00:08 +0200 Subject: [PATCH 006/131] refactor: change thread_pool from Option to ThreadPoolNoAbort --- .../src/scheduler/process_index_operation.rs | 38 ++----------------- crates/meilisearch/src/lib.rs | 26 ++++++------- crates/meilisearch/src/option.rs | 2 +- crates/milli/src/index.rs | 28 ++------------ .../milli/src/update/index_documents/mod.rs | 20 +--------- crates/milli/src/update/indexer_config.rs | 34 ++++++++--------- 6 files changed, 38 insertions(+), 110 deletions(-) diff --git a/crates/index-scheduler/src/scheduler/process_index_operation.rs b/crates/index-scheduler/src/scheduler/process_index_operation.rs index 9b12d61cf..093c6209d 100644 --- a/crates/index-scheduler/src/scheduler/process_index_operation.rs +++ b/crates/index-scheduler/src/scheduler/process_index_operation.rs @@ -5,7 +5,7 @@ use meilisearch_types::milli::documents::PrimaryKey; use meilisearch_types::milli::progress::Progress; use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction}; use meilisearch_types::milli::update::DocumentAdditionResult; -use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder}; +use meilisearch_types::milli::{self, ChannelCongestion, Filter}; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::{Details, KindWithContent, Status, Task}; use meilisearch_types::Index; @@ -113,18 +113,8 @@ impl IndexScheduler { } } - let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|i| format!("indexing-thread-{i}")) - .build() - .unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges); let (document_changes, operation_stats, primary_key) = indexer @@ -266,18 +256,8 @@ impl IndexScheduler { let mut congestion = None; if task.error.is_none() { - let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = 
ThreadPoolNoAbortBuilder::new() - .thread_name(|i| format!("indexing-thread-{i}")) - .build() - .unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let candidates_count = candidates.len(); progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges); @@ -429,18 +409,8 @@ impl IndexScheduler { let mut congestion = None; if !tasks.iter().all(|res| res.error.is_some()) { - let local_pool; let indexer_config = self.index_mapper.indexer_config(); - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|i| format!("indexing-thread-{i}")) - .build() - .unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; progress.update_progress(DocumentDeletionProgress::DeleteDocuments); let mut indexer = indexer::DocumentDeletion::new(); diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 7310260f6..9364bc83d 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -37,7 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions}; use meilisearch_auth::{open_auth_store_env, AuthController}; use meilisearch_types::milli::constants::VERSION_MAJOR; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; -use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod}; +use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig}; use meilisearch_types::milli::ThreadPoolNoAbortBuilder; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::KindWithContent; @@ -504,22 +504,22 @@ fn import_dump( let network = dump_reader.network()?.cloned().unwrap_or_default(); index_scheduler.put_network(network)?; - // 3.1 Use all cpus to process dump if max_indexing_threads not configured + // 3.1 Use all cpus to process dump if a) `max_indexing_threads` not configured and + // b) we're not executing from within a test let backup_config; - let indexer_config = if index_scheduler.indexer_config().max_threads.is_none() { - let mut _config = index_scheduler.indexer_config().clone_no_threadpool(); - _config.thread_pool = { - Some( - ThreadPoolNoAbortBuilder::new() - .thread_name(|index| format!("indexing-thread:{index}")) - .num_threads(num_cpus::get()) - .build()?, - ) - }; + let base_config = index_scheduler.indexer_config(); + + let indexer_config = if base_config.max_threads.is_none() && !cfg!(test) { + let thread_pool = ThreadPoolNoAbortBuilder::new() + .thread_name(|index| format!("indexing-thread:{index}")) + .num_threads(num_cpus::get()) + .build()?; + + let _config = IndexerConfig { thread_pool, ..*base_config }; backup_config = _config; &backup_config } else { - index_scheduler.indexer_config() + base_config }; // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index 259fd501f..8dcbdcfca 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -768,10 +768,10 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { .build()?; Ok(Self { + thread_pool, log_every_n: Some(DEFAULT_LOG_EVERY_N), max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize), max_threads: *other.max_indexing_threads, - thread_pool: Some(thread_pool), max_positions_per_attributes: None, skip_index_budget: other.skip_index_budget, ..Default::default() diff --git 
a/crates/milli/src/index.rs b/crates/milli/src/index.rs index 1f006b316..948d0fb0d 100644 --- a/crates/milli/src/index.rs +++ b/crates/milli/src/index.rs @@ -1893,7 +1893,6 @@ pub(crate) mod tests { use crate::vector::EmbeddingConfigs; use crate::{ db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult, - ThreadPoolNoAbortBuilder, }; pub(crate) struct TempIndex { @@ -1934,15 +1933,8 @@ pub(crate) mod tests { wtxn: &mut RwTxn<'t>, documents: Mmap, ) -> Result<(), crate::error::Error> { - let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = self.inner.read_txn()?; let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?; @@ -2028,15 +2020,8 @@ pub(crate) mod tests { wtxn: &mut RwTxn<'t>, external_document_ids: Vec, ) -> Result<(), crate::error::Error> { - let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = self.inner.read_txn()?; let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?; @@ -2107,15 +2092,8 @@ pub(crate) mod tests { let mut wtxn = index.inner.write_txn().unwrap(); let should_abort = AtomicBool::new(false); - let local_pool; let indexer_config = &index.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = index.inner.read_txn().unwrap(); let db_fields_ids_map = index.inner.fields_ids_map(&rtxn).unwrap(); diff --git a/crates/milli/src/update/index_documents/mod.rs b/crates/milli/src/update/index_documents/mod.rs index 4acb78b9a..379b991e0 100644 --- a/crates/milli/src/update/index_documents/mod.rs +++ b/crates/milli/src/update/index_documents/mod.rs @@ -33,7 +33,6 @@ use crate::documents::{obkv_to_object, DocumentsBatchReader}; use crate::error::{Error, InternalError}; use crate::index::{PrefixSearch, PrefixSettings}; use crate::progress::Progress; -use crate::thread_pool_no_abort::ThreadPoolNoAbortBuilder; pub use crate::update::index_documents::helpers::CursorClonableMmap; use crate::update::{ IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst, @@ -228,24 +227,7 @@ where let possible_embedding_mistakes = crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution); - let backup_pool; - let pool = match self.indexer_config.thread_pool { - Some(ref pool) => pool, - None => { - // We initialize a backup pool with the default - // settings if none have already been set. 
- #[allow(unused_mut)] - let mut pool_builder = ThreadPoolNoAbortBuilder::new(); - - #[cfg(test)] - { - pool_builder = pool_builder.num_threads(1); - } - - backup_pool = pool_builder.build()?; - &backup_pool - } - }; + let pool = &self.indexer_config.thread_pool; // create LMDB writer channel let (lmdb_writer_sx, lmdb_writer_rx): ( diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index e19649a0d..c6ae2b859 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -1,7 +1,7 @@ use grenad::CompressionType; use super::GrenadParameters; -use crate::thread_pool_no_abort::ThreadPoolNoAbort; +use crate::{thread_pool_no_abort::ThreadPoolNoAbort, ThreadPoolNoAbortBuilder}; #[derive(Debug)] pub struct IndexerConfig { @@ -12,7 +12,7 @@ pub struct IndexerConfig { pub max_threads: Option, pub chunk_compression_type: CompressionType, pub chunk_compression_level: Option, - pub thread_pool: Option, + pub thread_pool: ThreadPoolNoAbort, pub max_positions_per_attributes: Option, pub skip_index_budget: bool, } @@ -26,25 +26,23 @@ impl IndexerConfig { max_nb_chunks: self.max_nb_chunks, } } - - pub fn clone_no_threadpool(&self) -> Self { - Self { - log_every_n: self.log_every_n, - max_nb_chunks: self.max_nb_chunks, - documents_chunk_size: self.documents_chunk_size, - max_memory: self.max_memory, - max_threads: self.max_threads, - chunk_compression_type: self.chunk_compression_type, - chunk_compression_level: self.chunk_compression_level, - max_positions_per_attributes: self.max_positions_per_attributes, - skip_index_budget: self.skip_index_budget, - thread_pool: None, - } - } } impl Default for IndexerConfig { fn default() -> Self { + #[allow(unused_mut)] + let mut pool_builder = ThreadPoolNoAbortBuilder::new(); + + #[cfg(test)] + { + pool_builder = pool_builder.num_threads(1); + } + + let thread_pool = pool_builder + .thread_name(|index| format!("indexing-thread:{index}")) + .build() + .expect("failed to build default rayon thread pool"); + Self { log_every_n: None, max_nb_chunks: None, @@ -53,9 +51,9 @@ impl Default for IndexerConfig { max_threads: None, chunk_compression_type: CompressionType::None, chunk_compression_level: None, - thread_pool: None, max_positions_per_attributes: None, skip_index_budget: false, + thread_pool, } } } From 8bd8e744f35db460750935a9107a9bcb51b798c2 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Fri, 9 May 2025 02:42:48 -0700 Subject: [PATCH 007/131] Attributes to search on supports nested wildcards --- crates/milli/src/search/new/mod.rs | 56 ++++++++++++++++++++++++++++-- 1 file changed, 53 insertions(+), 3 deletions(-) diff --git a/crates/milli/src/search/new/mod.rs b/crates/milli/src/search/new/mod.rs index 6e794ef53..21002c55a 100644 --- a/crates/milli/src/search/new/mod.rs +++ b/crates/milli/src/search/new/mod.rs @@ -120,17 +120,39 @@ impl<'ctx> SearchContext<'ctx> { let searchable_fields_weights = self.index.searchable_fields_and_weights(self.txn)?; let exact_attributes_ids = self.index.exact_attributes_ids(self.txn)?; - let mut wildcard = false; + let mut universal_wildcard = false; let mut restricted_fids = RestrictedFids::default(); for field_name in attributes_to_search_on { if field_name == "*" { - wildcard = true; + universal_wildcard = true; // we cannot early exit as we want to returns error in case of unknown fields continue; } let searchable_weight = searchable_fields_weights.iter().find(|(name, _, _)| name == field_name); + + // The field is not searchable but may 
contain a wildcard pattern + if searchable_weight.is_none() && field_name.contains("*") { + let matching_searchable_weights: Vec<_> = searchable_fields_weights + .iter() + .filter(|(name, _, _)| { + Self::matches_wildcard_pattern(field_name, name) + }) + .collect(); + + if !matching_searchable_weights.is_empty() { + for (_name, fid, weight) in matching_searchable_weights { + if exact_attributes_ids.contains(fid) { + restricted_fids.exact.push((*fid, *weight)); + } else { + restricted_fids.tolerant.push((*fid, *weight)); + } + } + continue; + } + } + let (fid, weight) = match searchable_weight { // The Field id exist and the field is searchable Some((_name, fid, weight)) => (*fid, *weight), @@ -160,7 +182,7 @@ impl<'ctx> SearchContext<'ctx> { }; } - if wildcard { + if universal_wildcard { self.restricted_fids = None; } else { self.restricted_fids = Some(restricted_fids); @@ -168,6 +190,34 @@ impl<'ctx> SearchContext<'ctx> { Ok(()) } + + fn matches_wildcard_pattern(wildcard_pattern: &str, name: &str) -> bool { + let wildcard_subfields: Vec<&str> = wildcard_pattern.split(".").collect(); + let name_subfields: Vec<&str> = name.split(".").collect(); + + // Deep wildcard matches all attributes after ('**') + if !wildcard_subfields.is_empty() && wildcard_subfields.last() == Some(&"**") { + let prefix_len = wildcard_subfields.len() - 1; + if prefix_len > name_subfields.len() { + return false; + } + + return wildcard_subfields[..prefix_len] + .iter() + .zip(name_subfields.iter()) + .all(|(wc, sf)| *wc == "*" || *wc == *sf); + } + + // Using single wildcard ('*') should match length (e.g. 'a.*.c' matches 'a.b.c') + // where '*' can match any single segment + if wildcard_subfields.len() != name_subfields.len() { + return false; + } + + wildcard_subfields.iter() + .zip(name_subfields.iter()) + .all(|(wc, sf)| *wc == "*" || *wc == *sf) + } } #[derive(Debug, Default)] From 4e6252fb03374caf97685416e04f00f6caad2d11 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 12 May 2025 11:59:21 +0200 Subject: [PATCH 008/131] Only intern in case of single-typo when looking for single typoes --- crates/milli/src/search/new/query_term/compute_derivations.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/milli/src/search/new/query_term/compute_derivations.rs b/crates/milli/src/search/new/query_term/compute_derivations.rs index 10e480a04..5edf85e97 100644 --- a/crates/milli/src/search/new/query_term/compute_derivations.rs +++ b/crates/milli/src/search/new/query_term/compute_derivations.rs @@ -92,12 +92,12 @@ fn find_one_typo_derivations( let mut stream = fst.search_with_state(Intersection(starts, &dfa)).into_stream(); while let Some((derived_word, state)) = stream.next() { - let derived_word = std::str::from_utf8(derived_word)?; - let derived_word = ctx.word_interner.insert(derived_word.to_owned()); let d = dfa.distance(state.1); match d.to_u8() { 0 => (), 1 => { + let derived_word = std::str::from_utf8(derived_word)?; + let derived_word = ctx.word_interner.insert(derived_word.to_owned()); let cf = visit(derived_word)?; if cf.is_break() { break; From c8939944c63e06339e49db058c1fdb2f7908d01c Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 12 May 2025 12:40:55 +0200 Subject: [PATCH 009/131] Add test --- crates/meilisearch/tests/search/mod.rs | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index 6d98c0b2a..f1d9c8b3b 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ 
b/crates/meilisearch/tests/search/mod.rs @@ -112,6 +112,26 @@ async fn simple_search() { .await; } +/// See +#[actix_rt::test] +async fn bug_5547() { + let server = Server::new().await; + let index = server.index("big_fst"); + let (response, _code) = index.create(None).await; + index.wait_task(response.uid()).await.succeeded(); + + let mut documents = Vec::new(); + for i in 0..65_535 { + documents.push(json!({"id": i, "title": format!("title{i}")})); + } + + let (response, _code) = index.add_documents(json!(documents), Some("id")).await; + index.wait_task(response.uid()).await.succeeded(); + let (response, code) = index.search_post(json!({"q": "title"})).await; + assert_eq!(code, 200); + snapshot!(response["hits"], @r###"[{"id":0,"title":"title0"},{"id":1,"title":"title1"},{"id":10,"title":"title10"},{"id":100,"title":"title100"},{"id":101,"title":"title101"},{"id":102,"title":"title102"},{"id":103,"title":"title103"},{"id":104,"title":"title104"},{"id":105,"title":"title105"},{"id":106,"title":"title106"},{"id":107,"title":"title107"},{"id":108,"title":"title108"},{"id":1000,"title":"title1000"},{"id":1001,"title":"title1001"},{"id":1002,"title":"title1002"},{"id":1003,"title":"title1003"},{"id":1004,"title":"title1004"},{"id":1005,"title":"title1005"},{"id":1006,"title":"title1006"},{"id":1007,"title":"title1007"}]"###); +} + #[actix_rt::test] async fn search_with_stop_word() { // related to https://github.com/meilisearch/meilisearch/issues/4984 From 15cdc6924b6b733d924d14d87ad9ce165cdd85dc Mon Sep 17 00:00:00 2001 From: nnethercott Date: Tue, 13 May 2025 09:12:34 +0200 Subject: [PATCH 010/131] refactor: remove runtime cfg!(test) check Won't work in integration tests and consequently all threads would be used. To remedy this we make explicit `max_threads=Some(1)` in the IndexerConfig::default --- crates/meilisearch/src/lib.rs | 5 ++-- crates/milli/src/test_index.rs | 32 +++-------------------- crates/milli/src/update/indexer_config.rs | 8 ++++-- 3 files changed, 12 insertions(+), 33 deletions(-) diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 57ef6d6f2..0a5c2f1f5 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -501,12 +501,11 @@ fn import_dump( let network = dump_reader.network()?.cloned().unwrap_or_default(); index_scheduler.put_network(network)?; - // 3.1 Use all cpus to process dump if a) `max_indexing_threads` not configured and - // b) we're not executing from within a test + // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured let backup_config; let base_config = index_scheduler.indexer_config(); - let indexer_config = if base_config.max_threads.is_none() && !cfg!(test) { + let indexer_config = if base_config.max_threads.is_none() { let thread_pool = ThreadPoolNoAbortBuilder::new() .thread_name(|index| format!("indexing-thread:{index}")) .num_threads(num_cpus::get()) diff --git a/crates/milli/src/test_index.rs b/crates/milli/src/test_index.rs index 7759b3e18..dfd570b96 100644 --- a/crates/milli/src/test_index.rs +++ b/crates/milli/src/test_index.rs @@ -19,10 +19,7 @@ use crate::update::{ }; use crate::vector::settings::{EmbedderSource, EmbeddingSettings}; use crate::vector::EmbeddingConfigs; -use crate::{ - db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult, - ThreadPoolNoAbortBuilder, -}; +use crate::{db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult}; pub(crate) struct TempIndex { pub inner: Index, @@ -62,15 +59,8 @@ impl 
TempIndex { wtxn: &mut RwTxn<'t>, documents: Mmap, ) -> Result<(), crate::error::Error> { - let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = self.inner.read_txn()?; let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?; @@ -153,15 +143,8 @@ impl TempIndex { wtxn: &mut RwTxn<'t>, external_document_ids: Vec, ) -> Result<(), crate::error::Error> { - let local_pool; let indexer_config = &self.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = self.inner.read_txn()?; let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?; @@ -231,15 +214,8 @@ fn aborting_indexation() { let mut wtxn = index.inner.write_txn().unwrap(); let should_abort = AtomicBool::new(false); - let local_pool; let indexer_config = &index.indexer_config; - let pool = match &indexer_config.thread_pool { - Some(pool) => pool, - None => { - local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap(); - &local_pool - } - }; + let pool = &indexer_config.thread_pool; let rtxn = index.inner.read_txn().unwrap(); let db_fields_ids_map = index.inner.fields_ids_map(&rtxn).unwrap(); diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index c6ae2b859..33573aef6 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -33,9 +33,13 @@ impl Default for IndexerConfig { #[allow(unused_mut)] let mut pool_builder = ThreadPoolNoAbortBuilder::new(); + #[allow(unused_mut, unused_assignments)] + let mut max_threads = None; + #[cfg(test)] { pool_builder = pool_builder.num_threads(1); + max_threads = Some(1); } let thread_pool = pool_builder @@ -44,16 +48,16 @@ impl Default for IndexerConfig { .expect("failed to build default rayon thread pool"); Self { + max_threads, + thread_pool, log_every_n: None, max_nb_chunks: None, documents_chunk_size: None, max_memory: None, - max_threads: None, chunk_compression_type: CompressionType::None, chunk_compression_level: None, max_positions_per_attributes: None, skip_index_budget: false, - thread_pool, } } } From e96c1d4b0fa044da9606c5247e921e30cfe58abb Mon Sep 17 00:00:00 2001 From: nnethercott Date: Tue, 13 May 2025 12:16:34 +0200 Subject: [PATCH 011/131] style: change fmt from empty str to "unlimited" --- crates/meilisearch/src/option.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index 8dcbdcfca..e7d172b71 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -852,7 +852,7 @@ impl fmt::Display for MaxThreads { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.0 { Some(threads) => write!(f, "{}", threads), - None => Ok(()), + None => write!(f, "unlimited"), } } } From 806e983aa54ef6d303dde8ab0e4b1efa46cbcb46 Mon Sep 17 00:00:00 2001 From: Nate Nethercott <53127799+nnethercott@users.noreply.github.com> Date: Tue, 13 May 2025 14:14:48 +0200 Subject: [PATCH 012/131] fix: lazy computation in thread default Co-authored-by: Martin Grigorov --- crates/meilisearch/src/option.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index e7d172b71..acb4bc05e 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -760,7 +760,7 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { fn try_from(other: &IndexerOpts) -> Result { // use 1/2 cpu threads if no value specified - let max_indexing_threads = other.max_indexing_threads.unwrap_or(num_cpus::get() / 2); + let max_indexing_threads = other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2); let thread_pool = ThreadPoolNoAbortBuilder::new() .thread_name(|index| format!("indexing-thread:{index}")) From 150d1db86bd88c3375754afd80021189b32e16f9 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Tue, 13 May 2025 21:44:24 -0700 Subject: [PATCH 013/131] Implemented integration tests for restrict_searchable.rs on nested wildcard attributes --- .../tests/search/restrict_searchable.rs | 320 ++++++++++++++++++ 1 file changed, 320 insertions(+) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs index ce99c4047..80eef96db 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -416,3 +416,323 @@ async fn phrase_search_on_title() { ) .await; } + +static NESTED_SEARCH_DOCUMENTS: Lazy = Lazy::new(|| { + json!([ + { + "details": { + "title": "Shazam!", + "desc": "a Captain Marvel ersatz", + "weaknesses": ["magic", "requires transformation"], + "outfit": { + "has_cape": true + } + }, + "id": "1", + }, + { + "details": { + "title": "Captain Planet", + "desc": "He's not part of the Marvel Cinematic Universe", + "blue_skin": true, + "outfit": { + "has_cape": false + } + }, + "id": "2", + }, + { + "details": { + "title": "Captain Marvel", + "desc": "a Shazam ersatz", + "weaknesses": ["magic", "power instability"], + "outfit": { + "has_cape": false + } + }, + "id": "3", + }]) +}); + +#[actix_rt::test] +async fn nested_search_on_title_with_prefix_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Wildcard should match to 'details.' attribute + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_on_title_with_suffix_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Wildcard should match to any attribute inside 'details.' 
+ // It's worth noting the difference between 'details.*' and '*.title' + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_all_details_with_deep_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Similar to matching all attributes on simple search documents with universal wildcard + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.**"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + }, + ) + .await; + + // Should return 2 documents (ids: 1 and 2) + index + .search( + json!({"q": "true", "attributesToSearchOn": ["details.**"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_all_details_restricted_set_with_any_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + let (task, _status_code) = index.update_settings_searchable_attributes(json!(["details.title"])).await; + index.wait_task(task.uid()).await.succeeded(); + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.**"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_no_searchable_attribute_set_with_any_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"0"); + }, + ) + .await; + + let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await; + index.wait_task(task.uid()).await.succeeded(); + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"0"); + }, + ) + .await; + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.**",]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"0"); + }, + ) + .await; + + let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await; + index.wait_task(task.uid()).await.succeeded(); + + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; + + // We only match deep wild card at the end, otherwise we need to recursively match deep wildcards + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.**", 
"details.**"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_prefix_search_on_title_with_prefix_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Nested prefix search with wildcard prefix should return 2 documents (ids: 2 and 3). + index + .search( + json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_prefix_search_on_details_with_suffix_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + index + .search( + json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_prefix_search_on_weaknesses_with_deep_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Deep wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3) + index + .search( + json!({"q": "mag", "attributesToSearchOn": ["details.**"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_search_on_title_matching_strategy_all() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Nested search matching strategy all should only return 1 document (ids: 3) + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all"}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(response["hits"].as_array().unwrap().len(), @"1"); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_attributes_ranking_rule_order_with_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Document 3 should appear before documents 1 and 2 + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.desc", "*.title"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ] + "### + ); + }, + ) + .await; +} + +#[actix_rt::test] +async fn nested_attributes_ranking_rule_order_with_deep_wildcard() { + let server = Server::new().await; + let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + + // Document 3 should appear before documents 1 and 2 + index + .search( + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.**"], "attributesToRetrieve": ["id"]}), + |response, code| { + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ] + "### + ); + }, + ) + .await; +} From 3fbe1df770ca95fc53a9c061a7ef87eaf63ff117 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Wed, 14 May 2025 00:18:30 -0700 Subject: [PATCH 014/131] Updated nested_search_all_details_with_deep_wildcard() to test deeply nested 
attributes --- .../meilisearch/tests/search/restrict_searchable.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs index 80eef96db..ffd612557 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -425,7 +425,11 @@ static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| { "desc": "a Captain Marvel ersatz", "weaknesses": ["magic", "requires transformation"], "outfit": { - "has_cape": true + "has_cape": true, + "colors": { + "primary": "red", + "secondary": "gold" + } } }, "id": "1", @@ -494,13 +498,13 @@ async fn nested_search_all_details_with_deep_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; - // Similar to matching all attributes on simple search documents with universal wildcard + // Deep wildcard should match deeply nested attributes index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.**"]}), + json!({"q": "gold", "attributesToSearchOn": ["details.**"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + snapshot!(response["hits"].as_array().unwrap().len(), @"1"); }, ) .await; From 865f24cfefbdfd2d0c6f6be2006266977f9d4cee Mon Sep 17 00:00:00 2001 From: nnethercott Date: Wed, 14 May 2025 23:45:24 +0200 Subject: [PATCH 015/131] refactor: helper methods for pool and max threads --- crates/meilisearch/src/lib.rs | 6 ++-- crates/meilisearch/src/option.rs | 10 ++---- crates/milli/src/thread_pool_no_abort.rs | 4 +++ crates/milli/src/update/indexer_config.rs | 37 +++++++++++++---------- 4 files changed, 30 insertions(+), 27 deletions(-) diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 0a5c2f1f5..441da0d7f 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -506,10 +506,8 @@ fn import_dump( let base_config = index_scheduler.indexer_config(); let indexer_config = if base_config.max_threads.is_none() { - let thread_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|index| format!("indexing-thread:{index}")) - .num_threads(num_cpus::get()) - .build()?; + let thread_pool = + ThreadPoolNoAbortBuilder::new_for_indexing().num_threads(num_cpus::get()).build()?; let _config = IndexerConfig { thread_pool, ..*base_config }; backup_config = _config; diff --git a/crates/meilisearch/src/option.rs b/crates/meilisearch/src/option.rs index acb4bc05e..d98b9aa8b 100644 --- a/crates/meilisearch/src/option.rs +++ b/crates/meilisearch/src/option.rs @@ -759,12 +759,8 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { type Error = anyhow::Error; fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> { - // use 1/2 cpu threads if no value specified - let max_indexing_threads = other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2); - - let thread_pool = ThreadPoolNoAbortBuilder::new() - .thread_name(|index| format!("indexing-thread:{index}")) - .num_threads(max_indexing_threads) + let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing() + .num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2)) .build()?; Ok(Self { @@ -841,7 +837,7 @@ impl FromStr for MaxThreads { type Err = ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { - if s.is_empty() { + if s.is_empty() || s == "unlimited" { return Ok(MaxThreads::default()); }
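        // A minimal sketch of the parsing behaviour after this change, assuming
        // `MaxThreads` is a tuple struct around `Option<usize>` whose `Default` is the
        // "no explicit cap" variant, as the fallthrough just below suggests:
        //
        //     "".parse::<MaxThreads>()          // Ok, falls back to the default (no cap)
        //     "unlimited".parse::<MaxThreads>() // Ok, same fallback as the empty string
        //     "8".parse::<MaxThreads>()         // Ok, an explicit cap of 8 threads
        //     "eight".parse::<MaxThreads>()     // Err(ParseIntError) from usize::from_str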
usize::from_str(s).map(Some).map(MaxThreads) diff --git a/crates/milli/src/thread_pool_no_abort.rs b/crates/milli/src/thread_pool_no_abort.rs index b57050a63..0c2fbb30d 100644 --- a/crates/milli/src/thread_pool_no_abort.rs +++ b/crates/milli/src/thread_pool_no_abort.rs @@ -54,6 +54,10 @@ impl ThreadPoolNoAbortBuilder { ThreadPoolNoAbortBuilder::default() } + pub fn new_for_indexing() -> ThreadPoolNoAbortBuilder { + ThreadPoolNoAbortBuilder::default().thread_name(|index| format!("indexing-thread:{index}")) + } + pub fn thread_name<F>(mut self, closure: F) -> Self where F: FnMut(usize) -> String + 'static, diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index 33573aef6..edca71e14 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -28,24 +28,29 @@ impl IndexerConfig { } } +/// By default use only 1 thread for indexing in tests +#[cfg(test)] +fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { + let pool = ThreadPoolNoAbortBuilder::new_for_indexing() + .num_threads(1) + .build() + .expect("failed to build default rayon thread pool"); + + (pool, Some(1)) +} + +#[cfg(not(test))] +fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { + let pool = ThreadPoolNoAbortBuilder::new_for_indexing() + .build() + .expect("failed to build default rayon thread pool"); + + (pool, None) +} + impl Default for IndexerConfig { fn default() -> Self { - #[allow(unused_mut)] - let mut pool_builder = ThreadPoolNoAbortBuilder::new(); - - #[allow(unused_mut, unused_assignments)] - let mut max_threads = None; - - #[cfg(test)] - { - pool_builder = pool_builder.num_threads(1); - max_threads = Some(1); - } - - let thread_pool = pool_builder - .thread_name(|index| format!("indexing-thread:{index}")) - .build() - .expect("failed to build default rayon thread pool"); + let (thread_pool, max_threads) = default_thread_pool_and_threads(); Self { max_threads, From 79db2e67fb25af4b52a239a3ea21f150832ce949 Mon Sep 17 00:00:00 2001 From: Nate Nethercott <53127799+nnethercott@users.noreply.github.com> Date: Thu, 15 May 2025 11:04:38 +0200 Subject: [PATCH 016/131] refactor: prefer helper over explicit pool construction Co-authored-by: Many the fish --- crates/meilisearch/src/lib.rs | 8 ++++---- crates/milli/src/update/indexer_config.rs | 4 ++-- crates/milli/src/update/mod.rs | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 441da0d7f..d83786394 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -37,8 +37,9 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions}; use meilisearch_auth::{open_auth_store_env, AuthController}; use meilisearch_types::milli::constants::VERSION_MAJOR; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; -use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig}; -use meilisearch_types::milli::ThreadPoolNoAbortBuilder; +use meilisearch_types::milli::update::{ + default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig, +}; use meilisearch_types::settings::apply_settings_to_builder; use meilisearch_types::tasks::KindWithContent; use meilisearch_types::versioning::{ @@ -506,8 +507,7 @@ fn import_dump( let base_config = index_scheduler.indexer_config(); let indexer_config = if base_config.max_threads.is_none() { - let thread_pool =
ThreadPoolNoAbortBuilder::new_for_indexing().num_threads(num_cpus::get()).build()?; + let (thread_pool, _) = default_thread_pool_and_threads(); let _config = IndexerConfig { thread_pool, ..*base_config }; backup_config = _config; diff --git a/crates/milli/src/update/indexer_config.rs b/crates/milli/src/update/indexer_config.rs index edca71e14..eb7fbd4d5 100644 --- a/crates/milli/src/update/indexer_config.rs +++ b/crates/milli/src/update/indexer_config.rs @@ -30,7 +30,7 @@ impl IndexerConfig { /// By default use only 1 thread for indexing in tests #[cfg(test)] -fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { +pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { let pool = ThreadPoolNoAbortBuilder::new_for_indexing() .num_threads(1) .build() @@ -40,7 +40,7 @@ fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { } #[cfg(not(test))] -fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { +pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) { let pool = ThreadPoolNoAbortBuilder::new_for_indexing() .build() .expect("failed to build default rayon thread pool"); diff --git a/crates/milli/src/update/mod.rs b/crates/milli/src/update/mod.rs index 9a783ffd2..ebb313dcf 100644 --- a/crates/milli/src/update/mod.rs +++ b/crates/milli/src/update/mod.rs @@ -4,7 +4,7 @@ pub use self::concurrent_available_ids::ConcurrentAvailableIds; pub use self::facet::bulk::FacetsUpdateBulk; pub use self::facet::incremental::FacetsUpdateIncrementalInner; pub use self::index_documents::*; -pub use self::indexer_config::IndexerConfig; +pub use self::indexer_config::{default_thread_pool_and_threads, IndexerConfig}; pub use self::new::ChannelCongestion; pub use self::settings::{validate_embedding_settings, Setting, Settings}; pub use self::update_step::UpdateIndexingStep; From c2ceb8e41b1fe1ad08a109ce4bddf86391056c23 Mon Sep 17 00:00:00 2001 From: "Santhosh Reddy Vootukuri (SUNNY) (from Dev Box)" Date: Thu, 8 Aug 2024 07:31:46 -0700 Subject: [PATCH 017/131] Improve Integration tests in the file stats.rs --- crates/meilisearch/tests/index/stats.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index 291cb0ce0..274a0ab17 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -3,8 +3,8 @@ use crate::json; #[actix_rt::test] async fn stats() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared().await; + let index = server.unique_index(); let (task, code) = index.create(Some("id")).await; assert_eq!(code, 202); @@ -47,8 +47,8 @@ async fn stats() { #[actix_rt::test] async fn error_get_stats_unexisting_index() { - let server = Server::new().await; - let (response, code) = server.index("test").stats().await; + let server = Server::new_shared().await; + let (response, code) = server.unique_index().stats().await; let expected_response = json!({ "message": "Index `test` not found.", From d986a3bbafff822409ff33b8f7d4d71190ef192a Mon Sep 17 00:00:00 2001 From: "Santhosh Reddy Vootukuri (SUNNY) (from Dev Box)" Date: Thu, 8 Aug 2024 10:26:39 -0700 Subject: [PATCH 018/131] Changes to index and expected_response as per feedback --- crates/meilisearch/tests/index/stats.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index
274a0ab17..e2a96e101 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -47,11 +47,11 @@ async fn stats() { #[actix_rt::test] async fn error_get_stats_unexisting_index() { - let server = Server::new_shared().await; - let (response, code) = server.unique_index().stats().await; + let index = shared_does_not_exists_index(); + let (response, code) = index.stats().await; let expected_response = json!({ - "message": "Index `test` not found.", + "message": "Index `DOES_NOT_EXISTS` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" From 3e3c00f44cd0fe31a1afe6a681c405ff312bb96d Mon Sep 17 00:00:00 2001 From: "Santhosh Reddy Vootukuri (SUNNY) (from Dev Box)" Date: Mon, 12 Aug 2024 21:55:50 -0700 Subject: [PATCH 019/131] fix for test failure --- crates/meilisearch/tests/index/stats.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index e2a96e101..528be5dcb 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -1,4 +1,7 @@ -use crate::common::Server; +use crate::common::{ + shared_does_not_exists_index, shared_empty_index, shared_index_with_documents, Server, +}; + use crate::json; #[actix_rt::test] From 0e9040e605c865c9bac6facb5a49b4bcf390c3e3 Mon Sep 17 00:00:00 2001 From: "Santhosh Reddy Vootukuri (SUNNY) (from Dev Box)" Date: Tue, 13 Aug 2024 12:09:09 -0700 Subject: [PATCH 020/131] remove warnings --- crates/meilisearch/tests/index/stats.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index 528be5dcb..14ac82acf 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -1,5 +1,5 @@ use crate::common::{ - shared_does_not_exists_index, shared_empty_index, shared_index_with_documents, Server, + shared_does_not_exists_index, Server, }; use crate::json; From 248c90bad56f63641ec22d6b196780177587e144 Mon Sep 17 00:00:00 2001 From: "Santhosh Reddy Vootukuri (SUNNY) (from Dev Box)" Date: Sun, 8 Sep 2024 09:52:22 -0700 Subject: [PATCH 021/131] removing .await --- crates/meilisearch/tests/index/stats.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index 14ac82acf..282c42c3c 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -6,7 +6,7 @@ use crate::json; #[actix_rt::test] async fn stats() { - let server = Server::new_shared().await; + let server = Server::new_shared(); let index = server.unique_index(); let (task, code) = index.create(Some("id")).await; From 4cda584b0cc3476b391620bfd320161e95a908aa Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 May 2025 15:45:25 +0300 Subject: [PATCH 022/131] Fix the build of stats.rs Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/stats.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index 282c42c3c..40e3d312c 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -18,7 +18,7 @@ async fn stats() { assert_eq!(code, 200); assert_eq!(response["numberOfDocuments"], 0); - assert!(response["isIndexing"] 
== false); + assert_eq!(response["isIndexing"], false); assert!(response["fieldDistribution"].as_object().unwrap().is_empty()); let documents = json!([ @@ -42,7 +42,7 @@ async fn stats() { assert_eq!(code, 200); assert_eq!(response["numberOfDocuments"], 2); - assert!(response["isIndexing"] == false); + assert_eq!(response["isIndexing"], false); assert_eq!(response["fieldDistribution"]["id"], 2); assert_eq!(response["fieldDistribution"]["name"], 1); assert_eq!(response["fieldDistribution"]["age"], 1); @@ -50,7 +50,7 @@ #[actix_rt::test] async fn error_get_stats_unexisting_index() { - let index = shared_does_not_exists_index(); + let index = shared_does_not_exists_index().await; let (response, code) = index.stats().await; let expected_response = json!({ From 3d130d31c882ba5c4217a0cad71cb778e05a65b6 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 May 2025 15:49:50 +0300 Subject: [PATCH 023/131] Do not hard code the non-existing index name/uid Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/stats.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index 40e3d312c..de594155b 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -54,7 +54,7 @@ async fn error_get_stats_unexisting_index() { let (response, code) = index.stats().await; let expected_response = json!({ - "message": "Index `DOES_NOT_EXISTS` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" From 13b607bd68243f81be98ce852b9bf327e08142a1 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Sun, 18 May 2025 20:24:52 -0700 Subject: [PATCH 024/131] Removed matches_wildcard_pattern() and integrated match_pattern() into attributes_to_search_on(), updated test cases --- .../tests/search/restrict_searchable.rs | 57 ++++--------------- crates/milli/src/attribute_patterns.rs | 2 +- crates/milli/src/search/new/mod.rs | 31 +--------- 3 files changed, 13 insertions(+), 77 deletions(-) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs index ffd612557..db1082053 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -476,7 +476,7 @@ async fn nested_search_on_title_with_prefix_wildcard() { } #[actix_rt::test] -async fn nested_search_on_title_with_suffix_wildcard() { +async fn nested_search_with_suffix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; @@ -491,17 +491,11 @@ async fn nested_search_on_title_with_suffix_wildcard() { }, ) .await; -} -#[actix_rt::test] -async fn nested_search_all_details_with_deep_wildcard() { - let server = Server::new().await; - let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; - - // Deep wildcard should match deeply nested attributes + // Should return 1 document (ids: 1) index .search( - json!({"q": "gold", "attributesToSearchOn": ["details.**"]}), + json!({"q": "gold", "attributesToSearchOn": ["details.*"]}), |response, code| { snapshot!(code, @"200 OK"); snapshot!(response["hits"].as_array().unwrap().len(), @"1"); }, ) .await; // Should return 2 documents (ids: 1 and 2)
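    // Note on the wildcard semantics this patch relies on: in `match_pattern`, a
    // trailing `*` matches any nested suffix rather than a single extra segment, so
    // `details.*` also reaches deep paths such as `details.outfit.colors.secondary`
    // (which is why the "gold" query above keeps its single hit) and the dedicated
    // `**` syntax becomes redundant.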
index .search( - json!({"q": "true", "attributesToSearchOn": ["details.**"]}), + json!({"q": "true", "attributesToSearchOn": ["details.*"]}), |response, code| { snapshot!(code, @"200 OK"); snapshot!(response["hits"].as_array().unwrap().len(), @"2"); @@ -522,7 +516,7 @@ async fn nested_search_all_details_with_deep_wildcard() { } #[actix_rt::test] -async fn nested_search_all_details_restricted_set_with_any_wildcard() { +async fn nested_search_on_title_restricted_set_with_suffix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; let (task, _status_code) = index.update_settings_searchable_attributes(json!(["details.title"])).await; @@ -537,16 +531,6 @@ async fn nested_search_all_details_restricted_set_with_any_wildcard() { }, ) .await; - - index - .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.**"]}), - |response, code| { - snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); - }, - ) - .await; } #[actix_rt::test] @@ -577,16 +561,6 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() { ) .await; - index - .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.**",]}), - |response, code| { - snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"0"); - }, - ) - .await; - let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await; index.wait_task(task.uid()).await.succeeded(); @@ -599,17 +573,6 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() { }, ) .await; - - // We only match deep wild card at the end, otherwise we need to recursively match deep wildcards - index - .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.**", "details.**"]}), - |response, code| { - snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"3"); - }, - ) - .await; } #[actix_rt::test] @@ -646,14 +609,14 @@ async fn nested_prefix_search_on_details_with_suffix_wildcard() { } #[actix_rt::test] -async fn nested_prefix_search_on_weaknesses_with_deep_wildcard() { +async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; // Deep wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3) index .search( - json!({"q": "mag", "attributesToSearchOn": ["details.**"]}), + json!({"q": "mag", "attributesToSearchOn": ["details.*"]}), |response, code| { snapshot!(code, @"200 OK"); snapshot!(response["hits"].as_array().unwrap().len(), @"2"); @@ -680,7 +643,7 @@ async fn nested_search_on_title_matching_strategy_all() { } #[actix_rt::test] -async fn nested_attributes_ranking_rule_order_with_wildcard() { +async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; @@ -711,14 +674,14 @@ async fn nested_attributes_ranking_rule_order_with_wildcard() { } #[actix_rt::test] -async fn nested_attributes_ranking_rule_order_with_deep_wildcard() { +async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; // Document 3 should appear before documents 1 and 2 index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.**"], 
"attributesToRetrieve": ["id"]}), + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); snapshot!(json_string!(response["hits"]), diff --git a/crates/milli/src/attribute_patterns.rs b/crates/milli/src/attribute_patterns.rs index 00caa2a6d..8da6942a3 100644 --- a/crates/milli/src/attribute_patterns.rs +++ b/crates/milli/src/attribute_patterns.rs @@ -50,7 +50,7 @@ impl AttributePatterns { /// /// * `pattern` - The pattern to match against. /// * `str` - The string to match against the pattern. -fn match_pattern(pattern: &str, str: &str) -> PatternMatch { +pub fn match_pattern(pattern: &str, str: &str) -> PatternMatch { // If the pattern is a wildcard, return Match if pattern == "*" { return PatternMatch::Match; diff --git a/crates/milli/src/search/new/mod.rs b/crates/milli/src/search/new/mod.rs index 21002c55a..dfe0ddfc9 100644 --- a/crates/milli/src/search/new/mod.rs +++ b/crates/milli/src/search/new/mod.rs @@ -52,6 +52,7 @@ pub use self::geo_sort::Strategy as GeoSortStrategy; use self::graph_based_ranking_rule::Words; use self::interner::Interned; use self::vector_sort::VectorSort; +use crate::attribute_patterns::{match_pattern, PatternMatch}; use crate::constants::RESERVED_GEO_FIELD_NAME; use crate::index::PrefixSearch; use crate::localized_attributes_rules::LocalizedFieldIds; @@ -137,7 +138,7 @@ impl<'ctx> SearchContext<'ctx> { let matching_searchable_weights: Vec<_> = searchable_fields_weights .iter() .filter(|(name, _, _)| { - Self::matches_wildcard_pattern(field_name, name) + match_pattern(field_name, name) == PatternMatch::Match }) .collect(); @@ -190,34 +191,6 @@ impl<'ctx> SearchContext<'ctx> { Ok(()) } - - fn matches_wildcard_pattern(wildcard_pattern: &str, name: &str) -> bool { - let wildcard_subfields: Vec<&str> = wildcard_pattern.split(".").collect(); - let name_subfields: Vec<&str> = name.split(".").collect(); - - // Deep wildcard matches all attributes after ('**') - if !wildcard_subfields.is_empty() && wildcard_subfields.last() == Some(&"**") { - let prefix_len = wildcard_subfields.len() - 1; - if prefix_len > name_subfields.len() { - return false; - } - - return wildcard_subfields[..prefix_len] - .iter() - .zip(name_subfields.iter()) - .all(|(wc, sf)| *wc == "*" || *wc == *sf); - } - - // Using single wildcard ('*') should match length (e.g. 'a.*.c' matches 'a.b.c') - // where '*' can match any single segment - if wildcard_subfields.len() != name_subfields.len() { - return false; - } - - wildcard_subfields.iter() - .zip(name_subfields.iter()) - .all(|(wc, sf)| *wc == "*" || *wc == *sf) - } } #[derive(Debug, Default)] From 1594c54e2301bbda794c192efcc1ccc8016df6e0 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Mon, 19 May 2025 02:37:23 -0700 Subject: [PATCH 025/131] Provide more information about resulting documents on test case --- .../meilisearch/tests/search/restrict_searchable.rs | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs index db1082053..2232d961b 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -466,10 +466,19 @@ async fn nested_search_on_title_with_prefix_wildcard() { // Wildcard should match to 'details.' 
attribute index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"]}), + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); }, ) .await; From b2f2c5d69f4da23c568add932f101f06f335b162 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 14:44:08 +0300 Subject: [PATCH 026/131] Remove an assertion of a task uid. It differs for every run of the IT test suite. Format the imports Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/stats.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/index/stats.rs b/crates/meilisearch/tests/index/stats.rs index de594155b..90c77cec8 100644 --- a/crates/meilisearch/tests/index/stats.rs +++ b/crates/meilisearch/tests/index/stats.rs @@ -1,6 +1,4 @@ -use crate::common::{ - shared_does_not_exists_index, Server, -}; +use crate::common::{shared_does_not_exists_index, Server}; use crate::json; @@ -34,7 +32,6 @@ async fn stats() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["taskUid"], 1); index.wait_task(response.uid()).await.succeeded(); From e2763471e58dbcfcb2d3fa1c206d32de4b592536 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 15:36:25 +0300 Subject: [PATCH 027/131] Faster index::get_index IT tests Use shared server for all tests in get_index.rs Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/get_index.rs | 46 +++++++++++++-------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/crates/meilisearch/tests/index/get_index.rs b/crates/meilisearch/tests/index/get_index.rs index a436b649b..91c5a31a5 100644 --- a/crates/meilisearch/tests/index/get_index.rs +++ b/crates/meilisearch/tests/index/get_index.rs @@ -43,7 +43,7 @@ async fn error_get_unexisting_index() { #[actix_rt::test] async fn no_index_return_empty_list() { - let server = Server::new().await; + let server = Server::new_shared(); let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); assert!(response["results"].is_array()); @@ -52,29 +52,39 @@ async fn no_index_return_empty_list() { #[actix_rt::test] async fn list_multiple_indexes() { - let server = Server::new().await; - server.index("test").create(None).await; - let (task, _status_code) = server.index("test1").create(Some("key")).await; + let server = Server::new_shared(); - server.index("test").wait_task(task.uid()).await.succeeded(); + let index_without_key = server.unique_index(); + let (response_without_key, _status_code) = index_without_key.create(None).await; + + let index_with_key = server.unique_index(); + let (response_with_key, _status_code) = index_with_key.create(Some("key")).await; + + index_without_key.wait_task(response_without_key.uid()).await.succeeded(); + index_with_key.wait_task(response_with_key.uid()).await.succeeded(); let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); assert!(response["results"].is_array()); let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 2); - assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null)); - assert!(arr.iter().any(|entry| entry["uid"] == "test1" && 
entry["primaryKey"] == "key")); + assert!(arr.iter().any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null)); + assert!(arr.iter().any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key")); } #[actix_rt::test] async fn get_and_paginate_indexes() { - let server = Server::new().await; + let server = Server::new_shared(); + + let mut indices_names = Vec::new(); const NB_INDEXES: usize = 50; - for i in 0..NB_INDEXES { - server.index(format!("test_{i:02}")).create(None).await; - server.index(format!("test_{i:02}")).wait_task(i as u64).await; + for _ in 0..NB_INDEXES { + let index = server.unique_index(); + indices_names.push(index.uid.clone()); + let (response, _status_code) = index.create(None).await; + index.wait_task(response.uid()).await.succeeded(); } + indices_names.sort(); // basic let (response, code) = server.list_indexes(None, None).await; @@ -87,7 +97,7 @@ async fn get_and_paginate_indexes() { assert_eq!(arr.len(), 20); // ensuring we get all the indexes in the alphabetical order assert!((0..20) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -101,7 +111,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 20); assert!((15..35) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -115,7 +125,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 5); assert!((45..50) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -129,7 +139,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 5); assert!((0..5) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -143,7 +153,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 40); assert!((0..40) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -157,7 +167,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 50); assert!((0..50) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -171,7 +181,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 10); assert!((20..30) - .map(|idx| format!("test_{idx:02}")) + .map(|idx| indices_names[idx].clone()) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); } From 03a36f116ea94856dbdd3b143129485e9e7489c5 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 16:20:16 +0300 Subject: [PATCH 028/131] 1. Use a unique Server for no_index_return_empty_list test ... because a Shared one could see indices created by other tests 2. 
List at least 1000 indices to make sure we get the newly created ones in list_multiple_indexes() Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/get_index.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/meilisearch/tests/index/get_index.rs b/crates/meilisearch/tests/index/get_index.rs index 91c5a31a5..5a9f2007c 100644 --- a/crates/meilisearch/tests/index/get_index.rs +++ b/crates/meilisearch/tests/index/get_index.rs @@ -43,7 +43,7 @@ async fn error_get_unexisting_index() { #[actix_rt::test] async fn no_index_return_empty_list() { - let server = Server::new_shared(); + let server = Server::new().await; let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); assert!(response["results"].is_array()); @@ -63,11 +63,11 @@ async fn list_multiple_indexes() { index_without_key.wait_task(response_without_key.uid()).await.succeeded(); index_with_key.wait_task(response_with_key.uid()).await.succeeded(); - let (response, code) = server.list_indexes(None, None).await; + let (response, code) = server.list_indexes(None, Some(1000)).await; assert_eq!(code, 200); assert!(response["results"].is_array()); let arr = response["results"].as_array().unwrap(); - assert_eq!(arr.len(), 2); + assert!(arr.len() >= 2, "Expected at least 2 indexes."); assert!(arr.iter().any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null)); assert!(arr.iter().any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key")); } From b68e22c0e6db20f7d81aa8acce3734367b82f59e Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 16:35:09 +0300 Subject: [PATCH 029/131] Revert the improvements for get_and_paginate_indexes() Because they won't work in multi-threaded execution of the tests Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/get_index.rs | 27 +++++++++------------ 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/crates/meilisearch/tests/index/get_index.rs b/crates/meilisearch/tests/index/get_index.rs index 5a9f2007c..e193efa14 100644 --- a/crates/meilisearch/tests/index/get_index.rs +++ b/crates/meilisearch/tests/index/get_index.rs @@ -74,17 +74,12 @@ async fn list_multiple_indexes() { #[actix_rt::test] async fn get_and_paginate_indexes() { - let server = Server::new_shared(); - - let mut indices_names = Vec::new(); + let server = Server::new().await; const NB_INDEXES: usize = 50; - for _ in 0..NB_INDEXES { - let index = server.unique_index(); - indices_names.push(index.uid.clone()); - let (response, _status_code) = index.create(None).await; - index.wait_task(response.uid()).await.succeeded(); + for i in 0..NB_INDEXES { + server.index(format!("test_{i:02}")).create(None).await; + server.index(format!("test_{i:02}")).wait_task(i as u64).await; } - indices_names.sort(); // basic let (response, code) = server.list_indexes(None, None).await; @@ -97,7 +92,7 @@ async fn get_and_paginate_indexes() { assert_eq!(arr.len(), 20); // ensuring we get all the indexes in the alphabetical order assert!((0..20) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -111,7 +106,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 20); assert!((15..35) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); 
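// The reverted assertions above and below all encode the same offset/limit
// windowing over the 50 indexes created earlier; a minimal sketch of the expected
// window, with a hypothetical `window` helper and the offset/limit pairs the
// surrounding hunks suggest:
//
//     fn window(total: usize, offset: usize, limit: usize) -> std::ops::Range<usize> {
//         let start = offset.min(total);
//         start..(offset + limit).min(total) // clamp the page to the real index count
//     }
//
//     assert_eq!(window(50, 15, 20), 15..35); // hunk above: a full 20-item page
//     assert_eq!(window(50, 45, 20), 45..50); // hunk below: only 5 indexes remain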
@@ -125,7 +120,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 5); assert!((45..50) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -139,7 +134,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 5); assert!((0..5) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -153,7 +148,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 40); assert!((0..40) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -167,7 +162,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 50); assert!((0..50) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); @@ -181,7 +176,7 @@ async fn get_and_paginate_indexes() { let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 10); assert!((20..30) - .map(|idx| indices_names[idx].clone()) + .map(|idx| format!("test_{idx:02}")) .zip(arr) .all(|(expected, entry)| entry["uid"] == expected)); } From d1a088ea0b65b1ef1baf0d93eec42044657075a3 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 16:52:43 +0300 Subject: [PATCH 030/131] Format the code Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/get_index.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/index/get_index.rs b/crates/meilisearch/tests/index/get_index.rs index e193efa14..b26eaeb9a 100644 --- a/crates/meilisearch/tests/index/get_index.rs +++ b/crates/meilisearch/tests/index/get_index.rs @@ -68,8 +68,12 @@ async fn list_multiple_indexes() { assert!(response["results"].is_array()); let arr = response["results"].as_array().unwrap(); assert!(arr.len() >= 2, "Expected at least 2 indexes."); - assert!(arr.iter().any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null)); - assert!(arr.iter().any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key")); + assert!(arr + .iter() + .any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null)); + assert!(arr + .iter() + .any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key")); } #[actix_rt::test] From bb07038c317c3c93f5762cc59deb6e82b6a45374 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 16:57:53 +0300 Subject: [PATCH 031/131] tests: Assert succeeded/failed for the index::delete_index IT tests Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/delete_index.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/index/delete_index.rs b/crates/meilisearch/tests/index/delete_index.rs index 03185d21a..713891420 100644 --- a/crates/meilisearch/tests/index/delete_index.rs +++ b/crates/meilisearch/tests/index/delete_index.rs @@ -28,6 +28,7 @@ async fn error_delete_unexisting_index() { let (task, code) = index.delete_index_fail().await; assert_eq!(code, 202); + 
index.wait_task(task.uid()).await.failed(); let expected_response = json!({ "message": "Index `DOES_NOT_EXISTS` not found.", @@ -57,7 +58,7 @@ async fn loop_delete_add_documents() { } for task in tasks { - let response = index.wait_task(task).await; + let response = index.wait_task(task).await.succeeded(); assert_eq!(response["status"], "succeeded", "{}", response); } } From 2fe5c78cb645f102dec77be50e4704e71796aa76 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 20 May 2025 14:26:26 +0300 Subject: [PATCH 032/131] tests: Faster index::search::mod IT tests * Use shared index where possible. * Call .succeeded/.failed when waiting for a task. * Use newer format_args syntax * Do not use fully qualified name for meili_snap:: functions. The functions are already imported in scope Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/common/mod.rs | 19 ++ crates/meilisearch/tests/search/mod.rs | 293 ++++++++++++------------- 2 files changed, 154 insertions(+), 158 deletions(-) diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs index 4d57a6163..da8beac3a 100644 --- a/crates/meilisearch/tests/common/mod.rs +++ b/crates/meilisearch/tests/common/mod.rs @@ -264,6 +264,25 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| { ]) }); +pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> { + static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new(); + INDEX.get_or_init(|| async { + let server = Server::new_shared(); + let index = server._index("SCORE_DOCUMENTS").to_shared(); + let documents = SCORE_DOCUMENTS.clone(); + let (response, _code) = index._add_documents(documents, None).await; + index.wait_task(response.uid()).await.succeeded(); + let (response, _code) = index + ._update_settings( + json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}), + ) + .await; + index.wait_task(response.uid()).await.succeeded(); + index + }).await } + + pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| { json!([ { diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index f1d9c8b3b..2ac0d0400 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -1,4 +1,4 @@ -// This modules contains all the test concerning search. Each particular feature of the search +// This module contains all the tests concerning search. Each particular feature of the search
mod distinct; @@ -21,8 +21,9 @@ use meilisearch::Opt; use tempfile::TempDir; use crate::common::{ - default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server, - Value, DOCUMENTS, FRUITS_DOCUMENTS, NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS, + default_settings, shared_index_with_documents, shared_index_with_nested_documents, + shared_index_with_score_documents, Server, Value, DOCUMENTS, FRUITS_DOCUMENTS, + NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS, }; use crate::json; @@ -39,39 +40,33 @@ async fn test_settings_documents_indexing_swapping_and_search( let index = server.index("test"); let (task, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.update_settings(settings.clone()).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); index.search(query.clone(), test.clone()).await; let (task, code) = server.delete_index("test").await; - assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + server.wait_task(task.uid()).await.succeeded(); eprintln!("Settings -> Documents -> test"); let index = server.index("test"); let (task, code) = index.update_settings(settings.clone()).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + index.wait_task(task.uid()).await.succeeded(); index.search(query.clone(), test.clone()).await; let (task, code) = server.delete_index("test").await; - assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + assert_eq!(code, 202, "{task}"); + server.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] @@ -79,7 +74,7 @@ async fn simple_placeholder_search() { let index = shared_index_with_documents().await; index .search(json!({}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); }) .await; @@ -87,7 +82,7 @@ async fn simple_placeholder_search() { let index = shared_index_with_nested_documents().await; index .search(json!({}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 4); }) .await; @@ -98,7 +93,7 @@ async fn simple_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "glass"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }) .await; @@ -106,7 +101,7 @@ async fn simple_search() { let index = 
shared_index_with_nested_documents().await; index .search(json!({"q": "pésti"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); }) .await; @@ -141,7 +136,7 @@ async fn search_with_stop_word() { let (_, code) = index .update_settings(json!({"stopWords": ["the", "The", "a", "an", "to", "in", "of"]})) .await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); index.add_documents(documents, None).await; @@ -150,7 +145,7 @@ async fn search_with_stop_word() { // prefix search index .search(json!({"q": "to the", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @"[]"); }) .await; @@ -158,7 +153,7 @@ async fn search_with_stop_word() { // non-prefix search index .search(json!({"q": "to the ", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -206,7 +201,7 @@ async fn search_with_typo_settings() { let (_, code) = index .update_settings(json!({"typoTolerance": { "disableOnAttributes": ["title", "id"]}})) .await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -214,7 +209,7 @@ async fn search_with_typo_settings() { index .search(json!({"q": "287947" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -238,7 +233,7 @@ async fn phrase_search_with_stop_word() { let index = server.index("test"); let (_, code) = index.update_settings(json!({"stopWords": ["the", "of"]})).await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -246,7 +241,7 @@ async fn phrase_search_with_stop_word() { index .search(json!({"q": "how \"to\" train \"the" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }) .await; @@ -257,7 +252,7 @@ async fn negative_phrase_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "-\"train your dragon\"" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 4); assert_eq!(hits[0]["id"], "287947"); @@ -273,7 +268,7 @@ async fn negative_word_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "-escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 4); assert_eq!(hits[0]["id"], "287947"); @@ -286,7 +281,7 @@ async fn negative_word_search() { // Everything that contains derivates of escape but not escape: nothing index .search(json!({"q": "-escape escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = 
response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 0); }) @@ -298,7 +293,7 @@ async fn non_negative_search() { let index = shared_index_with_documents().await; index .search(json!({"q": "- escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 1); assert_eq!(hits[0]["id"], "522681"); @@ -307,7 +302,7 @@ async fn non_negative_search() { index .search(json!({"q": "- \"train your dragon\"" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 1); assert_eq!(hits[0]["id"], "166428"); @@ -331,7 +326,7 @@ async fn negative_special_cases_search() { // There is a synonym for escape -> glass but we don't want "escape", only the derivates: glass index .search(json!({"q": "-escape escape" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["hits"].as_array().unwrap(); assert_eq!(hits.len(), 1); assert_eq!(hits[0]["id"], "450465"); @@ -356,7 +351,7 @@ async fn test_kanji_language_detection() { index .search(json!({"q": "東京"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }) .await; @@ -382,7 +377,7 @@ async fn test_thai_language() { index .search(json!({"q": "สบู"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); }) .await; } @@ -400,7 +395,7 @@ async fn search_multiple_params() { "offset": 0, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }, ) @@ -417,7 +412,7 @@ async fn search_multiple_params() { "offset": 0, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); }, ) @@ -433,7 +428,7 @@ async fn search_with_sort_on_numbers() { "sort": ["id:asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); }, ) @@ -446,7 +441,7 @@ async fn search_with_sort_on_numbers() { "sort": ["doggos.age:asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 4); }, ) @@ -462,7 +457,7 @@ async fn search_with_sort_on_strings() { "sort": ["title:desc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); }, ) @@ -475,7 +470,7 @@ async fn search_with_sort_on_strings() { "sort": ["doggos.name:asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 4); }, ) @@ -490,7 +485,7 @@ async fn search_with_multiple_sort() { "sort": ["id:asc", "title:desc"] })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); } @@ -503,7 +498,7 @@ async fn search_facet_distribution() { "facets": ["title"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = 
response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert!(dist.get("title").is_some()); @@ -521,7 +516,7 @@ async fn search_facet_distribution() { "facets": ["father"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert_eq!( @@ -544,9 +539,9 @@ async fn search_facet_distribution() { "facets": ["doggos.name"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); - assert_eq!(dist.len(), 1, "{:?}", dist); + assert_eq!(dist.len(), 1, "{dist:?}"); assert_eq!( dist["doggos.name"], json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1}) @@ -561,9 +556,9 @@ async fn search_facet_distribution() { "facets": ["doggos"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); - assert_eq!(dist.len(), 3, "{:?}", dist); + assert_eq!(dist.len(), 3, "{dist:?}"); assert_eq!( dist["doggos.name"], json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1}) @@ -579,7 +574,7 @@ async fn search_facet_distribution() { "facets": ["doggos.name"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert_eq!( @@ -604,7 +599,7 @@ async fn displayed_attributes() { let (response, code) = index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert!(response["hits"][0].get("title").is_some()); } @@ -623,7 +618,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1000); }, ) @@ -636,7 +631,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -652,7 +647,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1200); }, ) @@ -665,7 +660,7 @@ async fn placeholder_search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -688,7 +683,7 @@ async fn search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1000); }, ) @@ -702,7 +697,7 @@ async fn search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -719,7 +714,7 @@ async fn search_is_hard_limited() { "limit": 1500, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); 
assert_eq!(response["hits"].as_array().unwrap().len(), 1200); }, ) @@ -733,7 +728,7 @@ async fn search_is_hard_limited() { "limit": 400, }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 200); }, ) @@ -757,7 +752,7 @@ async fn faceting_max_values_per_facet() { "facets": ["number"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let numbers = response["facetDistribution"]["number"].as_object().unwrap(); assert_eq!(numbers.len(), 100); }, @@ -774,7 +769,7 @@ async fn faceting_max_values_per_facet() { "facets": ["number"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let numbers = &response["facetDistribution"]["number"].as_object().unwrap(); assert_eq!(numbers.len(), 10_000); }, @@ -784,13 +779,7 @@ async fn faceting_max_values_per_facet() { #[actix_rt::test] async fn test_score_details() { - let server = Server::new().await; - let index = server.index("test"); - - let documents = DOCUMENTS.clone(); - - let res = index.add_documents(json!(documents), None).await; - index.wait_task(res.0.uid()).await.succeeded(); + let index = shared_index_with_documents().await; index .search( @@ -799,8 +788,8 @@ async fn test_score_details() { "showRankingScoreDetails": true, }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -850,13 +839,7 @@ async fn test_score_details() { #[actix_rt::test] async fn test_score() { - let server = Server::new().await; - let index = server.index("test"); - - let documents = SCORE_DOCUMENTS.clone(); - - let res = index.add_documents(json!(documents), None).await; - index.wait_task(res.0.uid()).await.succeeded(); + let index = shared_index_with_score_documents().await; index .search( @@ -865,8 +848,8 @@ async fn test_score() { "showRankingScore": true, }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -903,13 +886,7 @@ async fn test_score() { #[actix_rt::test] async fn test_score_threshold() { let query = "Badman dark returns 1"; - let server = Server::new().await; - let index = server.index("test"); - - let documents = SCORE_DOCUMENTS.clone(); - - let res = index.add_documents(json!(documents), None).await; - index.wait_task(res.0.uid()).await.succeeded(); + let index = shared_index_with_score_documents().await; index .search( @@ -919,9 +896,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.0 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"5"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @"5"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -962,9 +939,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.2 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - 
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"3"###); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"3"###); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -995,9 +972,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.5 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"2"###); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"2"###); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -1023,9 +1000,9 @@ async fn test_score_threshold() { "rankingScoreThreshold": 0.8 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"1"###); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"1"###); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "Batman the dark knight returns: Part 1", @@ -1046,10 +1023,10 @@ async fn test_score_threshold() { "rankingScoreThreshold": 1.0 }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"0"###); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["estimatedTotalHits"]), @r###"0"###); // nobody is perfect - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @"[]"); + snapshot!(json_string!(response["hits"]), @"[]"); }, ) .await; @@ -1075,8 +1052,8 @@ async fn test_degraded_score_details() { "showRankingScoreDetails": true, }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###" { "hits": [ { @@ -1162,8 +1139,8 @@ async fn camelcased_words() { index .search(json!({"q": "deLonghi"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 0, @@ -1180,8 +1157,8 @@ async fn camelcased_words() { index .search(json!({"q": "dellonghi"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 0, @@ -1198,8 +1175,8 @@ async fn camelcased_words() { index .search(json!({"q": "testa"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1220,8 +1197,8 @@ async fn camelcased_words() { index .search(json!({"q": "testab"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, 
@"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1242,8 +1219,8 @@ async fn camelcased_words() { index .search(json!({"q": "TestaB"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1264,8 +1241,8 @@ async fn camelcased_words() { index .search(json!({"q": "Testab"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1286,8 +1263,8 @@ async fn camelcased_words() { index .search(json!({"q": "TestAb"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1309,8 +1286,8 @@ async fn camelcased_words() { // with Typos index .search(json!({"q": "dellonghi"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 0, @@ -1327,8 +1304,8 @@ async fn camelcased_words() { index .search(json!({"q": "TetsAB"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1349,8 +1326,8 @@ async fn camelcased_words() { index .search(json!({"q": "TetsAB"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 2, @@ -1377,8 +1354,8 @@ async fn simple_search_with_strange_synonyms() { let (task, _status_code) = index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await; - let r = index.wait_task(task.uid()).await; - meili_snap::snapshot!(r["status"], @r###""succeeded""###); + let r = index.wait_task(task.uid()).await.succeeded(); + snapshot!(r["status"], @r###""succeeded""###); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -1386,8 +1363,8 @@ async fn simple_search_with_strange_synonyms() { index .search(json!({"q": "How to train"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -1404,8 +1381,8 @@ async fn simple_search_with_strange_synonyms() { index .search(json!({"q": "How & train"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -1422,8 +1399,8 @@ async fn simple_search_with_strange_synonyms() { index .search(json!({"q": "to"}), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + snapshot!(code, @"200 
OK"); + snapshot!(json_string!(response["hits"]), @r###" [ { "title": "How to Train Your Dragon: The Hidden World", @@ -1462,8 +1439,8 @@ async fn change_attributes_settings() { "attributesToRetrieve": ["id", "doggos"] }), |response, code| { - assert_eq!(code, 200, "{}", response); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + assert_eq!(code, 200, "{response}"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 852, @@ -1493,8 +1470,8 @@ async fn change_attributes_settings() { "attributesToRetrieve": ["id", "doggos"] }), |response, code| { - assert_eq!(code, 200, "{}", response); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + assert_eq!(code, 200, "{response}"); + snapshot!(json_string!(response["hits"]), @r###" [ { "id": 852, @@ -1563,7 +1540,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "document"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1609,7 +1586,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "zeroth"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1627,7 +1604,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "first"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1650,7 +1627,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "field"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1686,7 +1663,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "array"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); // nested is not searchable snapshot!(json_string!(response["hits"]), @"[]"); }, @@ -1698,7 +1675,7 @@ async fn test_nested_fields() { &settings, &json!({"q": "lied"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); // nested is not searchable snapshot!(json_string!(response["hits"]), @"[]"); }, @@ -1711,7 +1688,7 @@ async fn test_nested_fields() { &settings, &json!({"filter": "nested.object = field"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1747,7 +1724,7 @@ async fn test_nested_fields() { &settings, &json!({"filter": "nested.machin = bidule"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1770,7 +1747,7 @@ async fn test_nested_fields() { &settings, &json!({"filter": "nested = array"}), |response, code| { - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(json_string!(response), @r###" { "message": "Index `test`: Attribute `nested` is not filterable. 
Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array", @@ -1789,7 +1766,7 @@ async fn test_nested_fields() { &settings, &json!({"filter": r#"nested = "I lied""#}), |response, code| { - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(json_string!(response), @r###" { "message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"", @@ -1850,7 +1827,7 @@ async fn test_typo_settings() { }), &json!({"q": "document"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1902,7 +1879,7 @@ async fn test_typo_settings() { }), &json!({"q": "docume"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -1955,7 +1932,7 @@ async fn change_facet_casing() { })) .await; assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index .add_documents( @@ -1968,7 +1945,7 @@ async fn change_facet_casing() { None, ) .await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index .add_documents( @@ -1981,12 +1958,12 @@ async fn change_facet_casing() { None, ) .await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); index .search(json!({ "facets": ["dog"] }), |response, code| { - meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["facetDistribution"]), @r###" + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response["facetDistribution"]), @r###" { "dog": { "bouvier bernois": 1 @@ -2045,7 +2022,7 @@ async fn test_exact_typos_terms() { }), &json!({"q": "12345"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###" [ { @@ -2080,7 +2057,7 @@ async fn test_exact_typos_terms() { }), &json!({"q": "123457"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response["hits"]), @r###"[]"###); }, ) From 57eecd61970c51bd19a4b16e54595be9a32da1f5 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 20 May 2025 14:37:45 +0300 Subject: [PATCH 033/131] Remove an empty line Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/common/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs index da8beac3a..046519a0e 100644 --- a/crates/meilisearch/tests/common/mod.rs +++ b/crates/meilisearch/tests/common/mod.rs @@ -282,7 +282,6 @@ pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shar }).await } - pub static NESTED_DOCUMENTS: Lazy = Lazy::new(|| { json!([ { From c5ae43cac6ba37a97c0bb186763511aeefa712c7 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Tue, 20 May 2025 09:03:26 -0700 Subject: [PATCH 034/131] Updated all additional test cases --- .../tests/search/restrict_searchable.rs | 121 +++++++++++++++--- 1 file changed, 101 insertions(+), 20 deletions(-) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs 
b/crates/meilisearch/tests/search/restrict_searchable.rs index 2232d961b..2c8c86b5d 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -493,10 +493,22 @@ async fn nested_search_with_suffix_wildcard() { // It's worth noting the difference between 'details.*' and '*.title' index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"]}), + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ]"###); }, ) .await; @@ -504,10 +516,16 @@ async fn nested_search_with_suffix_wildcard() { // Should return 1 document (ids: 1) index .search( - json!({"q": "gold", "attributesToSearchOn": ["details.*"]}), + json!({"q": "gold", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"1"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "1" + } + ]"###); }, ) .await; @@ -515,10 +533,19 @@ async fn nested_search_with_suffix_wildcard() { // Should return 2 documents (ids: 1 and 2) index .search( - json!({"q": "true", "attributesToSearchOn": ["details.*"]}), + json!({"q": "true", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "1" + }, + { + "id": "2" + } + ]"###); }, ) .await; @@ -533,10 +560,19 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() { index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"]}), + json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); }, ) .await; @@ -575,10 +611,19 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() { index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"]}), + json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); }, ) .await; @@ -589,13 +634,22 @@ async fn nested_prefix_search_on_title_with_prefix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; - // Nested prefix search with wildcard prefix should return 2 documents (ids: 2 and 3). + // Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3). 
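[Editor's note on the assertion style these hunks adopt: rather than checking only the number of hits, each query now restricts retrieval to the id field ("attributesToRetrieve": ["id"]) and snapshots the hits themselves, so a failing test reports exactly which documents matched and in what order. A minimal before/after sketch using the snapshot! and json_string! macros already in scope in this file:

    // Before: a bare count; a failure only says "expected 2, got 3".
    snapshot!(response["hits"].as_array().unwrap().len(), @"2");
    // After: the matched ids (and their ranking order) are part of the snapshot.
    snapshot!(json_string!(response["hits"]), @r###"
    [
      {
        "id": "3"
      },
      {
        "id": "2"
      }
    ]"###);
]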
index .search( - json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"]}), + json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "2" + } + ]"###); }, ) .await; @@ -608,10 +662,22 @@ async fn nested_prefix_search_on_details_with_suffix_wildcard() { index .search( - json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"]}), + json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"3"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + }, + { + "id": "1" + }, + { + "id": "2" + } + ]"###); }, ) .await; @@ -622,13 +688,22 @@ async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; - // Deep wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3) + // Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3) index .search( - json!({"q": "mag", "attributesToSearchOn": ["details.*"]}), + json!({"q": "mag", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"2"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "1" + }, + { + "id": "3" + } + ]"###); }, ) .await; @@ -642,10 +717,16 @@ async fn nested_search_on_title_matching_strategy_all() { // Nested search matching strategy all should only return 1 document (ids: 3) index .search( - json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all"}), + json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all", "attributesToRetrieve": ["id"]}), |response, code| { snapshot!(code, @"200 OK"); - snapshot!(response["hits"].as_array().unwrap().len(), @"1"); + snapshot!(json_string!(response["hits"]), + @r###" + [ + { + "id": "3" + } + ]"###); }, ) .await; From 8c8d98eeaa839f71db7395a65c1801811e161432 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 21 May 2025 10:48:20 +0300 Subject: [PATCH 035/131] Use shared server and unique indices for all tests where possible Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/mod.rs | 64 +++++++++++++------------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index 2ac0d0400..70d76db0c 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -110,8 +110,8 @@ async fn simple_search() { /// See #[actix_rt::test] async fn bug_5547() { - let server = Server::new().await; - let index = server.index("big_fst"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, _code) = index.create(None).await; index.wait_task(response.uid()).await.succeeded(); @@ -130,8 +130,8 @@ async fn bug_5547() { #[actix_rt::test] async fn search_with_stop_word() { // related to https://github.com/meilisearch/meilisearch/issues/4984 - let server = Server::new().await; - let index = server.index("test"); + let server = 
Server::new_shared(); + let index = server.unique_index(); let (_, code) = index .update_settings(json!({"stopWords": ["the", "The", "a", "an", "to", "in", "of"]})) @@ -195,8 +195,8 @@ async fn search_with_stop_word() { #[actix_rt::test] async fn search_with_typo_settings() { // related to https://github.com/meilisearch/meilisearch/issues/5240 - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_, code) = index .update_settings(json!({"typoTolerance": { "disableOnAttributes": ["title", "id"]}})) @@ -229,8 +229,8 @@ async fn search_with_typo_settings() { #[actix_rt::test] async fn phrase_search_with_stop_word() { // related to https://github.com/meilisearch/meilisearch/issues/3521 - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_, code) = index.update_settings(json!({"stopWords": ["the", "of"]})).await; snapshot!(code, @"202 Accepted"); @@ -312,8 +312,8 @@ async fn non_negative_search() { #[actix_rt::test] async fn negative_special_cases_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -338,8 +338,8 @@ async fn negative_special_cases_search() { #[cfg(not(feature = "chinese-pinyin"))] #[actix_rt::test] async fn test_kanji_language_detection() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { "id": 0, "title": "The quick (\"brown\") fox can't jump 32.3 feet, right? Brr, it's 29.3°F!" }, @@ -360,8 +360,8 @@ async fn test_kanji_language_detection() { #[cfg(feature = "default")] #[actix_rt::test] async fn test_thai_language() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // We don't need documents, the issue is on the query side only. let documents = json!([ @@ -588,8 +588,8 @@ async fn search_facet_distribution() { #[actix_rt::test] async fn displayed_attributes() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({ "displayedAttributes": ["title"] })).await; @@ -605,8 +605,8 @@ async fn displayed_attributes() { #[actix_rt::test] async fn placeholder_search_is_hard_limited() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect(); let (task, _status_code) = index.add_documents(documents.into(), None).await; @@ -669,8 +669,8 @@ async fn placeholder_search_is_hard_limited() { #[actix_rt::test] async fn search_is_hard_limited() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" 
})).collect(); let (task, _status_code) = index.add_documents(documents.into(), None).await; @@ -737,8 +737,8 @@ async fn search_is_hard_limited() { #[actix_rt::test] async fn faceting_max_values_per_facet() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({ "filterableAttributes": ["number"] })).await; @@ -1034,8 +1034,8 @@ async fn test_score_threshold() { #[actix_rt::test] async fn test_degraded_score_details() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = NESTED_DOCUMENTS.clone(); @@ -1123,8 +1123,8 @@ async fn test_degraded_score_details() { #[cfg(feature = "default")] #[actix_rt::test] async fn camelcased_words() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // related to https://github.com/meilisearch/meilisearch/issues/3818 let documents = json!([ @@ -1349,8 +1349,8 @@ async fn camelcased_words() { #[actix_rt::test] async fn simple_search_with_strange_synonyms() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await; @@ -1418,8 +1418,8 @@ async fn simple_search_with_strange_synonyms() { #[actix_rt::test] async fn change_attributes_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({ "searchableAttributes": ["father", "mother"] })).await; @@ -1923,8 +1923,8 @@ async fn test_typo_settings() { /// Modifying facets with different casing should work correctly #[actix_rt::test] async fn change_facet_casing() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ From f888f876352abc0a4103e2d85a58c48b1e06f594 Mon Sep 17 00:00:00 2001 From: Lucas Black Date: Wed, 21 May 2025 02:07:25 -0700 Subject: [PATCH 036/131] Updated formatting using RustFmt --- crates/meilisearch/tests/search/restrict_searchable.rs | 3 ++- crates/milli/src/search/new/mod.rs | 4 +--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs index 2c8c86b5d..8ef5db26d 100644 --- a/crates/meilisearch/tests/search/restrict_searchable.rs +++ b/crates/meilisearch/tests/search/restrict_searchable.rs @@ -555,7 +555,8 @@ async fn nested_search_with_suffix_wildcard() { async fn nested_search_on_title_restricted_set_with_suffix_wildcard() { let server = Server::new().await; let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; - let (task, _status_code) = index.update_settings_searchable_attributes(json!(["details.title"])).await; + let (task, _status_code) = + index.update_settings_searchable_attributes(json!(["details.title"])).await; index.wait_task(task.uid()).await.succeeded(); index diff --git a/crates/milli/src/search/new/mod.rs b/crates/milli/src/search/new/mod.rs index dfe0ddfc9..0a3bc1b04 100644 --- a/crates/milli/src/search/new/mod.rs +++ 
b/crates/milli/src/search/new/mod.rs @@ -137,9 +137,7 @@ impl<'ctx> SearchContext<'ctx> { if searchable_weight.is_none() && field_name.contains("*") { let matching_searchable_weights: Vec<_> = searchable_fields_weights .iter() - .filter(|(name, _, _)| { - match_pattern(field_name, name) == PatternMatch::Match - }) + .filter(|(name, _, _)| match_pattern(field_name, name) == PatternMatch::Match) .collect(); if !matching_searchable_weights.is_empty() { From 1d5265caf412a64b5ab4e258117ae348914689ea Mon Sep 17 00:00:00 2001 From: mcmah309 Date: Thu, 22 May 2025 14:25:04 +0000 Subject: [PATCH 037/131] Fix typo in method name --- crates/meilisearch-types/src/settings.rs | 2 +- .../milli/src/search/new/tests/ngram_split_words.rs | 12 ++++++------ crates/milli/src/search/new/tests/typo.rs | 2 +- crates/milli/src/update/settings.rs | 2 +- crates/milli/src/update/test_settings.rs | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/meilisearch-types/src/settings.rs b/crates/meilisearch-types/src/settings.rs index ccf0d75ee..97003074f 100644 --- a/crates/meilisearch-types/src/settings.rs +++ b/crates/meilisearch-types/src/settings.rs @@ -666,7 +666,7 @@ pub fn apply_settings_to_builder( match typo_tolerance { Setting::Set(ref value) => { match value.enabled { - Setting::Set(val) => builder.set_autorize_typos(val), + Setting::Set(val) => builder.set_authorize_typos(val), Setting::Reset => builder.reset_authorize_typos(), Setting::NotSet => (), } diff --git a/crates/milli/src/search/new/tests/ngram_split_words.rs b/crates/milli/src/search/new/tests/ngram_split_words.rs index 8427dd65b..0a89899ed 100644 --- a/crates/milli/src/search/new/tests/ngram_split_words.rs +++ b/crates/milli/src/search/new/tests/ngram_split_words.rs @@ -72,7 +72,7 @@ fn test_2gram_simple() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -103,7 +103,7 @@ fn test_3gram_simple() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -153,7 +153,7 @@ fn test_no_disable_ngrams() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -179,7 +179,7 @@ fn test_2gram_prefix() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -208,7 +208,7 @@ fn test_3gram_prefix() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); @@ -260,7 +260,7 @@ fn test_disable_split_words() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); diff --git a/crates/milli/src/search/new/tests/typo.rs b/crates/milli/src/search/new/tests/typo.rs index 1bbe08977..8dd93b102 100644 --- a/crates/milli/src/search/new/tests/typo.rs +++ b/crates/milli/src/search/new/tests/typo.rs @@ -151,7 +151,7 @@ fn test_no_typo() { let index = create_index(); index .update_settings(|s| { - s.set_autorize_typos(false); + s.set_authorize_typos(false); }) .unwrap(); diff --git a/crates/milli/src/update/settings.rs b/crates/milli/src/update/settings.rs index 51d9aed27..bb965ba69 100644 --- a/crates/milli/src/update/settings.rs +++ b/crates/milli/src/update/settings.rs @@ -333,7 +333,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { self.primary_key = 
Setting::Set(primary_key); } - pub fn set_autorize_typos(&mut self, val: bool) { + pub fn set_authorize_typos(&mut self, val: bool) { self.authorize_typos = Setting::Set(val); } diff --git a/crates/milli/src/update/test_settings.rs b/crates/milli/src/update/test_settings.rs index 2b9ee3a5e..1adb96366 100644 --- a/crates/milli/src/update/test_settings.rs +++ b/crates/milli/src/update/test_settings.rs @@ -792,7 +792,7 @@ fn test_disable_typo() { index .update_settings_using_wtxn(&mut txn, |settings| { - settings.set_autorize_typos(false); + settings.set_authorize_typos(false); }) .unwrap(); From 34d58f35c84f08184cd49d81395f7b2b204048dd Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Sun, 25 May 2025 15:48:55 +0300 Subject: [PATCH 038/131] Print `[uuid]` instead of the Uuid index name for MeilisearchHttpError::Milli errors This way the tests' assertions/snapshots for unique indices would be stable Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/src/error.rs | 10 +++++++++- crates/meilisearch/tests/search/errors.rs | 2 +- crates/meilisearch/tests/search/mod.rs | 17 ++++------------- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/crates/meilisearch/src/error.rs b/crates/meilisearch/src/error.rs index b13eb8d7c..13800adc1 100644 --- a/crates/meilisearch/src/error.rs +++ b/crates/meilisearch/src/error.rs @@ -64,7 +64,7 @@ pub enum MeilisearchHttpError { #[error(transparent)] IndexScheduler(#[from] index_scheduler::Error), #[error("{}", match .index_name { - Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name), + Some(name) if !name.is_empty() => format!("Index `{}`: {error}", MeilisearchHttpError::index_name(name)), _ => format!("{error}") })] Milli { error: milli::Error, index_name: Option }, @@ -84,6 +84,14 @@ impl MeilisearchHttpError { pub(crate) fn from_milli(error: milli::Error, index_name: Option) -> Self { Self::Milli { error, index_name } } + + fn index_name(index_name: &str) -> &str { + if let Ok(_) = uuid::Uuid::parse_str(index_name) { + "[uuid]" + } else { + index_name + } + } } impl ErrorCode for MeilisearchHttpError { diff --git a/crates/meilisearch/tests/search/errors.rs b/crates/meilisearch/tests/search/errors.rs index 2b63a07b1..bce03f56b 100644 --- a/crates/meilisearch/tests/search/errors.rs +++ b/crates/meilisearch/tests/search/errors.rs @@ -729,7 +729,7 @@ async fn filter_invalid_attribute_string() { |response, code| { snapshot!(response, @r###" { - "message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass", + "message": "Index `[uuid]`: Attribute `many` is not filterable. 
Available filterable attribute patterns are: `title`.\n1:5 many = Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index 70d76db0c..2cc882c71 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -17,11 +17,9 @@ mod restrict_searchable; mod search_queue; use meili_snap::{json_string, snapshot}; -use meilisearch::Opt; -use tempfile::TempDir; use crate::common::{ - default_settings, shared_index_with_documents, shared_index_with_nested_documents, + shared_index_with_documents, shared_index_with_nested_documents, shared_index_with_score_documents, Server, Value, DOCUMENTS, FRUITS_DOCUMENTS, NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS, }; @@ -33,11 +31,10 @@ async fn test_settings_documents_indexing_swapping_and_search( query: &Value, test: impl Fn(Value, actix_http::StatusCode) + std::panic::UnwindSafe + Clone, ) { - let temp = TempDir::new().unwrap(); - let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap(); + let server = Server::new_shared(); eprintln!("Documents -> Settings -> test"); - let index = server.index("test"); + let index = server.unique_index(); let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{task}"); @@ -48,12 +45,9 @@ async fn test_settings_documents_indexing_swapping_and_search( index.wait_task(task.uid()).await.succeeded(); index.search(query.clone(), test.clone()).await; - let (task, code) = server.delete_index("test").await; - assert_eq!(code, 202, "{task}"); - server.wait_task(task.uid()).await.succeeded(); eprintln!("Settings -> Documents -> test"); - let index = server.index("test"); + let index = server.unique_index(); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{task}"); @@ -64,9 +58,6 @@ async fn test_settings_documents_indexing_swapping_and_search( index.wait_task(task.uid()).await.succeeded(); index.search(query.clone(), test.clone()).await; - let (task, code) = server.delete_index("test").await; - assert_eq!(code, 202, "{task}"); - server.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] From 24e94b28c1c4ae30e4d3256f3af896b8446ba722 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 09:22:20 +0200 Subject: [PATCH 039/131] feat: uncouple geo extraction from full doc --- .../new/extract/faceted/extract_facets.rs | 248 ++++++++++++------ .../new/extract/faceted/facet_document.rs | 29 +- 2 files changed, 188 insertions(+), 89 deletions(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 01cfe338f..aa7510863 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -8,7 +8,7 @@ use hashbrown::HashMap; use serde_json::Value; use super::super::cache::BalancedCaches; -use super::facet_document::extract_document_facets; +use super::facet_document::{extract_document_facets, extract_geo_document}; use super::FacetKind; use crate::fields_ids_map::metadata::Metadata; use crate::filterable_attributes_rules::match_faceted_field; @@ -90,53 +90,8 @@ impl FacetedDocidsExtractor { let mut cached_sorter = context.data.borrow_mut_or_yield(); let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc); let 
docid = document_change.docid(); - let res = match document_change { - DocumentChange::Deletion(inner) => extract_document_facets( - inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - is_geo_enabled, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_del_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_del, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, - ), - DocumentChange::Update(inner) => { - let has_changed = inner.has_changed_for_fields( - &mut |field_name| { - match_faceted_field( - field_name, - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - ) - }, - rtxn, - index, - context.db_fields_ids_map, - )?; - let has_changed_for_geo_fields = - inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; - if !has_changed && !has_changed_for_geo_fields { - return Ok(()); - } - + match document_change { + DocumentChange::Deletion(inner) => { extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), @@ -145,7 +100,6 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - is_geo_enabled, &mut |fid, meta, depth, value| { Self::facet_fn_with_options( &context.doc_alloc, @@ -163,15 +117,155 @@ impl FacetedDocidsExtractor { }, )?; + if is_geo_enabled { + extract_geo_document( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + asc_desc_fields, + &mut |fid, meta, depth, value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_del_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_del, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + }, + )?; + } + } + DocumentChange::Update(inner) => { + let has_changed = inner.has_changed_for_fields( + &mut |field_name| { + match_faceted_field( + field_name, + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + ) + }, + rtxn, + index, + context.db_fields_ids_map, + )?; + let has_changed_for_geo_fields = + inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; + + if has_changed { + extract_document_facets( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + &mut |fid, meta, depth, value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_del_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_del, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + }, + )?; + + extract_document_facets( + inner.merged(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + filterable_attributes, + sortable_fields, + asc_desc_fields, + distinct_field, + &mut |fid, meta, depth, value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_add_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_add, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + }, + )?; + } + + if is_geo_enabled && 
has_changed_for_geo_fields{ + extract_geo_document( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + asc_desc_fields, + &mut |fid, meta, depth, value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_del_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_del, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + }, + )?; + + extract_geo_document( + inner.merged(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + asc_desc_fields, + &mut |fid, meta, depth, value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_add_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_add, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + }, + )?; + } + } + DocumentChange::Insertion(inner) => { extract_document_facets( - inner.merged(rtxn, index, context.db_fields_ids_map)?, + inner.inserted(), inner.external_document_id(), new_fields_ids_map.deref_mut(), filterable_attributes, sortable_fields, asc_desc_fields, distinct_field, - is_geo_enabled, &mut |fid, meta, depth, value| { Self::facet_fn_with_options( &context.doc_alloc, @@ -187,37 +281,35 @@ impl FacetedDocidsExtractor { value, ) }, - ) + )?; + if is_geo_enabled { + extract_geo_document( + inner.inserted(), + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + asc_desc_fields, + &mut |fid, meta, depth, value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_add_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_add, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + }, + )?; + } } - DocumentChange::Insertion(inner) => extract_document_facets( - inner.inserted(), - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - filterable_attributes, - sortable_fields, - asc_desc_fields, - distinct_field, - is_geo_enabled, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_add_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_add, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, - ), }; del_add_facet_value.send_data(docid, sender, &context.doc_alloc).unwrap(); - res + Ok(()) } #[allow(clippy::too_many_arguments)] diff --git a/crates/milli/src/update/new/extract/faceted/facet_document.rs b/crates/milli/src/update/new/extract/faceted/facet_document.rs index e74131402..30a5c462e 100644 --- a/crates/milli/src/update/new/extract/faceted/facet_document.rs +++ b/crates/milli/src/update/new/extract/faceted/facet_document.rs @@ -22,7 +22,6 @@ pub fn extract_document_facets<'doc>( sortable_fields: &HashSet, asc_desc_fields: &HashSet, distinct_field: &Option, - is_geo_enabled: bool, facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>, ) -> Result<()> { // return the match result for the given field name. @@ -102,17 +101,25 @@ pub fn extract_document_facets<'doc>( } } - if is_geo_enabled { - if let Some(geo_value) = document.geo_field()? { - if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? 
{ - let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map - .id_with_metadata_or_insert("_geo.lat") - .zip(field_id_map.id_with_metadata_or_insert("_geo.lng")) - .ok_or(UserError::AttributeLimitReached)?; + Ok(()) +} - facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?; - facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?; - } +pub fn extract_geo_document<'doc>( + document: impl Document<'doc>, + external_document_id: &str, + field_id_map: &mut GlobalFieldsIdsMap, + asc_desc_fields: &HashSet, + facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>, +) -> Result<()> { + if let Some(geo_value) = document.geo_field()? { + if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? { + let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map + .id_with_metadata_or_insert("_geo.lat") + .zip(field_id_map.id_with_metadata_or_insert("_geo.lng")) + .ok_or(UserError::AttributeLimitReached)?; + + facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?; + facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?; } } From f690fa068631e8f324c37e1a7aeccb69d642288e Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 09:46:14 +0200 Subject: [PATCH 040/131] feat: add macro_rules to factorize --- .../new/extract/faceted/extract_facets.rs | 207 ++++++------------ 1 file changed, 72 insertions(+), 135 deletions(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index aa7510863..241e0fd69 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -90,8 +90,48 @@ impl FacetedDocidsExtractor { let mut cached_sorter = context.data.borrow_mut_or_yield(); let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc); let docid = document_change.docid(); + + macro_rules! 
facet_fn_factory { + (del) => { + |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_del_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_del, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + } + }; + (add) => { + |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { + Self::facet_fn_with_options( + &context.doc_alloc, + cached_sorter.deref_mut(), + BalancedCaches::insert_add_u32, + &mut del_add_facet_value, + DelAddFacetValue::insert_add, + docid, + fid, + meta, + filterable_attributes, + depth, + value, + ) + } + }; + } + match document_change { DocumentChange::Deletion(inner) => { + let mut del = facet_fn_factory!(del); + extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), @@ -100,21 +140,7 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_del_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_del, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, + &mut del, )?; if is_geo_enabled { @@ -123,21 +149,7 @@ impl FacetedDocidsExtractor { inner.external_document_id(), new_fields_ids_map.deref_mut(), asc_desc_fields, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_del_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_del, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, + &mut del, )?; } } @@ -160,6 +172,9 @@ impl FacetedDocidsExtractor { inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; if has_changed { + // 1. 
Delete old facet values + let mut del = facet_fn_factory!(del); + extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), @@ -168,23 +183,21 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_del_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_del, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, + &mut del, )?; + if is_geo_enabled && has_changed_for_geo_fields { + extract_geo_document( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + asc_desc_fields, + &mut del, + )?; + } + + let mut add = facet_fn_factory!(add); + extract_document_facets( inner.merged(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), @@ -193,71 +206,23 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_add_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_add, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, - )?; - } - - if is_geo_enabled && has_changed_for_geo_fields{ - extract_geo_document( - inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - asc_desc_fields, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_del_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_del, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, + &mut add, )?; - extract_geo_document( - inner.merged(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - asc_desc_fields, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_add_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_add, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, - )?; + if is_geo_enabled && has_changed_for_geo_fields { + extract_geo_document( + inner.merged(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + asc_desc_fields, + &mut add, + )?; + } } } DocumentChange::Insertion(inner) => { + let mut add = facet_fn_factory!(add); + extract_document_facets( inner.inserted(), inner.external_document_id(), @@ -266,21 +231,7 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_add_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_add, - docid, - fid, - meta, - filterable_attributes, - depth, - value, - ) - }, + &mut add, )?; if is_geo_enabled { extract_geo_document( @@ -288,21 +239,7 @@ impl FacetedDocidsExtractor { inner.external_document_id(), new_fields_ids_map.deref_mut(), asc_desc_fields, - &mut |fid, meta, depth, value| { - Self::facet_fn_with_options( - &context.doc_alloc, - cached_sorter.deref_mut(), - BalancedCaches::insert_add_u32, - &mut del_add_facet_value, - DelAddFacetValue::insert_add, - docid, - 
fid, - meta, - filterable_attributes, - depth, - value, - ) - }, + &mut add, )?; } } From 95821d0bdebed6e9762183ddd46cce6d4c453adb Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 10:07:13 +0200 Subject: [PATCH 041/131] refactor: update macro --- .../update/new/extract/faceted/extract_facets.rs | 16 +++++++--------- .../update/new/extract/faceted/facet_document.rs | 1 - 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 241e0fd69..de0edc164 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -91,7 +91,8 @@ impl FacetedDocidsExtractor { let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc); let docid = document_change.docid(); - macro_rules! facet_fn_factory { + // Macro expanding to an insertion/deletion facet fn + macro_rules! facet_fn { (del) => { |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { Self::facet_fn_with_options( @@ -130,7 +131,7 @@ impl FacetedDocidsExtractor { match document_change { DocumentChange::Deletion(inner) => { - let mut del = facet_fn_factory!(del); + let mut del = facet_fn!(del); extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, @@ -148,7 +149,6 @@ impl FacetedDocidsExtractor { inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), new_fields_ids_map.deref_mut(), - asc_desc_fields, &mut del, )?; } @@ -173,7 +173,7 @@ impl FacetedDocidsExtractor { if has_changed { // 1. Delete old facet values - let mut del = facet_fn_factory!(del); + let mut del = facet_fn!(del); extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, @@ -191,12 +191,12 @@ impl FacetedDocidsExtractor { inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), new_fields_ids_map.deref_mut(), - asc_desc_fields, &mut del, )?; } - let mut add = facet_fn_factory!(add); + // 2. Insert new facet values + let mut add = facet_fn!(add); extract_document_facets( inner.merged(rtxn, index, context.db_fields_ids_map)?, @@ -214,14 +214,13 @@ impl FacetedDocidsExtractor { inner.merged(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), new_fields_ids_map.deref_mut(), - asc_desc_fields, &mut add, )?; } } } DocumentChange::Insertion(inner) => { - let mut add = facet_fn_factory!(add); + let mut add = facet_fn!(add); extract_document_facets( inner.inserted(), @@ -238,7 +237,6 @@ impl FacetedDocidsExtractor { inner.inserted(), inner.external_document_id(), new_fields_ids_map.deref_mut(), - asc_desc_fields, &mut add, )?; } diff --git a/crates/milli/src/update/new/extract/faceted/facet_document.rs b/crates/milli/src/update/new/extract/faceted/facet_document.rs index 30a5c462e..68bc98b64 100644 --- a/crates/milli/src/update/new/extract/faceted/facet_document.rs +++ b/crates/milli/src/update/new/extract/faceted/facet_document.rs @@ -108,7 +108,6 @@ pub fn extract_geo_document<'doc>( document: impl Document<'doc>, external_document_id: &str, field_id_map: &mut GlobalFieldsIdsMap, - asc_desc_fields: &HashSet, facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>, ) -> Result<()> { if let Some(geo_value) = document.geo_field()? 
{ From f54b57e5be33258cb8ecd024626bd054f48355c2 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 26 May 2025 13:39:15 +0300 Subject: [PATCH 042/131] Use a Regex in insta dynamic redaction to replace Uuids with [uuid] (cherry picked from commit f8b8c6ab71a28052cf9b271ca8aa5d4175f9e8f9) Signed-off-by: Martin Tzvetanov Grigorov --- Cargo.lock | 2646 +++++++++++---------- crates/meili-snap/Cargo.toml | 1 + crates/meili-snap/src/lib.rs | 21 + crates/meilisearch/src/error.rs | 10 +- crates/meilisearch/tests/search/errors.rs | 2 +- 5 files changed, 1399 insertions(+), 1281 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5b23f7e83..c73af51d9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,11 +4,11 @@ version = 4 [[package]] name = "actix-codec" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "617a8268e3537fe1d8c9ead925fca49ef6400927ee7bc26750e90ecee14ce4b8" +checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.1", "bytes", "futures-core", "futures-sink", @@ -21,9 +21,9 @@ dependencies = [ [[package]] name = "actix-cors" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e772b3bcafe335042b5db010ab7c09013dad6eac4915c91d8d50902769f331" +checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d" dependencies = [ "actix-utils", "actix-web", @@ -36,27 +36,27 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.9.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d48f96fc3003717aeb9856ca3d02a8c7de502667ad76eeacd830b48d2e91fac4" +checksum = "44dfe5c9e0004c623edc65391dfd51daa201e7e30ebd9c9bedf873048ec32bc2" dependencies = [ "actix-codec", "actix-rt", "actix-service", "actix-tls", "actix-utils", - "ahash 0.8.11", "base64 0.22.1", - "bitflags 2.9.0", - "brotli", + "bitflags 2.9.1", + "brotli 8.0.1", "bytes", "bytestring", "derive_more", "encoding_rs", "flate2", + "foldhash", "futures-core", "h2 0.3.26", - "http 0.2.11", + "http 0.2.12", "httparse", "httpdate", "itoa", @@ -65,7 +65,7 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rand", + "rand 0.9.1", "sha1", "smallvec", "tokio", @@ -80,7 +80,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -91,7 +91,7 @@ checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" dependencies = [ "bytestring", "cfg-if", - "http 0.2.11", + "http 0.2.12", "regex-lite", "serde", "tracing", @@ -110,30 +110,28 @@ dependencies = [ [[package]] name = "actix-server" -version = "2.2.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e8613a75dd50cc45f473cee3c34d59ed677c0f7b44480ce3b8247d7dc519327" +checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" dependencies = [ "actix-rt", "actix-service", "actix-utils", "futures-core", "futures-util", - "mio 0.8.11", - "num_cpus", - "socket2 0.4.9", + "mio", + "socket2", "tokio", "tracing", ] [[package]] name = "actix-service" -version = "2.0.2" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a" +checksum = 
"9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" dependencies = [ "futures-core", - "paste", "pin-project-lite", ] @@ -168,9 +166,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.9.0" +version = "4.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9180d76e5cc7ccbc4d60a506f2c727730b154010262df5b910eb17dbe4b8cb38" +checksum = "a597b77b5c6d6a1e1097fddde329a83665e25c5437c696a3a9a4aa514a614dea" dependencies = [ "actix-codec", "actix-http", @@ -182,13 +180,13 @@ dependencies = [ "actix-tls", "actix-utils", "actix-web-codegen", - "ahash 0.8.11", "bytes", "bytestring", "cfg-if", "cookie", "derive_more", "encoding_rs", + "foldhash", "futures-core", "futures-util", "impl-more", @@ -203,8 +201,9 @@ dependencies = [ "serde_json", "serde_urlencoded", "smallvec", - "socket2 0.5.5", + "socket2", "time", + "tracing", "url", ] @@ -217,30 +216,30 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "addr2line" -version = "0.20.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + [[package]] name = "aes" version = "0.8.4" @@ -258,20 +257,20 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "const-random", - "getrandom 0.2.15", + "getrandom 0.3.3", "once_cell", "version_check", "zerocopy", @@ -315,15 +314,16 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.13" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] @@ -335,37 +335,38 @@ checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = 
"3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.1" +version = "3.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" +checksum = "6680de5231bd6ee4c6191b8a1325daa282b415391ec9d3a37bd34f2060dc73fa" dependencies = [ "anstyle", - "windows-sys 0.48.0", + "once_cell_polyfill", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" dependencies = [ "backtrace", ] @@ -387,9 +388,9 @@ dependencies = [ [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "arroy" @@ -405,11 +406,11 @@ dependencies = [ "nohash", "ordered-float", "page_size", - "rand", + "rand 0.8.5", "rayon", "roaring", "tempfile", - "thiserror 2.0.9", + "thiserror 2.0.12", "tracing", ] @@ -425,13 +426,13 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -442,23 +443,23 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" -version = "0.3.68" +version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.2", + "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -491,15 +492,15 @@ dependencies = [ "anyhow", "bumpalo", "bytes", - "convert_case 0.6.0", + "convert_case", "criterion", "csv", "flate2", "memmap2", "milli", "mimalloc", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "reqwest", "roaring", "serde_json", @@ -536,7 +537,7 @@ version = "0.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "cexpr", "clang-sys", "itertools 
0.13.0", @@ -545,7 +546,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -582,9 +583,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" dependencies = [ "serde", ] @@ -621,9 +622,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.5.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce" dependencies = [ "borsh-derive", "cfg_aliases", @@ -631,16 +632,15 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.5.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" +checksum = "fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3" dependencies = [ "once_cell", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", - "syn_derive", + "syn 2.0.101", ] [[package]] @@ -651,14 +651,35 @@ checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", - "brotli-decompressor", + "brotli-decompressor 4.0.3", +] + +[[package]] +name = "brotli" +version = "8.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor 5.0.0", ] [[package]] name = "brotli-decompressor" -version = "4.0.1" +version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" +checksum = "a334ef7c9e23abf0ce748e8cd309037da93e606ad52eb372e4ce327a0dcfbdfd" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -666,9 +687,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.11.3" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" +checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" dependencies = [ "memchr", "regex-automata", @@ -686,9 +707,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" dependencies = [ "allocator-api2", "serde", @@ -703,7 +724,7 @@ dependencies = [ "allocator-api2", "bitpacking", "bumpalo", - "hashbrown 0.15.2", + "hashbrown 0.15.3", "serde", "serde_json", ] @@ -743,15 +764,15 @@ dependencies = [ [[package]] name = "bytecount" -version = "0.6.3" +version = "0.6.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" +checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" [[package]] name = "bytemuck" -version = "1.21.0" +version = "1.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" +checksum = "9134a6ef01ce4b366b50689c94f82c14bc72bc5d0386829828a2e2752ef7958c" dependencies = [ "bytemuck_derive", ] @@ -764,7 +785,7 @@ checksum = "7ecc273b49b3205b83d648f0690daa588925572cc5063745bfe547fe7ec8e1a1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -775,15 +796,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "bytestring" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "238e4886760d98c4f899360c834fa93e62cf7f721ac3c2da375cbdf4b8679aae" +checksum = "e465647ae23b2823b0753f50decb2d5a86d2bb2cac04788fafd1f80e45378e5f" dependencies = [ "bytes", ] @@ -809,55 +830,55 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.6" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" dependencies = [ "serde", ] [[package]] name = "candle-core" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "855dfedff437d2681d68e1f34ae559d88b0dd84aa5a6b63f2c8e75ebdd875bbf" +checksum = "06ccf5ee3532e66868516d9b315f73aec9f34ea1a37ae98514534d458915dbf1" dependencies = [ "byteorder", "candle-kernels", "cudarc", - "gemm", - "half 2.4.1", + "gemm 0.17.1", + "half", "memmap2", "num-traits", "num_cpus", - "rand", + "rand 0.9.1", "rand_distr", "rayon", "safetensors", "thiserror 1.0.69", "ug", "ug-cuda", - "yoke", + "yoke 0.7.5", "zip 1.1.4", ] [[package]] name = "candle-kernels" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53343628fa470b7075c28c589b98735b4220b464e37ddbb8e117040e199f4787" +checksum = "a10885bd902fad1b8518ba2b22369aaed88a3d94e123533ad3ca73db33b1c8ca" dependencies = [ "bindgen_cuda", ] [[package]] name = "candle-nn" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd3c6b2ee0dfd64af12ae5b07e4b7c517898981cdaeffcb10b71d7dd5c8f359" +checksum = "be1160c3b63f47d40d91110a3e1e1e566ae38edddbbf492a60b40ffc3bc1ff38" dependencies = [ "candle-core", - "half 2.4.1", + "half", "num-traits", "rayon", "safetensors", @@ -867,16 +888,16 @@ dependencies = [ [[package]] name = "candle-transformers" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4270cc692c4a3df2051c2e8c3c4da3a189746af7ca3a547b99ecd335582b92e1" +checksum = "94a0900d49f8605e0e7e6693a1f560e6271279de98e5fa369e7abf3aac245020" dependencies = [ "byteorder", "candle-core", "candle-nn", "fancy-regex", "num-traits", - "rand", + "rand 0.9.1", 
"rayon", "serde", "serde_json", @@ -886,25 +907,25 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.6" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceed8ef69d8518a5dda55c07425450b58a4e1946f4951eab6d7191ee86c2443d" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] [[package]] name = "cargo_metadata" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8769706aad5d996120af43197bf46ef6ad0fda35216b4505f926a365a232d924" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", "semver", "serde", "serde_json", - "thiserror 2.0.9", + "thiserror 2.0.12", ] [[package]] @@ -925,9 +946,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.16" +version = "1.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" +checksum = "16595d3be041c03b09d08d0858631facccee9221e579704070e6e9e4915d3bc7" dependencies = [ "jobserver", "libc", @@ -976,9 +997,9 @@ dependencies = [ [[package]] name = "charabia" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650d52f87a36472ea1c803dee49d6bfd23d426efa9363e2f4c4a0e6a236d3407" +checksum = "4da3b398d57d5526189869b32ac0b4f7fb436f490f47a2a19685cee634df72d2" dependencies = [ "aho-corasick", "csv", @@ -998,9 +1019,9 @@ dependencies = [ [[package]] name = "ciborium" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" dependencies = [ "ciborium-io", "ciborium-ll", @@ -1009,18 +1030,18 @@ dependencies = [ [[package]] name = "ciborium-io" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" [[package]] name = "ciborium-ll" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" dependencies = [ "ciborium-io", - "half 1.8.2", + "half", ] [[package]] @@ -1035,9 +1056,9 @@ dependencies = [ [[package]] name = "clang-sys" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67523a3b4be3ce1989d607a828d036249522dd9c1c8de7f4dd2dae43a37369d1" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" dependencies = [ "glob", "libc", @@ -1046,9 +1067,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.24" +version = "4.5.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9560b07a799281c7e0958b9296854d6fafd4c5f31444a7e5bb1ad6dde5ccf1bd" +checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" dependencies = [ "clap_builder", "clap_derive", @@ -1056,9 +1077,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.24" +version = "4.5.38" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "874e0dd3eb68bf99058751ac9712f622e61e6f393a94f7128fa26e3f02f5c7cd" +checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" dependencies = [ "anstream", "anstyle", @@ -1068,14 +1089,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -1086,9 +1107,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "color-spantrace" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +checksum = "2ddd8d5bfda1e11a501d0a7303f3bfed9aa632ebdb859be40d0fd70478ed70d5" dependencies = [ "once_cell", "owo-colors", @@ -1098,9 +1119,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "concat-arrays" @@ -1115,15 +1136,15 @@ dependencies = [ [[package]] name = "console" -version = "0.15.7" +version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" dependencies = [ "encode_unicode", - "lazy_static", "libc", + "once_cell", "unicode-width", - "windows-sys 0.45.0", + "windows-sys 0.59.0", ] [[package]] @@ -1141,22 +1162,16 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "tiny-keccak", ] [[package]] name = "constant_time_eq" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" - -[[package]] -name = "convert_case" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "convert_case" @@ -1185,19 +1200,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] -name = "cpufeatures" -version = "0.2.12" +name = "core2" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ 
"libc", ] [[package]] name = "crc" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" dependencies = [ "crc-catalog", ] @@ -1264,9 +1288,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -1283,24 +1307,24 @@ dependencies = [ [[package]] name = "crossbeam-queue" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" [[package]] name = "crypto-common" @@ -1326,20 +1350,20 @@ dependencies = [ [[package]] name = "csv-core" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" dependencies = [ "memchr", ] [[package]] name = "cudarc" -version = "0.12.2" +version = "0.13.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd76de2aa3a7bdb9a65941ea5a3c688d941688f736a81b2fc5beb88747a7f25" +checksum = "486c221362668c63a1636cfa51463b09574433b39029326cff40864b3ba12b6e" dependencies = [ - "half 2.4.1", + "half", "libloading", ] @@ -1355,12 +1379,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core 0.20.10", - "darling_macro 0.20.10", + "darling_core 0.20.11", + "darling_macro 0.20.11", ] [[package]] @@ -1379,16 +1403,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -1404,15 +1428,21 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core 0.20.10", + "darling_core 0.20.11", "quote", - "syn 2.0.87", + "syn 2.0.101", ] +[[package]] +name = "dary_heap" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728" + [[package]] name = "deadpool" version = "0.10.0" @@ -1448,9 +1478,9 @@ checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" [[package]] name = "deranged" -version = "0.3.11" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", "serde", @@ -1464,7 +1494,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -1503,10 +1533,10 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -1526,20 +1556,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core 0.20.2", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "derive_more" -version = "0.99.17" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ - "convert_case 0.4.0", "proc-macro2", "quote", - "rustc_version", - "syn 1.0.109", + "syn 2.0.101", + "unicode-xid", ] [[package]] @@ -1565,10 +1603,10 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aadef696fce456c704f10186def1bdc0a40e646c9f4f18cf091477acadb731d8" dependencies = [ - "convert_case 0.6.0", + "convert_case", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -1632,15 +1670,9 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] -[[package]] -name = "doc-comment" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" - [[package]] name = "doxygen-rs" version = "0.4.2" @@ -1657,7 +1689,7 @@ dependencies = [ "anyhow", "big_s", "flate2", - "http 1.2.0", + "http 1.3.1", "maplit", "meili-snap", "meilisearch-types", @@ -1668,7 +1700,7 @@ dependencies = [ "serde_json", "tar", "tempfile", - "thiserror 2.0.9", + "thiserror 2.0.12", "time", "tracing", "uuid", @@ -1685,19 +1717,28 @@ dependencies = [ ] [[package]] -name = "either" 
-version = "1.13.0" +name = "dyn-stack" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "490bd48eb68fffcfed519b4edbfd82c69cbe741d175b84f0e0cbe8c57cbe0bdd" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" dependencies = [ "serde", ] [[package]] name = "encode_unicode" -version = "0.3.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoding" @@ -1765,9 +1806,9 @@ checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" [[package]] name = "encoding_rs" -version = "0.8.33" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1783,14 +1824,14 @@ dependencies = [ [[package]] name = "enum-as-inner" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" dependencies = [ - "heck 0.4.1", + "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -1810,23 +1851,23 @@ checksum = "a1ab991c1362ac86c61ab6f556cff143daa22e5a15e4e189df818b2fd19fe65b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1857,21 +1898,21 @@ name = "file-store" version = "1.15.0" dependencies = [ "tempfile", - "thiserror 2.0.9", + "thiserror 2.0.12", "tracing", "uuid", ] [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "libredox", + "windows-sys 0.59.0", ] [[package]] @@ -1886,12 +1927,12 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", - "miniz_oxide 
0.8.2", + "miniz_oxide", ] [[package]] @@ -1919,9 +1960,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "form_urlencoded" @@ -2000,7 +2041,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -2063,7 +2104,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce20bbb48248608ba4908b45fe36e17e40f56f8c6bb385ecf5d3c4a1e8b05a22" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "debugid", "fxhash", "serde", @@ -2077,17 +2118,37 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ab24cc62135b40090e31a76a9b2766a501979f3070fa27f689c27ec04377d32" dependencies = [ - "dyn-stack", - "gemm-c32", - "gemm-c64", - "gemm-common", - "gemm-f16", - "gemm-f32", - "gemm-f64", + "dyn-stack 0.10.0", + "gemm-c32 0.17.1", + "gemm-c64 0.17.1", + "gemm-common 0.17.1", + "gemm-f16 0.17.1", + "gemm-f32 0.17.1", + "gemm-f64 0.17.1", "num-complex", "num-traits", "paste", - "raw-cpuid", + "raw-cpuid 10.7.0", + "seq-macro", +] + +[[package]] +name = "gemm" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab96b703d31950f1aeddded248bc95543c9efc7ac9c4a21fda8703a83ee35451" +dependencies = [ + "dyn-stack 0.13.0", + "gemm-c32 0.18.2", + "gemm-c64 0.18.2", + "gemm-common 0.18.2", + "gemm-f16 0.18.2", + "gemm-f32 0.18.2", + "gemm-f64 0.18.2", + "num-complex", + "num-traits", + "paste", + "raw-cpuid 11.5.0", "seq-macro", ] @@ -2097,12 +2158,27 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9c030d0b983d1e34a546b86e08f600c11696fde16199f971cd46c12e67512c0" dependencies = [ - "dyn-stack", - "gemm-common", + "dyn-stack 0.10.0", + "gemm-common 0.17.1", "num-complex", "num-traits", "paste", - "raw-cpuid", + "raw-cpuid 10.7.0", + "seq-macro", +] + +[[package]] +name = "gemm-c32" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6db9fd9f40421d00eea9dd0770045a5603b8d684654816637732463f4073847" +dependencies = [ + "dyn-stack 0.13.0", + "gemm-common 0.18.2", + "num-complex", + "num-traits", + "paste", + "raw-cpuid 11.5.0", "seq-macro", ] @@ -2112,12 +2188,27 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbb5f2e79fefb9693d18e1066a557b4546cd334b226beadc68b11a8f9431852a" dependencies = [ - "dyn-stack", - "gemm-common", + "dyn-stack 0.10.0", + "gemm-common 0.17.1", "num-complex", "num-traits", "paste", - "raw-cpuid", + "raw-cpuid 10.7.0", + "seq-macro", +] + +[[package]] +name = "gemm-c64" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfcad8a3d35a43758330b635d02edad980c1e143dc2f21e6fd25f9e4eada8edf" +dependencies = [ + "dyn-stack 0.13.0", + "gemm-common 0.18.2", + "num-complex", + "num-traits", + "paste", + "raw-cpuid 11.5.0", "seq-macro", ] @@ -2128,17 +2219,38 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2e7ea062c987abcd8db95db917b4ffb4ecdfd0668471d8dc54734fdff2354e8" dependencies = [ "bytemuck", - 
"dyn-stack", - "half 2.4.1", + "dyn-stack 0.10.0", + "half", "num-complex", "num-traits", "once_cell", "paste", - "pulp", - "raw-cpuid", + "pulp 0.18.22", + "raw-cpuid 10.7.0", "rayon", "seq-macro", - "sysctl", + "sysctl 0.5.5", +] + +[[package]] +name = "gemm-common" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a352d4a69cbe938b9e2a9cb7a3a63b7e72f9349174a2752a558a8a563510d0f3" +dependencies = [ + "bytemuck", + "dyn-stack 0.13.0", + "half", + "libm", + "num-complex", + "num-traits", + "once_cell", + "paste", + "pulp 0.21.5", + "raw-cpuid 11.5.0", + "rayon", + "seq-macro", + "sysctl 0.6.0", ] [[package]] @@ -2147,14 +2259,32 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ca4c06b9b11952071d317604acb332e924e817bd891bec8dfb494168c7cedd4" dependencies = [ - "dyn-stack", - "gemm-common", - "gemm-f32", - "half 2.4.1", + "dyn-stack 0.10.0", + "gemm-common 0.17.1", + "gemm-f32 0.17.1", + "half", "num-complex", "num-traits", "paste", - "raw-cpuid", + "raw-cpuid 10.7.0", + "rayon", + "seq-macro", +] + +[[package]] +name = "gemm-f16" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff95ae3259432f3c3410eaa919033cd03791d81cebd18018393dc147952e109" +dependencies = [ + "dyn-stack 0.13.0", + "gemm-common 0.18.2", + "gemm-f32 0.18.2", + "half", + "num-complex", + "num-traits", + "paste", + "raw-cpuid 11.5.0", "rayon", "seq-macro", ] @@ -2165,12 +2295,27 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9a69f51aaefbd9cf12d18faf273d3e982d9d711f60775645ed5c8047b4ae113" dependencies = [ - "dyn-stack", - "gemm-common", + "dyn-stack 0.10.0", + "gemm-common 0.17.1", "num-complex", "num-traits", "paste", - "raw-cpuid", + "raw-cpuid 10.7.0", + "seq-macro", +] + +[[package]] +name = "gemm-f32" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc8d3d4385393304f407392f754cd2dc4b315d05063f62cf09f47b58de276864" +dependencies = [ + "dyn-stack 0.13.0", + "gemm-common 0.18.2", + "num-complex", + "num-traits", + "paste", + "raw-cpuid 11.5.0", "seq-macro", ] @@ -2180,12 +2325,27 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa397a48544fadf0b81ec8741e5c0fba0043008113f71f2034def1935645d2b0" dependencies = [ - "dyn-stack", - "gemm-common", + "dyn-stack 0.10.0", + "gemm-common 0.17.1", "num-complex", "num-traits", "paste", - "raw-cpuid", + "raw-cpuid 10.7.0", + "seq-macro", +] + +[[package]] +name = "gemm-f64" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35b2a4f76ce4b8b16eadc11ccf2e083252d8237c1b589558a49b0183545015bd" +dependencies = [ + "dyn-stack 0.13.0", + "gemm-common 0.18.2", + "num-complex", + "num-traits", + "paste", + "raw-cpuid 11.5.0", "seq-macro", ] @@ -2207,9 +2367,9 @@ checksum = "36d244a08113319b5ebcabad2b8b7925732d15eec46d7e7ac3c11734f3b7a6ad" [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", @@ -2220,31 +2380,31 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.13.3+wasi-0.2.2", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", "wasm-bindgen", - "windows-targets 0.52.6", ] [[package]] name = "gimli" -version = "0.27.3" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "git2" -version = "0.19.0" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" +checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "libc", "libgit2-sys", "log", @@ -2253,9 +2413,9 @@ dependencies = [ [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "grenad" @@ -2281,7 +2441,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.11", + "http 0.2.12", "indexmap", "slab", "tokio", @@ -2291,16 +2451,16 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.5" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.2.0", + "http 1.3.1", "indexmap", "slab", "tokio", @@ -2310,21 +2470,15 @@ dependencies = [ [[package]] name = "half" -version = "1.8.2" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" - -[[package]] -name = "half" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" dependencies = [ "bytemuck", "cfg-if", "crunchy", "num-traits", - "rand", + "rand 0.9.1", "rand_distr", ] @@ -2348,19 +2502,19 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "allocator-api2", ] [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" dependencies = [ "allocator-api2", "equivalent", @@ -2378,12 +2532,6 @@ dependencies = [ "stable_deref_trait", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - [[package]] name = "heck" version = "0.5.0" @@ -2396,7 +2544,7 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a56c94661ddfb51aa9cdfbf102cfcc340aa69267f95ebccc4af08d7c530d393" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "byteorder", "heed-traits", "heed-types", @@ -2435,9 +2583,9 @@ checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hermit-abi" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08" [[package]] name = "hex" @@ -2451,10 +2599,10 @@ version = "0.3.2" source = "git+https://github.com/dureuill/hf-hub.git?branch=rust_tls#88d4f11cb9fa079f2912bacb96f5080b16825ce8" dependencies = [ "dirs", - "http 1.2.0", + "http 1.3.1", "indicatif", "log", - "rand", + "rand 0.8.5", "serde", "serde_json", "thiserror 1.0.69", @@ -2472,9 +2620,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -2483,9 +2631,9 @@ dependencies = [ [[package]] name = "http" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", @@ -2494,50 +2642,50 @@ dependencies = [ [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.2.0", + "http 1.3.1", ] [[package]] name = "http-body-util" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", - "futures-util", - "http 1.2.0", + "futures-core", + "http 1.3.1", "http-body", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.8.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.4.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", 
"futures-channel", "futures-util", - "h2 0.4.5", - "http 1.2.0", + "h2 0.4.10", + "http 1.3.1", "http-body", "httparse", "httpdate", @@ -2550,12 +2698,11 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.2" +version = "0.27.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +checksum = "03a01595e11bdcec50946522c32dde3fc6914743000a68b93000965f2f02406d" dependencies = [ - "futures-util", - "http 1.2.0", + "http 1.3.1", "hyper", "hyper-util", "rustls", @@ -2563,23 +2710,24 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "webpki-roots", + "webpki-roots 1.0.0", ] [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "cf9f1e950e0d9d1d3c47184416723cf29c0d1f93bd8cccf37e4beb6b44f31710" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.2.0", + "http 1.3.1", "http-body", "hyper", + "libc", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio", "tower-service", "tracing", @@ -2587,21 +2735,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", - "yoke", + "potential_utf", + "yoke 0.8.0", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", "litemap", @@ -2610,31 +2759,11 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" dependencies = [ "displaydoc", "icu_collections", @@ -2642,67 +2771,54 @@ dependencies = [ "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" dependencies = [ "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "potential_utf", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", + "icu_locale_core", "stable_deref_trait", "tinystr", "writeable", - "yoke", + "yoke 0.8.0", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -2722,9 +2838,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -2732,9 +2848,32 @@ dependencies = [ [[package]] name = "impl-more" -version = "0.1.6" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" + +[[package]] +name = "include-flate" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df49c16750695486c1f34de05da5b7438096156466e7f76c38fcdf285cf0113e" +dependencies = [ + "include-flate-codegen", + "lazy_static", + "libflate", +] + +[[package]] +name = "include-flate-codegen" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c5b246c6261be723b85c61ecf87804e8ea4a35cb68be0ff282ed84b95ffe7d7" +dependencies = [ + "libflate", + "proc-macro2", + "quote", + "syn 2.0.101", +] [[package]] name = "index-scheduler" @@ -2746,7 +2885,7 @@ dependencies = [ "bumpalo", "bumparaw-collections", "byte-unit", - "convert_case 0.6.0", + "convert_case", "crossbeam-channel", "csv", "derive_builder 0.20.2", @@ -2768,7 +2907,7 @@ dependencies = [ "serde_json", "synchronoise", "tempfile", - "thiserror 2.0.9", + "thiserror 2.0.12", "time", "tracing", "ureq", @@ -2777,33 +2916,33 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.7.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.15.3", "serde", ] 
[[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" dependencies = [ "console", - "instant", "number_prefix", "portable-atomic", "unicode-width", + "web-time", ] [[package]] name = "inout" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "generic-array", ] @@ -2825,18 +2964,18 @@ dependencies = [ [[package]] name = "instant" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if", ] [[package]] name = "ipnet" -version = "2.8.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "irg-kvariants" @@ -2851,15 +2990,21 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.13" +version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" +checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ - "hermit-abi 0.4.0", + "hermit-abi 0.5.1", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + [[package]] name = "itertools" version = "0.10.5" @@ -2907,31 +3052,41 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jieba-macros" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c676b32a471d3cfae8dac2ad2f8334cd52e53377733cca8c1fb0a5062fec192" +dependencies = [ + "phf_codegen", +] [[package]] name = "jieba-rs" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e2b0210dc78b49337af9e49d7ae41a39dceac6e5985613f1cf7763e2f76a25" +checksum = "6d1bcad6332969e4d48ee568d430e14ee6dea70740c2549d005d87677ebefb0c" dependencies = [ "cedarwood", - "derive_builder 0.20.2", "fxhash", + "include-flate", + "jieba-macros", "lazy_static", "phf", - "phf_codegen", "regex", ] [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ + "getrandom 0.3.3", "libc", ] @@ -2955,11 +3110,11 @@ dependencies = [ [[package]] name = 
"jsonwebtoken" -version = "9.3.0" +version = "9.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "js-sys", "pem", "ring", @@ -2979,9 +3134,9 @@ dependencies = [ [[package]] name = "kstring" -version = "2.0.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747" +checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ "serde", "static_assertions", @@ -3010,15 +3165,39 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.171" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" + +[[package]] +name = "libflate" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d9dfdc14ea4ef0900c1cddbc8dcd553fbaacd8a4a282cf4018ae9dd04fb21e" +dependencies = [ + "adler32", + "core2", + "crc32fast", + "dary_heap", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d" +dependencies = [ + "core2", + "hashbrown 0.14.5", + "rle-decode-fast", +] [[package]] name = "libgit2-sys" -version = "0.17.0+1.8.1" +version = "0.18.1+1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" +checksum = "e1dcb20f84ffcdd825c7a311ae347cce604a6f084a767dec4a4929829645290e" dependencies = [ "cc", "libc", @@ -3028,25 +3207,25 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +checksum = "6a793df0d7afeac54f95b471d3af7f0d4fb975699f972341a4b76988d49cdf0c" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.53.0", ] [[package]] name = "libm" -version = "0.2.8" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libmimalloc-sys" -version = "0.1.39" +version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" +checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4" dependencies = [ "cc", "libc", @@ -3064,10 +3243,21 @@ dependencies = [ ] [[package]] -name = "libz-sys" -version = "1.1.15" +name = "libredox" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037731f5d3aaa87a5675e895b63ddff1a87624bc29f77004ea829809654e48f6" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.9.1", + "libc", + "redox_syscall", +] + +[[package]] +name = "libz-sys" +version = "1.1.22" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d" dependencies = [ "cc", "libc", @@ -3077,155 +3267,52 @@ dependencies = [ [[package]] name = "lindera" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "832c220475557e3b44a46cad1862b57f010f0c6e93d771d0e628e08689c068b1" -dependencies = [ - "lindera-analyzer", - "lindera-core", - "lindera-dictionary", - "lindera-filter", - "lindera-tokenizer", -] - -[[package]] -name = "lindera-analyzer" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8e26651714abf5167e6b6a80f5cdaa0cad41c5fcb84d8ba96bebafcb9029339" +checksum = "73b6ee48fa4ffaff0b34a0f56e8fe9e3a9f38ff097d7ffe11a189acac242efbf" dependencies = [ "anyhow", "bincode", "byteorder", - "encoding", + "csv", "kanaria", - "lindera-cc-cedict-builder", - "lindera-core", + "lindera-cc-cedict", "lindera-dictionary", - "lindera-filter", - "lindera-ipadic-builder", - "lindera-ko-dic-builder", - "lindera-tokenizer", - "lindera-unidic-builder", + "lindera-ipadic", + "lindera-ipadic-neologd", + "lindera-ko-dic", + "lindera-unidic", "once_cell", "regex", "serde", "serde_json", - "thiserror 1.0.69", + "serde_yaml", + "strum", + "strum_macros", "unicode-blocks", "unicode-normalization", "unicode-segmentation", "yada", ] -[[package]] -name = "lindera-assets" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebb01f1ca53c1e642234c6c7fdb9ac664ad0c1ab9502f33e4200201bac7e6ce7" -dependencies = [ - "encoding", - "flate2", - "lindera-core", - "tar", - "ureq", -] - [[package]] name = "lindera-cc-cedict" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f7618d9aa947fdd7c38eae2b79f0fd237ecb5067608f1363610ba20d20ab5a8" +checksum = "88fb51b5730fd63b1baf677fb19ce3f3f00616a3fbaf430f923b676dce5fab39" dependencies = [ "bincode", "byteorder", - "lindera-cc-cedict-builder", - "lindera-core", - "lindera-decompress", + "lindera-dictionary", "once_cell", -] - -[[package]] -name = "lindera-cc-cedict-builder" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efdbcb809d81428935d601a78c94bfb39500749213f7320705f427a7a1d31aec" -dependencies = [ - "anyhow", - "lindera-core", - "lindera-decompress", - "lindera-dictionary-builder", -] - -[[package]] -name = "lindera-compress" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac178afa2456dac469d3b1a2d7fbaf3e1ea796a1f52321e8ac29545a53c239c" -dependencies = [ - "anyhow", - "flate2", - "lindera-decompress", -] - -[[package]] -name = "lindera-core" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "649777465f48147ce593ab6db347e235e3af8f693a23f4437be94a1cdbdf5fdf" -dependencies = [ - "anyhow", - "bincode", - "byteorder", - "encoding_rs", - "log", - "once_cell", - "serde", - "thiserror 1.0.69", - "yada", -] - -[[package]] -name = "lindera-decompress" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3faaceb85e43ac250021866c6db3cdc9997b44b3d3ea498594d04edc91fc45" -dependencies = [ - "anyhow", - "flate2", - "serde", + "tokio", ] [[package]] name = "lindera-dictionary" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"31e15b2d2d8a4ad45f2e373a084931cf3dfbde15f124044e2436bb920af3366c" -dependencies = [ - "anyhow", - "bincode", - "byteorder", - "lindera-cc-cedict", - "lindera-cc-cedict-builder", - "lindera-core", - "lindera-ipadic", - "lindera-ipadic-builder", - "lindera-ipadic-neologd", - "lindera-ipadic-neologd-builder", - "lindera-ko-dic", - "lindera-ko-dic-builder", - "lindera-unidic", - "lindera-unidic-builder", - "serde", - "strum", - "strum_macros", -] - -[[package]] -name = "lindera-dictionary-builder" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59802949110545b59b663917ed3fd55dc3b3a8cde6bd20137d7fe24372cfb9aa" +checksum = "d5dafa44610860d21f66dbfee1ad387fd127824b204137b540ada4c1a744b19c" dependencies = [ "anyhow", "bincode", @@ -3235,157 +3322,70 @@ dependencies = [ "encoding", "encoding_rs", "encoding_rs_io", + "flate2", "glob", - "lindera-compress", - "lindera-core", - "lindera-decompress", "log", - "yada", -] - -[[package]] -name = "lindera-filter" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1320f118c3fc9e897f4ebfc16864e5ef8c0b06ba769c0a50e53f193f9d682bf8" -dependencies = [ - "anyhow", - "csv", - "kanaria", - "lindera-cc-cedict-builder", - "lindera-core", - "lindera-dictionary", - "lindera-ipadic-builder", - "lindera-ko-dic-builder", - "lindera-unidic-builder", + "md5", "once_cell", - "regex", + "rand 0.9.1", + "reqwest", "serde", - "serde_json", - "unicode-blocks", - "unicode-normalization", - "unicode-segmentation", + "tar", + "thiserror 2.0.12", + "tokio", "yada", ] [[package]] name = "lindera-ipadic" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b4731bf3730f1f38266d7ee9bca7d460cd336645c9dfd4e6a1082e58ab1e993" +checksum = "d273907fdf1c14a8244a370afd7ac79126337ad450d25888b1613aee17b1262a" dependencies = [ "bincode", "byteorder", - "lindera-core", - "lindera-decompress", - "lindera-ipadic-builder", + "lindera-dictionary", "once_cell", -] - -[[package]] -name = "lindera-ipadic-builder" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "309966c12e682f67205c3cd3c8dc55bbdcd1eb3b5c7c5cb41fb8acd18906d340" -dependencies = [ - "anyhow", - "lindera-core", - "lindera-decompress", - "lindera-dictionary-builder", + "tokio", ] [[package]] name = "lindera-ipadic-neologd" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e90e919b4cfb9962d24ee1e1d50a7c163bbf356376495ad66d1996e20b9f9e44" +checksum = "9d4371fbd6dc3ac5cc76990ed41061c553635f67953771159e4061d7f568d14f" dependencies = [ "bincode", "byteorder", - "lindera-core", - "lindera-decompress", - "lindera-ipadic-neologd-builder", + "lindera-dictionary", "once_cell", -] - -[[package]] -name = "lindera-ipadic-neologd-builder" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e517df0d501f9f8bf3126da20fc8cb9a5e37921e0eec1824d7a62f096463e02" -dependencies = [ - "anyhow", - "lindera-core", - "lindera-decompress", - "lindera-dictionary-builder", + "tokio", ] [[package]] name = "lindera-ko-dic" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9c6da4e68bc8b452a54b96d65361ebdceb4b6f36ecf262425c0e1f77960ae82" +checksum = "03f35d8e54e6d5f73e9f76da0fedfa336fa60a6d2ac7f7dcc8bcd15e338db291" dependencies = [ "bincode", "byteorder", - "lindera-assets", - 
"lindera-core", - "lindera-decompress", - "lindera-ko-dic-builder", - "once_cell", -] - -[[package]] -name = "lindera-ko-dic-builder" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc95884cc8f6dfb176caf5991043a4acf94c359215bbd039ea765e00454f271" -dependencies = [ - "anyhow", - "lindera-core", - "lindera-decompress", - "lindera-dictionary-builder", -] - -[[package]] -name = "lindera-tokenizer" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d122042e1232a55c3604692445952a134e523822e9b4b9ab32a53ff890037ad4" -dependencies = [ - "bincode", - "lindera-core", "lindera-dictionary", "once_cell", - "serde", - "serde_json", + "tokio", ] [[package]] name = "lindera-unidic" -version = "0.32.3" +version = "0.42.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbffae1fb2f2614abdcb50f99b138476dbac19862ffa57bfdc9c7b5d5b22a90c" +checksum = "661aa828cf6af7ccd1c0c1142c087fd048af5f83776ccec6af9f9c56448bc626" dependencies = [ "bincode", "byteorder", - "lindera-assets", - "lindera-core", - "lindera-decompress", - "lindera-unidic-builder", + "lindera-dictionary", "once_cell", -] - -[[package]] -name = "lindera-unidic-builder" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe50055327712ebd1bcc74b657cf78c728a78b9586e3f99d5dd0b6a0be221c5d" -dependencies = [ - "anyhow", - "lindera-core", - "lindera-decompress", - "lindera-dictionary-builder", + "tokio", ] [[package]] @@ -3396,17 +3396,22 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "linux-raw-sys" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "liquid" -version = "0.26.9" +version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cdcc72b82748f47c2933c172313f5a9aea5b2c4eb3fa4c66b4ea55bb60bb4b1" +checksum = "2a494c3f9dad3cb7ed16f1c51812cbe4b29493d6c2e5cd1e2b87477263d9534d" dependencies = [ - "doc-comment", "liquid-core", "liquid-derive", "liquid-lib", @@ -3415,15 +3420,14 @@ dependencies = [ [[package]] name = "liquid-core" -version = "0.26.9" +version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2752e978ffc53670f3f2e8b3ef09f348d6f7b5474a3be3f8a5befe5382e4effb" +checksum = "fc623edee8a618b4543e8e8505584f4847a4e51b805db1af6d9af0a3395d0d57" dependencies = [ "anymap2", - "itertools 0.13.0", + "itertools 0.14.0", "kstring", "liquid-derive", - "num-traits", "pest", "pest_derive", "regex", @@ -3433,24 +3437,23 @@ dependencies = [ [[package]] name = "liquid-derive" -version = "0.26.8" +version = "0.26.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b51f1d220e3fa869e24cfd75915efe3164bd09bb11b3165db3f37f57bf673e3" +checksum = "de66c928222984aea59fcaed8ba627f388aaac3c1f57dcb05cc25495ef8faefe" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "liquid-lib" -version = "0.26.9" +version = "0.26.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b1a298d3d2287ee5b1e43840d885b8fdfc37d3f4e90d82aacfd04d021618da" +checksum = "9befeedd61f5995bc128c571db65300aeb50d62e4f0542c88282dbcb5f72372a" dependencies = [ - "itertools 0.13.0", + "itertools 0.14.0", "liquid-core", - "once_cell", "percent-encoding", "regex", "time", @@ -3459,9 +3462,9 @@ dependencies = [ [[package]] name = "litemap" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "lmdb-master-sys" @@ -3476,43 +3479,36 @@ dependencies = [ [[package]] name = "local-channel" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" dependencies = [ "futures-core", "futures-sink", - "futures-util", "local-waker", ] [[package]] name = "local-waker" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", ] -[[package]] -name = "lockfree-object-pool" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e" - [[package]] name = "log" -version = "0.4.26" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "lru" @@ -3520,9 +3516,15 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "227748d55f2f0ab4735d87fd623798cb6b664512fe979705f829c9f81c934465" dependencies = [ - "hashbrown 0.15.2", + "hashbrown 0.15.3", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lzma-rs" version = "0.3.0" @@ -3569,7 +3571,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -3591,6 +3593,7 @@ dependencies = [ "insta", "md5", "once_cell", + "regex-lite", ] [[package]] @@ -3604,7 +3607,7 @@ dependencies = [ "actix-web", "anyhow", "async-trait", - "brotli", + "brotli 6.0.0", "bstr", "build-info", "byte-unit", @@ -3645,7 +3648,7 @@ dependencies = [ "pin-project-lite", "platform-dirs", "prometheus", - "rand", + "rand 0.8.5", "rayon", "regex", "reqwest", @@ -3659,7 +3662,7 @@ dependencies = [ "serde_urlencoded", "sha-1", "sha2", - "siphasher 1.0.1", + "siphasher", "slice-group-by", "static-files", "sysinfo", @@ -3667,7 +3670,7 @@ dependencies = [ "temp-env", "tempfile", "termcolor", - "thiserror 2.0.9", + "thiserror 
2.0.12", "time", "tokio", "toml", @@ -3682,7 +3685,7 @@ dependencies = [ "uuid", "wiremock", "yaup", - "zip 2.3.0", + "zip 2.4.2", ] [[package]] @@ -3694,12 +3697,12 @@ dependencies = [ "hmac", "maplit", "meilisearch-types", - "rand", + "rand 0.8.5", "roaring", "serde", "serde_json", "sha2", - "thiserror 2.0.9", + "thiserror 2.0.12", "time", "uuid", ] @@ -3712,7 +3715,7 @@ dependencies = [ "anyhow", "bumpalo", "bumparaw-collections", - "convert_case 0.6.0", + "convert_case", "csv", "deserr", "either", @@ -3725,13 +3728,13 @@ dependencies = [ "memmap2", "milli", "roaring", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde-cs", "serde_json", "tar", "tempfile", - "thiserror 2.0.9", + "thiserror 2.0.12", "time", "tokio", "utoipa", @@ -3792,7 +3795,7 @@ dependencies = [ "candle-transformers", "charabia", "concat-arrays", - "convert_case 0.6.0", + "convert_case", "crossbeam-channel", "csv", "deserr", @@ -3805,7 +3808,7 @@ dependencies = [ "fxhash", "geoutils", "grenad", - "hashbrown 0.15.2", + "hashbrown 0.15.3", "heed", "hf-hub", "indexmap", @@ -3824,13 +3827,13 @@ dependencies = [ "obkv", "once_cell", "ordered-float", - "rand", + "rand 0.8.5", "rayon", "rayon-par-bridge", "rhai", "roaring", "rstar", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "slice-group-by", @@ -3838,7 +3841,7 @@ dependencies = [ "smallvec", "smartstring", "tempfile", - "thiserror 2.0.9", + "thiserror 2.0.12", "thread_local", "tiktoken-rs", "time", @@ -3853,9 +3856,9 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.43" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" +checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af" dependencies = [ "libmimalloc-sys", ] @@ -3868,9 +3871,9 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", @@ -3884,50 +3887,30 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.2" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" -dependencies = [ - "adler", -] - -[[package]] -name = "miniz_oxide" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", ] [[package]] name = "mio" -version = "0.8.11" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.48.0", -] - -[[package]] -name = "mio" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" -dependencies = [ - "libc", - "wasi 
0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "monostate" -version = "0.1.9" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f370ae88093ec6b11a710dec51321a61d420fafd1bad6e30d01bd9c920e8ee" +checksum = "aafe1be9d0c75642e3e50fedc7ecadf1ef1cbce6eb66462153fc44245343fbee" dependencies = [ "monostate-impl", "serde", @@ -3935,13 +3918,13 @@ dependencies = [ [[package]] name = "monostate-impl" -version = "0.1.9" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "371717c0a5543d6a800cac822eac735aa7d2d2fbb41002e9856a4089532dbdce" +checksum = "c402a4092d5e204f32c9e155431046831fa712637043c58cb73bc6bc6c9663b5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -4095,23 +4078,23 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" +checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" +checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -4131,9 +4114,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.31.1" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] @@ -4146,17 +4129,23 @@ checksum = "ae4512a8f418ac322335255a72361b9ac927e106f4d7fe6ab4d8ac59cb01f7a9" [[package]] name = "once_cell" -version = "1.21.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde51589ab56b20a6f686b2c68f7a0bd6add753d697abf720d63f8db3ab7b1ad" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] name = "onig" -version = "6.4.0" +version = "6.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c4b31c8722ad9171c6d77d3557db078cab2bd50afcc9d09c8b315c59df8ca4f" +checksum = "336b9c63443aceef14bea841b899035ae3abe89b7c486aaf4c5bd8aafedac3f0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.1", "libc", "once_cell", "onig_sys", @@ -4164,9 +4153,9 @@ dependencies = [ [[package]] name = "onig_sys" -version = "69.8.1" +version = "69.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b829e3d7e9cc74c7e315ee8edb185bf4190da5acde74afd7fc59c35b1f086e7" +checksum = "c7f86c6eef3d6df15f23bcfb6af487cbd2fed4e5581d58d5bf1f5f8b7f6727dc" dependencies = [ "cc", "pkg-config", @@ -4174,9 +4163,9 @@ dependencies = [ [[package]] name = "oorandom" -version = "11.1.3" +version = "11.1.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" [[package]] name = "option-ext" @@ -4201,9 +4190,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "owo-colors" -version = "3.5.0" +version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" +checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec" [[package]] name = "page_size" @@ -4227,22 +4216,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", + "redox_syscall", "smallvec", - "windows-targets 0.48.1", + "windows-targets 0.52.6", ] [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "path-matchers" @@ -4271,11 +4260,11 @@ dependencies = [ [[package]] name = "pem" -version = "3.0.3" +version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" +checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "serde", ] @@ -4295,19 +4284,20 @@ dependencies = [ [[package]] name = "pest" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a" +checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6" dependencies = [ - "thiserror 1.0.69", + "memchr", + "thiserror 2.0.12", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "666d00490d4ac815001da55838c500eafb0320019bbaa44444137c48b443a853" +checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5" dependencies = [ "pest", "pest_generator", @@ -4315,22 +4305,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ca01446f50dbda87c1786af8770d535423fa8a53aec03b8f4e3d7eb10e0929" +checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "pest_meta" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56af0a30af74d0445c0bf6d9d051c979b516a1a5af790d251daee76005420a48" +checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0" dependencies = [ "once_cell", "pest", @@ -4339,9 +4329,9 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ "phf_macros", "phf_shared", @@ -4349,9 +4339,9 @@ dependencies = [ [[package]] name = "phf_codegen" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ "phf_generator", "phf_shared", @@ -4359,54 +4349,54 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared", - "rand", + "rand 0.8.5", ] [[package]] name = "phf_macros" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ "phf_generator", "phf_shared", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher 0.3.11", + "siphasher", ] [[package]] name = "pin-project" -version = "1.1.4" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.4" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -4429,9 +4419,9 @@ checksum = "16f2611cd06a1ac239a0cea4521de9eb068a6ca110324ee00631aa68daa74fc0" [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "platform-dirs" @@ -4444,9 +4434,9 @@ dependencies = [ [[package]] name = "plotters" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" dependencies = [ "num-traits", "plotters-backend", @@ -4457,24 +4447,33 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] [[package]] name = "portable-atomic" -version = "1.5.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b" +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" + +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] [[package]] name = "powerfmt" @@ -4484,47 +4483,27 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] [[package]] name = "proc-macro-crate" -version = "3.1.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" dependencies = [ - "toml_edit 0.21.0", -] - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", + "toml_edit", ] [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -4535,10 +4514,10 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "hex", "procfs-core", - "rustix", + "rustix 0.38.44", ] [[package]] @@ -4547,7 +4526,7 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "hex", ] @@ -4565,7 +4544,7 @@ dependencies = [ "parking_lot", "procfs", "protobuf", - "thiserror 2.0.9", + "thiserror 2.0.12", ] [[package]] @@ -4610,9 +4589,9 @@ dependencies = [ [[package]] name = "pulp" -version = "0.18.9" +version = "0.18.22" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "03457ac216146f43f921500bac4e892d5cd32b0479b929cbfc90f95cd6c599c2" +checksum = "a0a01a0dc67cf4558d279f0c25b0962bd08fc6dec0137699eae304103e882fe6" dependencies = [ "bytemuck", "libm", @@ -4621,61 +4600,89 @@ dependencies = [ ] [[package]] -name = "quinn" -version = "0.11.2" +name = "pulp" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" +checksum = "96b86df24f0a7ddd5e4b95c94fc9ed8a98f1ca94d3b01bdce2824097e7835907" +dependencies = [ + "bytemuck", + "cfg-if", + "libm", + "num-complex", + "reborrow", + "version_check", +] + +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" dependencies = [ "bytes", + "cfg_aliases", "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 1.1.0", + "rustc-hash 2.1.1", "rustls", - "thiserror 1.0.69", + "socket2", + "thiserror 2.0.12", "tokio", "tracing", + "web-time", ] [[package]] name = "quinn-proto" -version = "0.11.8" +version = "0.11.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" dependencies = [ "bytes", - "rand", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.1", "ring", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "rustls", + "rustls-pki-types", "slab", - "thiserror 1.0.69", + "thiserror 2.0.12", "tinyvec", "tracing", + "web-time", ] [[package]] name = "quinn-udp" -version = "0.5.2" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" +checksum = "ee4e529991f949c5e25755532370b8af5d114acae52326361d68d47af64aa842" dependencies = [ + "cfg_aliases", "libc", "once_cell", - "socket2 0.5.5", + "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "quote" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "radium" version = "0.7.0" @@ -4689,8 +4696,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", ] [[package]] @@ -4700,7 +4717,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", ] [[package]] @@ -4709,17 +4736,26 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", ] [[package]] name = "rand_distr" -version = "0.4.3" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" +checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463" dependencies = [ "num-traits", - "rand", + "rand 0.9.1", ] [[package]] @@ -4731,6 +4767,15 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "raw-cpuid" +version = "11.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df7ab838ed27997ba19a4664507e6f82b41fe6e20be42929332156e5e85146" +dependencies = [ + "bitflags 2.9.1", +] + [[package]] name = "rayon" version = "1.10.0" @@ -4779,30 +4824,21 @@ checksum = "03251193000f4bd3b042892be858ee50e8b3719f2b08e5833ac4353724632430" [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.1", ] [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", - "redox_syscall 0.2.16", + "getrandom 0.2.16", + "libredox", "thiserror 1.0.69", ] @@ -4852,16 +4888,16 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.12" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" +checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", - "http 1.2.0", + "http 1.3.1", "http-body", "http-body-util", "hyper", @@ -4892,7 +4928,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", + "webpki-roots 0.26.11", "windows-registry", ] @@ -4901,8 +4937,8 @@ name = "rhai" version = "1.20.0" source = "git+https://github.com/rhaiscript/rhai?rev=ef3df63121d27aacd838f366f2b83fd65f20a1e4#ef3df63121d27aacd838f366f2b83fd65f20a1e4" dependencies = [ - "ahash 0.8.11", - "bitflags 2.9.0", + "ahash 0.8.12", + "bitflags 2.9.1", "instant", "num-traits", "once_cell", @@ -4920,7 +4956,7 @@ source = 
"git+https://github.com/rhaiscript/rhai?rev=ef3df63121d27aacd838f366f2b dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -4931,7 +4967,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", @@ -4939,9 +4975,9 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ "bitvec", "bytecheck", @@ -4957,9 +4993,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" dependencies = [ "proc-macro2", "quote", @@ -4967,10 +5003,16 @@ dependencies = [ ] [[package]] -name = "roaring" -version = "0.10.10" +name = "rle-decode-fast" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a652edd001c53df0b3f96a36a8dc93fce6866988efc16808235653c6bcac8bf2" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" + +[[package]] +name = "roaring" +version = "0.10.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19e8d2cfa184d94d0726d650a9f4a1be7f9b76ac9fdb954219878dc00c1c1e7b" dependencies = [ "bytemuck", "byteorder", @@ -4991,15 +5033,15 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.35.0" +version = "1.37.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a" +checksum = "faa7de2ba56ac291bd90c6b9bece784a52ae1411f9506544b3eae36dd2356d50" dependencies = [ "arrayvec", "borsh", "bytes", "num-traits", - "rand", + "rand 0.8.5", "rkyv", "serde", "serde_json", @@ -5007,9 +5049,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" @@ -5019,37 +5061,41 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] -name = "rustc_version" -version = "0.4.0" +name = "rustix" +version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "semver", + "bitflags 2.9.1", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", ] [[package]] name = "rustix" -version = "0.38.41" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "errno", "libc", - "linux-raw-sys", - "windows-sys 0.52.0", + "linux-raw-sys 0.9.4", + "windows-sys 0.59.0", ] [[package]] name = "rustls" -version = "0.23.20" +version = "0.23.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" +checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321" dependencies = [ "log", "once_cell", @@ -5071,15 +5117,19 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.10.1" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "web-time", + "zeroize", +] [[package]] name = "rustls-webpki" -version = "0.102.8" +version = "0.103.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435" dependencies = [ "ring", "rustls-pki-types", @@ -5088,21 +5138,21 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.19" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "safetensors" -version = "0.4.2" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d980e6bfb34436fb0a81e42bc41af43f11805bbbca443e7f68e9faaabe669ed" +checksum = "44560c11236a6130a46ce36c836a62936dc81ebf8c36a37947423571be0e55b6" dependencies = [ "serde", "serde_json", @@ -5131,32 +5181,32 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" [[package]] name = "segment" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd0f21b6eb87a45a7cce06075a29ccdb42658a6eb84bf40c8fc179479630609" +checksum = "971369158e31ad10bd73b558625f99de39554a2f00c2ff886a6796d950e69664" dependencies = [ "async-trait", "reqwest", "serde", "serde_json", - "thiserror 2.0.9", + "thiserror 2.0.12", "time", ] [[package]] name = "semver" -version = "1.0.18" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "seq-macro" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" +checksum = "1bc711410fbe7399f390ca1c3b60ad0f53f80e95c5eb935e52268a0e2cd49acc" [[package]] name = "serde" @@ -5184,7 +5234,7 @@ checksum = 
"5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -5230,6 +5280,19 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + [[package]] name = "sha-1" version = "0.10.1" @@ -5254,9 +5317,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -5280,9 +5343,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" dependencies = [ "libc", ] @@ -5295,34 +5358,28 @@ checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "simdutf8" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" [[package]] name = "similar" -version = "2.2.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" [[package]] name = "simple_asn1" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint", "num-traits", - "thiserror 1.0.69", + "thiserror 2.0.12", "time", ] -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - [[package]] name = "siphasher" version = "1.0.1" @@ -5331,9 +5388,9 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] @@ -5356,9 +5413,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" dependencies = [ "serde", ] @@ -5377,22 +5434,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.4.9" +version = 
"0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" dependencies = [ "libc", - "winapi", -] - -[[package]] -name = "socket2" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" -dependencies = [ - "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -5464,31 +5511,31 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" -version = "0.26.2" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.26.2" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" +checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8" dependencies = [ - "heck 0.4.1", + "heck", "proc-macro2", "quote", "rustversion", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -5503,32 +5550,20 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 2.0.87", -] - [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] @@ -5544,13 +5579,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -5559,7 +5594,21 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec7dddc5f0fee506baf8b9fdb989e242f17e4b11c61dfbb0635b705217199eea" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", + "byteorder", + "enum-as-inner", + "libc", + "thiserror 1.0.69", + "walkdir", +] + +[[package]] +name = "sysctl" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "01198a2debb237c62b6826ec7081082d951f46dbb64b0e8c7649a452230d1dfc" +dependencies = [ + "bitflags 2.9.1", "byteorder", "enum-as-inner", "libc", @@ -5589,9 +5638,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tar" -version = "0.4.43" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c65998313f8e17d0d553d28f91a0df93e4dbbbf770279c7bc21ca0f09ea1a1f6" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" dependencies = [ "filetime", "libc", @@ -5609,16 +5658,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.15.0" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ - "cfg-if", "fastrand", - "getrandom 0.2.15", + "getrandom 0.3.3", "once_cell", - "rustix", - "windows-sys 0.52.0", + "rustix 1.0.7", + "windows-sys 0.59.0", ] [[package]] @@ -5632,9 +5680,9 @@ dependencies = [ [[package]] name = "thin-vec" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" +checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" dependencies = [ "serde", ] @@ -5650,11 +5698,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.9" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ - "thiserror-impl 2.0.9", + "thiserror-impl 2.0.12", ] [[package]] @@ -5665,18 +5713,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "thiserror-impl" -version = "2.0.9" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -5707,9 +5755,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.37" +version = "0.3.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" dependencies = [ "deranged", "itoa", @@ -5724,15 +5772,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "time-macros" -version = "0.2.19" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" dependencies = [ "num-conv", "time-core", @@ -5749,9 
+5797,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", "zerovec", @@ -5769,9 +5817,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" dependencies = [ "tinyvec_macros", ] @@ -5790,7 +5838,7 @@ dependencies = [ "aho-corasick", "derive_builder 0.12.0", "esaxx-rs", - "getrandom 0.2.15", + "getrandom 0.2.16", "itertools 0.12.1", "lazy_static", "log", @@ -5798,7 +5846,7 @@ dependencies = [ "monostate", "onig", "paste", - "rand", + "rand 0.8.5", "rayon", "rayon-cond", "regex", @@ -5814,18 +5862,18 @@ dependencies = [ [[package]] name = "tokio" -version = "1.43.1" +version = "1.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "492a604e2fd7f814268a378409e6c92b5525d747d10db9a229723f55a417958c" +checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" dependencies = [ "backtrace", "bytes", "libc", - "mio 1.0.3", + "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", "windows-sys 0.52.0", ] @@ -5838,25 +5886,24 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ "rustls", - "rustls-pki-types", "tokio", ] [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df" dependencies = [ "bytes", "futures-core", @@ -5867,49 +5914,45 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.22", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.22.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" -dependencies = [ - "indexmap", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.22" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.22", + "toml_write", + "winnow", ] +[[package]] +name = "toml_write" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" + [[package]] name = "tower" version = "0.5.2" @@ -5951,9 +5994,9 @@ dependencies = [ [[package]] name = "tracing-actix-web" -version = "0.7.15" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9f5c1aca50ebebf074ee665b9f99f2e84906dcf6b993a0d0090edb835166d" +checksum = "2340b7722695166c7fc9b3e3cd1166e7c74fedb9075b8f0c74d3822d2e41caf5" dependencies = [ "actix-web", "mutually_exclusive_features", @@ -5970,7 +6013,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -6049,21 +6092,21 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "uell" @@ -6076,48 +6119,52 @@ dependencies = [ [[package]] name = "ug" -version = "0.0.2" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4eef2ebfc18c67a6dbcacd9d8a4d85e0568cc58c82515552382312c2730ea13" +checksum = "03719c61a91b51541f076dfdba45caacf750b230cefaa4b32d6f5411c3f7f437" dependencies = [ - "half 2.4.1", + "gemm 0.18.2", + "half", + "libloading", + "memmap2", "num", + "num-traits", + "num_cpus", + "rayon", + "safetensors", "serde", - "serde_json", "thiserror 1.0.69", + "tracing", + "yoke 0.7.5", ] [[package]] name = "ug-cuda" -version = "0.0.2" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c4dcab280ad0ef3957e153a82dcad608c954d02cf253b695322f502d1f8902e" +checksum = "50758486d7941f8b0a636ba7e29455c07071f41590beac1fd307ec893e8db69a" dependencies = [ "cudarc", - "half 2.4.1", + "half", "serde", - "serde_json", "thiserror 1.0.69", "ug", ] [[package]] name = "unescaper" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c878a167baa8afd137494101a688ef8c67125089ff2249284bd2b5f9bfedb815" +checksum = "c01d12e3a56a4432a8b436f293c25f4808bdf9e9f9f98f9260bba1f1bc5a1f26" dependencies = [ - "thiserror 1.0.69", + "thiserror 2.0.12", ] [[package]] name = "unicase" -version = "2.6.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" -dependencies = [ - "version_check", -] +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-blocks" @@ -6127,9 +6174,9 @@ checksum = "6b12e05d9e06373163a9bb6bb8c263c261b396643a99445fe6b9811fd376581b" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-normalization" @@ -6151,15 +6198,21 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" -version = "0.1.11" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "unicode_categories" @@ -6167,6 +6220,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + [[package]] name = "untrusted" version = "0.9.0" @@ -6189,7 +6248,7 @@ dependencies = [ "serde_json", "socks", "url", - "webpki-roots", + "webpki-roots 0.26.11", ] [[package]] @@ -6210,17 +6269,11 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8-width" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8_iter" @@ -6230,9 +6283,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "utoipa" @@ -6255,7 +6308,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.87", + "syn 2.0.101", "uuid", ] @@ -6273,19 +6326,21 @@ dependencies = [ [[package]] name = "uuid" -version = "1.11.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.3.3", + "js-sys", "serde", + "wasm-bindgen", ] [[package]] name = "valuable" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" @@ -6295,9 +6350,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "9.0.2" +version = "9.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f25fc8f8f05df455c7941e87f093ad22522a9ff33d7a027774815acf6f0639" +checksum = "6b2bf58be11fc9414104c6d3a2e464163db5ef74b12296bda593cac37b6e4777" dependencies = [ "anyhow", "derive_builder 0.20.2", @@ -6307,9 +6362,9 @@ dependencies = [ [[package]] name = "vergen-git2" -version = "1.0.2" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e63e069d8749fead1e3bab7a9d79e8fb90516b2ec66fc2243a798ecdc1a31d7" +checksum = "4f6ee511ec45098eabade8a0750e76eec671e7fb2d9360c563911336bea9cac1" dependencies = [ "anyhow", "derive_builder 0.20.2", @@ -6322,9 +6377,9 @@ dependencies = [ [[package]] name = "vergen-lib" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0c767e6751c09fc85cde58722cf2f1007e80e4c8d5a4321fc90d83dc54ca147" +checksum = "9b07e6010c0f3e59fcb164e0163834597da68d1f864e2b8ca49f74de01e9c166" dependencies = [ "anyhow", "derive_builder 0.20.2", @@ -6333,9 +6388,9 @@ dependencies = [ [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" @@ -6375,9 +6430,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" -version = "0.13.3+wasi-0.2.2" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] @@ -6404,18 +6459,19 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.37" +version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] @@ -6438,7 +6494,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6454,9 +6510,9 @@ dependencies = [ [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" dependencies = [ "futures-util", "js-sys", @@ -6467,9 +6523,19 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", @@ -6477,9 +6543,18 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.1" +version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3de34ae270483955a94f4b21bdaaeb83d508bb84a01435f393818edb0012009" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.0", +] + +[[package]] +name = "webpki-roots" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb" dependencies = [ "rustls-pki-types", ] @@ -6490,7 +6565,7 @@ version = "0.16.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "471d1c1645d361eb782a1650b1786a8fb58dd625e681a04c09f5ff7c8764a7b0" dependencies = [ - "hashbrown 0.14.3", + "hashbrown 0.14.5", "once_cell", ] @@ -6512,11 +6587,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -6555,7 +6630,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -6566,18 +6641,24 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] -name = "windows-registry" -version = "0.2.0" +name = "windows-link" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" + +[[package]] +name = "windows-registry" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" dependencies = [ - "windows-result 0.2.0", + "windows-result 0.3.4", "windows-strings", - "windows-targets 0.52.6", + "windows-targets 0.53.0", ] [[package]] @@ -6591,30 +6672,20 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.2.0" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" 
dependencies = [ - "windows-targets 0.52.6", + "windows-link", ] [[package]] name = "windows-strings" -version = "0.1.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" dependencies = [ - "windows-result 0.2.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", + "windows-link", ] [[package]] @@ -6623,7 +6694,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -6636,33 +6707,27 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.42.2" +name = "windows-sys" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets 0.52.6", ] [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] @@ -6674,7 +6739,7 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", @@ -6682,16 +6747,26 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" +name = "windows-targets" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" @@ -6700,16 +6775,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" +name = "windows_aarch64_gnullvm" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" @@ -6718,16 +6793,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] -name = "windows_i686_gnu" -version = "0.42.2" +name = "windows_aarch64_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" @@ -6735,6 +6810,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + [[package]] name = "windows_i686_gnullvm" version = "0.52.6" @@ -6742,16 +6823,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] -name = "windows_i686_msvc" -version = "0.42.2" +name = "windows_i686_gnullvm" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" @@ -6760,16 +6841,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" +name = "windows_i686_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" @@ -6778,16 +6859,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" +name = "windows_x86_64_gnu" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" @@ -6796,16 +6877,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" +name = "windows_x86_64_gnullvm" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" @@ -6814,35 +6895,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] -name = "winnow" -version = "0.5.40" +name = "windows_x86_64_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.6.22" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980" +checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec" dependencies = [ "memchr", ] [[package]] name = "wiremock" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fff469918e7ca034884c7fd8f93fe27bacb7fcb599fd879df6c7b429a29b646" +checksum = "101681b74cd87b5899e87bcf5a64e83334dd313fcd3053ea72e6dba18928e301" dependencies = [ "assert-json-diff", "async-trait", "base64 0.22.1", "deadpool", "futures", - "http 1.2.0", + "http 1.3.1", "http-body-util", "hyper", "hyper-util", @@ -6857,24 +6935,18 @@ dependencies = [ [[package]] name = 
"wit-bindgen-rt" -version = "0.33.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "wyz" @@ -6887,13 +6959,12 @@ dependencies = [ [[package]] name = "xattr" -version = "1.3.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" +checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" dependencies = [ "libc", - "linux-raw-sys", - "rustix", + "rustix 1.0.7", ] [[package]] @@ -6953,7 +7024,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", - "yoke-derive", + "yoke-derive 0.7.5", + "zerofrom", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive 0.8.0", "zerofrom", ] @@ -6965,48 +7048,60 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", + "synstructure", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.32" +version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.32" +version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] name = "zerofrom" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655b0814c5c0b19ade497851070c640773304939a6c0fd5f5fb43da0696d05b7" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6a647510471d372f2e6c2e6b7219e44d8c574d24fdc11c610a61455782f18c3" +checksum = 
"d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", "synstructure", ] @@ -7027,29 +7122,40 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke 0.8.0", + "zerofrom", ] [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" dependencies = [ - "yoke", + "yoke 0.8.0", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.101", ] [[package]] @@ -7069,9 +7175,9 @@ dependencies = [ [[package]] name = "zip" -version = "2.3.0" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84e9a772a54b54236b9b744aaaf8d7be01b4d6e99725523cb82cb32d1c81b1d7" +checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50" dependencies = [ "aes", "arbitrary", @@ -7082,14 +7188,14 @@ dependencies = [ "deflate64", "displaydoc", "flate2", - "getrandom 0.3.1", + "getrandom 0.3.3", "hmac", "indexmap", "lzma-rs", "memchr", "pbkdf2", "sha1", - "thiserror 2.0.9", + "thiserror 2.0.12", "time", "xz2", "zeroize", @@ -7099,41 +7205,39 @@ dependencies = [ [[package]] name = "zopfli" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946" +checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7" dependencies = [ "bumpalo", "crc32fast", - "lockfree-object-pool", "log", - "once_cell", "simd-adler32", ] [[package]] name = "zstd" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "7.2.0" +version = "7.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa556e971e7b568dc775c136fc9de8c779b1c2fc3a63defaafadffdbd3181afa" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.10+zstd.1.5.6" +version = "2.0.15+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" +checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" dependencies = [ "cc", "pkg-config", diff --git a/crates/meili-snap/Cargo.toml b/crates/meili-snap/Cargo.toml index 0c48ff824..b79960fdb 100644 --- a/crates/meili-snap/Cargo.toml +++ 
b/crates/meili-snap/Cargo.toml @@ -15,3 +15,4 @@ license.workspace = true insta = { version = "=1.39.0", features = ["json", "redactions"] } md5 = "0.7.0" once_cell = "1.20" +regex-lite = "0.1.6" diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index c467aef49..e8e605fec 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -4,9 +4,17 @@ use std::path::{Path, PathBuf}; use std::sync::Mutex; pub use insta; +use insta::internals::{Content, ContentPath}; use once_cell::sync::Lazy; +use regex_lite::{Regex, RegexBuilder}; static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default); +/// A regex to match UUIDs in messages, specifically looking for the UUID v4 format +static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| { + RegexBuilder::new(r"(?<before>.*)([0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})(?<after>.*)") + .case_insensitive(true) + .build().unwrap() +}); /// Return the md5 hash of the given string pub fn hash_snapshot(snap: &str) -> String { @@ -26,6 +34,19 @@ pub fn default_snapshot_settings_for_test<'a>( let filename = path.file_name().unwrap().to_str().unwrap(); settings.set_omit_expression(true); + fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content { + match &content { + Content::String(s) => { + let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "$before[uuid]$after"); + Content::String(uuid_replaced.to_string()) + } + _ => content, + } + } + + settings.add_dynamic_redaction(".message", uuid_in_message_redaction); + settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction); + let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name); let test_name = test_name.rsplit("::").next().unwrap().to_owned(); diff --git a/crates/meilisearch/src/error.rs b/crates/meilisearch/src/error.rs index 13800adc1..b13eb8d7c 100644 --- a/crates/meilisearch/src/error.rs +++ b/crates/meilisearch/src/error.rs @@ -64,7 +64,7 @@ pub enum MeilisearchHttpError { #[error(transparent)] IndexScheduler(#[from] index_scheduler::Error), #[error("{}", match .index_name { - Some(name) if !name.is_empty() => format!("Index `{}`: {error}", MeilisearchHttpError::index_name(name)), + Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name), _ => format!("{error}") })] Milli { error: milli::Error, index_name: Option<String> }, @@ -84,14 +84,6 @@ impl MeilisearchHttpError { pub(crate) fn from_milli(error: milli::Error, index_name: Option<String>) -> Self { Self::Milli { error, index_name } } - - fn index_name(index_name: &str) -> &str { - if let Ok(_) = uuid::Uuid::parse_str(index_name) { - "[uuid]" - } else { - index_name - } - } } impl ErrorCode for MeilisearchHttpError { diff --git a/crates/meilisearch/tests/search/errors.rs b/crates/meilisearch/tests/search/errors.rs index bce03f56b..d76a0963d 100644 --- a/crates/meilisearch/tests/search/errors.rs +++ b/crates/meilisearch/tests/search/errors.rs @@ -708,7 +708,7 @@ async fn filter_invalid_attribute_array() { |response, code| { snapshot!(response, @r###" { - "message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass", + "message": "Index `[uuid]`: Attribute `many` is not filterable. 
Available filterable attribute patterns are: `title`.\n1:5 many = Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" From d2948adea33f39485e1692e908378b0dc21772c0 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 26 May 2025 14:31:58 +0300 Subject: [PATCH 043/131] Migrate more tests to assert with "[uuid]" instead of real Uuid Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/auth/api_keys.rs | 2 +- crates/meilisearch/tests/search/errors.rs | 16 ++++++++-------- crates/meilisearch/tests/search/filters.rs | 4 ++-- crates/meilisearch/tests/search/mod.rs | 4 ++-- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/crates/meilisearch/tests/auth/api_keys.rs b/crates/meilisearch/tests/auth/api_keys.rs index 0aea7d722..7245c73a4 100644 --- a/crates/meilisearch/tests/auth/api_keys.rs +++ b/crates/meilisearch/tests/auth/api_keys.rs @@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() { let (response, code) = server.add_api_key(content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.", + "message": "`uid` field value `[uuid]` is already an existing API key.", "code": "api_key_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#api_key_already_exists" diff --git a/crates/meilisearch/tests/search/errors.rs b/crates/meilisearch/tests/search/errors.rs index d76a0963d..5e54bcff1 100644 --- a/crates/meilisearch/tests/search/errors.rs +++ b/crates/meilisearch/tests/search/errors.rs @@ -886,7 +886,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -912,7 +912,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `=` is not allowed for the 
attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -933,7 +933,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -959,7 +959,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -985,7 +985,7 @@ async fn search_with_pattern_filter_settings_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r#" { - "message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", + "message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` 
matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1144,7 +1144,7 @@ async fn search_on_unknown_field() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1165,7 +1165,7 @@ async fn search_on_unknown_field_plus_joker() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1183,7 +1183,7 @@ async fn search_on_unknown_field_plus_joker() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs index 4219d2ec1..49409efa6 100644 --- a/crates/meilisearch/tests/search/filters.rs +++ b/crates/meilisearch/tests/search/filters.rs @@ -720,7 +720,7 @@ async fn test_filterable_attributes_priority() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2", + "message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -746,7 +746,7 @@ async fn test_filterable_attributes_priority() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS", + "message": "Index `[uuid]`: Attribute `doggos` is not filterable. 
Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index 2cc882c71..bfa23054a 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -1741,7 +1741,7 @@ async fn test_nested_fields() { assert_eq!(code, 400, "{response}"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array", + "message": "Index `[uuid]`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1760,7 +1760,7 @@ async fn test_nested_fields() { assert_eq!(code, 400, "{response}"); snapshot!(json_string!(response), @r###" { - "message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"", + "message": "Index `[uuid]`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" From 6738a4f6ee4cad6fc341286e1fdd55898134df9e Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 10:08:07 +0200 Subject: [PATCH 044/131] feat: update the insta snapshots --- .../meilisearch/tests/documents/add_documents.rs | 16 ++++++++-------- .../update/new/extract/faceted/extract_facets.rs | 5 +---- .../update/new/extract/faceted/facet_document.rs | 1 - 3 files changed, 9 insertions(+), 13 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 6569bb9a5..39ad57750 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -2039,6 +2039,14 @@ async fn update_documents_with_geo_field() { @r###" { "hits": [ + { + "id": "4", + "_geo": { + "lat": "4", + "lng": "0" + }, + "_geoDistance": 667170 + }, { "id": "3", "_geo": { "lat": "3", "lng": "0" }, "doggo": "kefir", "_geoDistance": 555975 }, - { - "id": "4", - "_geo": { - "lat": "4", - "lng": "0" - }, - "_geoDistance": 667170 - }, { "id": "1" }, diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index de0edc164..3086d25e4 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -135,7 +135,6 @@ impl FacetedDocidsExtractor { extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), new_fields_ids_map.deref_mut(), filterable_attributes, sortable_fields, @@ -177,7 +176,6 @@ impl FacetedDocidsExtractor { extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), new_fields_ids_map.deref_mut(), filterable_attributes, 
sortable_fields, @@ -200,7 +198,6 @@ impl FacetedDocidsExtractor { extract_document_facets( inner.merged(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), new_fields_ids_map.deref_mut(), filterable_attributes, sortable_fields, @@ -224,7 +221,6 @@ impl FacetedDocidsExtractor { extract_document_facets( inner.inserted(), - inner.external_document_id(), new_fields_ids_map.deref_mut(), filterable_attributes, sortable_fields, @@ -232,6 +228,7 @@ impl FacetedDocidsExtractor { distinct_field, &mut add, )?; + if is_geo_enabled { extract_geo_document( inner.inserted(), diff --git a/crates/milli/src/update/new/extract/faceted/facet_document.rs b/crates/milli/src/update/new/extract/faceted/facet_document.rs index 68bc98b64..359c32e58 100644 --- a/crates/milli/src/update/new/extract/faceted/facet_document.rs +++ b/crates/milli/src/update/new/extract/faceted/facet_document.rs @@ -16,7 +16,6 @@ use crate::filterable_attributes_rules::match_faceted_field; #[allow(clippy::too_many_arguments)] pub fn extract_document_facets<'doc>( document: impl Document<'doc>, - external_document_id: &str, field_id_map: &mut GlobalFieldsIdsMap, filterable_attributes: &[FilterableAttributesRule], sortable_fields: &HashSet<String>, From 18aed75d3b0221222c97bc445101c8f069918ac1 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 18:20:55 +0200 Subject: [PATCH 045/131] fix logic --- .../tests/documents/add_documents.rs | 16 +++---- .../new/extract/faceted/extract_facets.rs | 46 +++++++++---------- 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 39ad57750..6569bb9a5 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -2039,14 +2039,6 @@ async fn update_documents_with_geo_field() { @r###" { "hits": [ - { - "id": "4", - "_geo": { - "lat": "4", - "lng": "0" - }, - "_geoDistance": 667170 - }, { "id": "3", "_geo": { "lat": "3", "lng": "0" }, "doggo": "kefir", "_geoDistance": 555975 }, + { + "id": "4", + "_geo": { + "lat": "4", + "lng": "0" + }, + "_geoDistance": 667170 + }, { "id": "1" }, diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 3086d25e4..861c67bbe 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -170,10 +170,10 @@ impl FacetedDocidsExtractor { let has_changed_for_geo_fields = inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; - if has_changed { - // 1. Delete old facet values - let mut del = facet_fn!(del); + // 1. Delete old facet values + let mut del = facet_fn!(del); + if has_changed { extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, new_fields_ids_map.deref_mut(), @@ -183,19 +183,20 @@ impl FacetedDocidsExtractor { distinct_field, &mut del, )?; + } + if is_geo_enabled && has_changed_for_geo_fields { + extract_geo_document( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut del, + )?; + } - if is_geo_enabled && has_changed_for_geo_fields { - extract_geo_document( - inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - &mut del, - )?; - } - - // 2. 
Insert new facet values - let mut add = facet_fn!(add); + // 2. Insert new facet values + let mut add = facet_fn!(add); + if has_changed { extract_document_facets( inner.merged(rtxn, index, context.db_fields_ids_map)?, new_fields_ids_map.deref_mut(), @@ -205,15 +206,14 @@ impl FacetedDocidsExtractor { distinct_field, &mut add, )?; - - if is_geo_enabled && has_changed_for_geo_fields { - extract_geo_document( - inner.merged(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - &mut add, - )?; - } + } + if is_geo_enabled && has_changed_for_geo_fields { + extract_geo_document( + inner.merged(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut add, + )?; } } DocumentChange::Insertion(inner) => { From c9ec502ed9e37ca970f9a0954780003696452913 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 18:32:59 +0200 Subject: [PATCH 046/131] refactor for readability --- .../new/extract/faceted/extract_facets.rs | 31 +++++++------------ 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 861c67bbe..2640ac462 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -170,9 +170,7 @@ impl FacetedDocidsExtractor { let has_changed_for_geo_fields = inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; - // 1. Delete old facet values - let mut del = facet_fn!(del); - + // 1. Maybe update doc if has_changed { extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, @@ -181,22 +179,9 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - &mut del, + &mut facet_fn!(del), )?; - } - if is_geo_enabled && has_changed_for_geo_fields { - extract_geo_document( - inner.current(rtxn, index, context.db_fields_ids_map)?, - inner.external_document_id(), - new_fields_ids_map.deref_mut(), - &mut del, - )?; - } - // 2. Insert new facet values - let mut add = facet_fn!(add); - - if has_changed { extract_document_facets( inner.merged(rtxn, index, context.db_fields_ids_map)?, new_fields_ids_map.deref_mut(), @@ -204,15 +189,23 @@ impl FacetedDocidsExtractor { sortable_fields, asc_desc_fields, distinct_field, - &mut add, + &mut facet_fn!(add), )?; } + + // 2. 
Maybe update geo if is_geo_enabled && has_changed_for_geo_fields { + extract_geo_document( + inner.current(rtxn, index, context.db_fields_ids_map)?, + inner.external_document_id(), + new_fields_ids_map.deref_mut(), + &mut facet_fn!(del), + )?; extract_geo_document( inner.merged(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), new_fields_ids_map.deref_mut(), - &mut add, + &mut facet_fn!(add), )?; } } From 9ad43b6841aa4765b7f31645631c69dce565d612 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Mon, 26 May 2025 18:37:20 +0200 Subject: [PATCH 047/131] rename has_changed to has_changed_for_facets --- crates/milli/src/update/new/extract/faceted/extract_facets.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 2640ac462..a1d9e6553 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -153,7 +153,7 @@ impl FacetedDocidsExtractor { } } DocumentChange::Update(inner) => { - let has_changed = inner.has_changed_for_fields( + let has_changed_for_facets = inner.has_changed_for_fields( &mut |field_name| { match_faceted_field( field_name, @@ -171,7 +171,7 @@ impl FacetedDocidsExtractor { inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; // 1. Maybe update doc - if has_changed { + if has_changed_for_facets { extract_document_facets( inner.current(rtxn, index, context.db_fields_ids_map)?, new_fields_ids_map.deref_mut(), From 46ff78b4ec5e6f5812490c3399b4183545f3cb2d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 11:47:02 +0300 Subject: [PATCH 048/131] Update the regex to replace all occurrences of uuids in the redaction Signed-off-by: Martin Tzvetanov Grigorov --- Cargo.lock | 1 + crates/meili-snap/Cargo.toml | 3 +++ crates/meili-snap/src/lib.rs | 21 ++++++++++++++++----- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c73af51d9..dc2aa5af4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3594,6 +3594,7 @@ dependencies = [ "md5", "once_cell", "regex-lite", + "uuid", ] [[package]] diff --git a/crates/meili-snap/Cargo.toml b/crates/meili-snap/Cargo.toml index b79960fdb..9dba56256 100644 --- a/crates/meili-snap/Cargo.toml +++ b/crates/meili-snap/Cargo.toml @@ -16,3 +16,6 @@ insta = { version = "=1.39.0", features = ["json", "redactions"] } md5 = "0.7.0" once_cell = "1.20" regex-lite = "0.1.6" + +[dev-dependencies] +uuid = { version = "1.17.0", features = ["v4"] } diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index e8e605fec..975b0d47c 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -6,14 +6,12 @@ use std::path::{Path, PathBuf}; use std::sync::Mutex; pub use insta; use insta::internals::{Content, ContentPath}; use once_cell::sync::Lazy; -use regex_lite::{Regex, RegexBuilder}; +use regex_lite::Regex; static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default); /// A regex to match UUIDs in messages, specifically looking for the UUID v4 format static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| { - RegexBuilder::new(r"(?<before>.*)([0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})(?<after>.*)") - .case_insensitive(true) - .build().unwrap() + Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}").unwrap() }); /// Return the md5 hash of the given string pub fn hash_snapshot(snap: &str) -> String { @@ -37,7 +35,7 @@ pub fn 
default_snapshot_settings_for_test<'a>( fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content { match &content { Content::String(s) => { - let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "$before[uuid]$after"); + let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]"); Content::String(uuid_replaced.to_string()) } _ => content, @@ -252,7 +250,10 @@ macro_rules! json_string { #[cfg(test)] mod tests { + use uuid::Uuid; use crate as meili_snap; + use crate::UUID_IN_MESSAGE_RE; + #[test] fn snap() { snapshot_hash!(10, @"d3d9446802a44259755d38e6d163e820"); @@ -300,4 +301,14 @@ mod tests { // snapshot_hash!("", name: "", @"d41d8cd98f00b204e9800998ecf8427e"); } } + + #[test] + fn uuid_in_message_regex() { + let uuid1 = Uuid::new_v4(); + let uuid2 = Uuid::new_v4(); + let uuid3 = Uuid::new_v4(); + let to_replace = format!("1 {uuid1} 2 {uuid2} 3 {uuid3} 4"); + let replaced = UUID_IN_MESSAGE_RE.replace_all(to_replace.as_str(), "[uuid]"); + assert_eq!(replaced, "1 [uuid] 2 [uuid] 3 [uuid] 4"); + } } From 4897ad99d06337791071cc4727be186a7c40b365 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 14:26:29 +0300 Subject: [PATCH 049/131] Wait for the add_documents task Format the code Signed-off-by: Martin Tzvetanov Grigorov --- crates/meili-snap/src/lib.rs | 5 +++-- crates/meilisearch/tests/search/mod.rs | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 975b0d47c..688c87494 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -11,7 +11,8 @@ use regex_lite::Regex; static SNAPSHOT_NAMES: Lazy>> = Lazy::new(Mutex::default); /// A regex to match UUIDs in messages, specifically looking for the UUID v4 format static UUID_IN_MESSAGE_RE: Lazy = Lazy::new(|| { - Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}").unwrap() + Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}") + .unwrap() }); /// Return the md5 hash of the given string @@ -250,9 +251,9 @@ macro_rules! 
json_string { #[cfg(test)] mod tests { - use uuid::Uuid; use crate as meili_snap; use crate::UUID_IN_MESSAGE_RE; + use uuid::Uuid; #[test] fn snap() { diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index bfa23054a..53e6415c5 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -130,8 +130,8 @@ async fn search_with_stop_word() { snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); - index.add_documents(documents, None).await; - index.wait_task(1).await; + let (task, code) = index.add_documents(documents, None).await; + index.wait_task(task.uid()).await.succeeded(); // prefix search index From 48cad4132ad368197ec353e5cbf173ad5981375c Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 16:44:57 +0300 Subject: [PATCH 050/131] Fix clippy - ignore `code` variable Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs index 53e6415c5..be476da35 100644 --- a/crates/meilisearch/tests/search/mod.rs +++ b/crates/meilisearch/tests/search/mod.rs @@ -130,7 +130,7 @@ async fn search_with_stop_word() { snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); - let (task, code) = index.add_documents(documents, None).await; + let (task, _code) = index.add_documents(documents, None).await; index.wait_task(task.uid()).await.succeeded(); // prefix search From 38b1c57fa80560685ff422e44196d05621c6afcd Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 May 2025 16:03:44 +0300 Subject: [PATCH 051/131] Faster IT tests for add_documents.rs Use Shared server where possible Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/add_documents.rs | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 6569bb9a5..2c8925833 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -18,8 +18,8 @@ async fn add_documents_test_json_content_types() { } ]); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -75,8 +75,8 @@ async fn add_single_document_test_json_content_types() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -132,8 +132,8 @@ async fn add_single_document_gzip_encoded() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post let document = serde_json::to_string(&document).unwrap(); @@ -187,8 +187,8 @@ async fn add_single_document_gzip_encoded() { async fn add_single_document_gzip_encoded_with_incomplete_error() { let document = json!("kefir"); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = 
Server::new_shared(); let app = server.init_web_app().await; // post let document = serde_json::to_string(&document).unwrap(); @@ -244,8 +244,8 @@ async fn add_single_document_with_every_encoding() { "content": "Bouvier Bernois", }); - // this is a what is expected and should work - let server = Server::new().await; + // this is what is expected and should work + let server = Server::new_shared(); let app = server.init_web_app().await; // post let document = serde_json::to_string(&document).unwrap(); @@ -518,7 +518,7 @@ async fn error_add_documents_test_bad_content_types() { } ]); - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -574,7 +574,7 @@ async fn error_add_documents_test_no_content_type() { } ]); - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -622,7 +622,7 @@ async fn error_add_documents_test_no_content_type() { async fn error_add_malformed_csv_documents() { let document = "id, content\n1234, hello, world\n12, hello world"; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -672,7 +672,7 @@ async fn error_add_malformed_csv_documents() { async fn error_add_malformed_json_documents() { let document = r#"[{"id": 1}, {id: 2}]"#; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -768,7 +768,7 @@ async fn error_add_malformed_json_documents() { async fn error_add_malformed_ndjson_documents() { let document = "{\"id\": 1}\n{id: 2}"; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -818,7 +818,7 @@ async fn error_add_malformed_ndjson_documents() { async fn error_add_missing_payload_csv_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -868,7 +868,7 @@ async fn error_add_missing_payload_csv_documents() { async fn error_add_missing_payload_json_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post @@ -918,7 +918,7 @@ async fn error_add_missing_payload_json_documents() { async fn error_add_missing_payload_ndjson_documents() { let document = ""; - let server = Server::new().await; + let server = Server::new_shared(); let app = server.init_web_app().await; // post From cf5d26124a9ec856f41e07e4b0c8b80b4db49f49 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 May 2025 16:12:02 +0300 Subject: [PATCH 052/131] Call .succeeded() or .failed() on the waited task Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/add_documents.rs | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 2c8925833..57d0864ad 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -1111,7 +1111,7 @@ async fn document_addition_with_huge_int_primary_key() { let (response, code) = index.add_documents(documents, Some("primary")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(response, @r###" { 
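Stated once in isolation, the pattern this commit applies across the suite is: keep the task returned by the write call, wait on its real uid, and assert the terminal status instead of discarding it. The fragment below is a sketch of a test body, not a standalone program; `add_documents`, `wait_task`, `uid()`, `succeeded()` and `failed()` are the harness helpers visible in the surrounding hunks, and their behavior is assumed from those call sites.

    // Sketch of the pattern adopted here; helper names are taken from the
    // surrounding diffs and assumed to behave as their call sites suggest.
    let (task, _code) = index.add_documents(documents, None).await;

    // Wait on the uid the server actually assigned: a hard-coded `wait_task(1)`
    // breaks once tests share a server and task ids interleave.
    let response = index.wait_task(task.uid()).await.succeeded();

    // When the task is expected to error, assert that outcome explicitly:
    // index.wait_task(task.uid()).await.failed();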
@@ -1568,7 +1568,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1603,7 +1603,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.failed(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1652,7 +1652,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() { let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); // Documents without a primary key are not accepted. snapshot!(response, @@ -1697,7 +1697,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1733,7 +1733,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ let (response, code) = index.update_documents(documents, Some("id")).await; snapshot!(code, @"202 Accepted"); - let response = index.wait_task(response.uid()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" @@ -1782,7 +1782,7 @@ async fn add_documents_with_geo_field() { ]); let (task, _status_code) = index.add_documents(documents, None).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { @@ -1906,7 +1906,7 @@ async fn update_documents_with_geo_field() { ]); let (task, _status_code) = index.add_documents(documents, None).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { @@ -1975,7 +1975,7 @@ async fn update_documents_with_geo_field() { } ]); let (task, _status_code) = index.update_documents(updated_documents, None).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { @@ -2913,7 +2913,7 @@ async fn batch_several_documents_addition() { // wait first batch of documents to finish futures::future::join_all(waiter).await; - index.wait_task(4).await; + index.wait_task(4).await.succeeded(); // run a second completely failing batch documents[40] = json!({"title": 
"error", "desc": "error"}); @@ -2925,7 +2925,7 @@ async fn batch_several_documents_addition() { } // wait second batch of documents to finish futures::future::join_all(waiter).await; - index.wait_task(9).await; + index.wait_task(9).await.failed(); let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await; From fc88b003b47301006348e2cd6ddd0f189f9d7c7d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 26 May 2025 11:28:23 +0300 Subject: [PATCH 053/131] Use shared server and unique indices for add_documents IT tests Signed-off-by: Martin Tzvetanov Grigorov --- Cargo.lock | 102 +++--- crates/index-scheduler/src/error.rs | 14 +- crates/meili-snap/src/lib.rs | 10 + crates/meilisearch/Cargo.toml | 2 +- .../tests/documents/add_documents.rs | 328 +++++++++--------- 5 files changed, 243 insertions(+), 213 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dc2aa5af4..a36c568b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1067,9 +1067,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.38" +version = "4.5.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" +checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f" dependencies = [ "clap_builder", "clap_derive", @@ -1077,9 +1077,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.38" +version = "4.5.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" +checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51" dependencies = [ "anstream", "anstyle", @@ -2715,17 +2715,21 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9f1e950e0d9d1d3c47184416723cf29c0d1f93bd8cccf37e4beb6b44f31710" +checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http 1.3.1", "http-body", "hyper", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -2988,6 +2992,16 @@ dependencies = [ "serde", ] +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" version = "0.4.16" @@ -3067,9 +3081,9 @@ dependencies = [ [[package]] name = "jieba-rs" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d1bcad6332969e4d48ee568d430e14ee6dea70740c2549d005d87677ebefb0c" +checksum = "b06096b4b61fb4bfdbf16c6a968ea2d6be1ac9617cf3db741c3b641e6c290a35" dependencies = [ "cedarwood", "fxhash", @@ -3207,9 +3221,9 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a793df0d7afeac54f95b471d3af7f0d4fb975699f972341a4b76988d49cdf0c" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" dependencies = [ "cfg-if", "windows-targets 0.53.0", @@ -4889,9 +4903,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "2bf597b113be201cb2269b4c39b39a804d01b99ee95a4278f0ed04e45cff1c71" dependencies = [ "base64 0.22.1", "bytes", @@ -4913,7 +4927,6 @@ dependencies = [ "pin-project-lite", "quinn", "rustls", - "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", @@ -4923,14 +4936,14 @@ dependencies = [ "tokio-rustls", "tokio-util", "tower", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.26.11", - "windows-registry", + "webpki-roots 1.0.0", ] [[package]] @@ -5435,9 +5448,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.9" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5969,6 +5982,24 @@ dependencies = [ "tower-service", ] +[[package]] +name = "tower-http" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fdb0c213ca27a9f57ab69ddb290fd80d970922355b83ae380b395d3986b8a2e" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-util", + "http 1.3.1", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -6619,7 +6650,7 @@ checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" dependencies = [ "windows-implement", "windows-interface", - "windows-result 0.1.2", + "windows-result", "windows-targets 0.52.6", ] @@ -6645,23 +6676,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "windows-link" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" - -[[package]] -name = "windows-registry" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result 0.3.4", - "windows-strings", - "windows-targets 0.53.0", -] - [[package]] name = "windows-result" version = "0.1.2" @@ -6671,24 +6685,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" -dependencies = [ - "windows-link", -] - [[package]] name = "windows-sys" version = "0.48.0" diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs index cb798b385..a0945d8a3 100644 --- a/crates/index-scheduler/src/error.rs +++ b/crates/index-scheduler/src/error.rs @@ -126,7 +126,7 @@ pub enum Error { #[error(transparent)] Heed(#[from] heed::Error), #[error("{}", match .index_uid { - Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", uid), + Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", Error::index_name(uid)), _ => format!("{error}") })] Milli { error: milli::Error, index_uid: Option }, @@ -177,6 +177,18 @@ pub enum Error { PlannedFailure, } 
+impl Error { + + #[inline] + fn index_name(index_name: &str) -> &str { + if let Ok(_) = uuid::Uuid::parse_str(index_name) { + "[uuid]" + } else { + index_name + } + } +} + #[derive(Debug, thiserror::Error)] #[error( "{disabled_action} requires enabling the `{feature}` experimental feature. See {issue_link}" diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 688c87494..17a57c082 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -33,6 +33,7 @@ pub fn default_snapshot_settings_for_test<'a>( let filename = path.file_name().unwrap().to_str().unwrap(); settings.set_omit_expression(true); fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content { match &content { Content::String(s) => { @@ -45,6 +46,15 @@ pub fn default_snapshot_settings_for_test<'a>( settings.add_dynamic_redaction(".message", uuid_in_message_redaction); settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction); + settings.add_dynamic_redaction(".indexUid", |content, _content_path| { + match &content { + Content::String(s) => match uuid::Uuid::parse_str(s) { + Ok(_) => Content::String("[uuid]".to_owned()), + Err(_) => content, + }, + _ => content, + } + }); let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name); let test_name = test_name.rsplit("::").next().unwrap().to_owned(); diff --git a/crates/meilisearch/Cargo.toml b/crates/meilisearch/Cargo.toml index 40c0d98b5..dffa60326 100644 --- a/crates/meilisearch/Cargo.toml +++ b/crates/meilisearch/Cargo.toml @@ -116,7 +116,7 @@ utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] } actix-rt = "2.10.0" brotli = "6.0.0" # fixed version due to format breakages in v1.40 -insta = "=1.39.0" +insta = { version = "=1.39.0", features = ["redactions"] } manifest-dir-macros = "0.1.18" maplit = "1.0.2" meili-snap = { path = "../meili-snap" } diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 57d0864ad..e8ef43b40 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -3,7 +3,7 @@ use meili_snap::{json_string, snapshot}; use meilisearch::Opt; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; - +use uuid::Uuid; use crate::common::encoder::Encoder; use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value}; use crate::json; @@ -18,8 +18,8 @@ async fn add_documents_test_json_content_types() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -37,7 +38,7 @@ async fn 
add_documents_test_json_content_types() { @r###" { "taskUid": 1, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -78,10 +79,11 @@ async fn add_single_document_test_json_content_types() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -94,7 +96,7 @@ async fn add_single_document_test_json_content_types() { @r###" { "taskUid": 0, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -103,7 +105,7 @@ async fn add_single_document_test_json_content_types() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -116,7 +118,7 @@ async fn add_single_document_test_json_content_types() { @r###" { "taskUid": 1, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -135,11 +137,12 @@ async fn add_single_document_gzip_encoded() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); let encoder = Encoder::Gzip; let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) .insert_header(("content-type", "application/json")) .insert_header(encoder.header().unwrap()) @@ -153,7 +156,7 @@ async fn add_single_document_gzip_encoded() { @r###" { "taskUid": 0, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -162,7 +165,7 @@ async fn add_single_document_gzip_encoded() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document)) .insert_header(("content-type", "application/json")) .insert_header(encoder.header().unwrap()) @@ -176,7 +179,7 @@ async fn add_single_document_gzip_encoded() { @r###" { "taskUid": 1, - "indexUid": "dog", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -190,10 +193,11 @@ async fn add_single_document_gzip_encoded_with_incomplete_error() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .insert_header(("content-encoding", "gzip")) @@ -215,7 +219,7 @@ async fn add_single_document_gzip_encoded_with_incomplete_error() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + 
.uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .insert_header(("content-encoding", "gzip")) @@ -247,12 +251,13 @@ async fn add_single_document_with_every_encoding() { // this is what is expected and should work let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let document = serde_json::to_string(&document).unwrap(); for (task_uid, encoder) in Encoder::iterator().enumerate() { let mut req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) .insert_header(("content-type", "application/json")); req = match encoder.header() { @@ -271,8 +276,8 @@ async fn add_single_document_with_every_encoding() { #[actix_rt::test] async fn add_csv_document() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id,name,race 0,jean,bernese mountain @@ -283,18 +288,18 @@ async fn add_csv_document() { snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" { "taskUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -335,8 +340,8 @@ async fn add_csv_document() { #[actix_rt::test] async fn add_csv_document_with_types() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id:number,name:string,race:string,age:number,cute:boolean 0,jean,bernese mountain,2.5,true @@ -348,18 +353,18 @@ async fn add_csv_document_with_types() { snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" { "taskUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -411,8 +416,8 @@ async fn add_csv_document_with_types() { #[actix_rt::test] async fn add_csv_document_with_custom_delimiter() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id|name|race 0|jean|bernese mountain @@ -424,18 +429,18 @@ async fn add_csv_document_with_custom_delimiter() { snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" { "taskUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "enqueued", "type": 
"documentAdditionOrUpdate", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response["taskUid"].as_u64().unwrap()).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "pets", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -476,8 +481,8 @@ async fn add_csv_document_with_custom_delimiter() { #[actix_rt::test] async fn add_csv_document_with_types_error() { - let server = Server::new().await; - let index = server.index("pets"); + let server = Server::new_shared(); + let index = server.unique_index(); let document = "#id:number,a:boolean,b:number 0,doggo,1"; @@ -520,10 +525,11 @@ async fn error_add_documents_test_bad_content_types() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/plain")) .to_request(); @@ -544,7 +550,7 @@ async fn error_add_documents_test_bad_content_types() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/plain")) .to_request(); @@ -576,10 +582,11 @@ async fn error_add_documents_test_no_content_type() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .to_request(); let res = test::call_service(&app, req).await; @@ -599,7 +606,7 @@ async fn error_add_documents_test_no_content_type() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .to_request(); let res = test::call_service(&app, req).await; @@ -624,10 +631,11 @@ async fn error_add_malformed_csv_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -648,7 +656,7 @@ async fn error_add_malformed_csv_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -674,10 +682,11 @@ async fn error_add_malformed_json_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -698,7 +707,7 @@ async fn error_add_malformed_json_documents() { // 
put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -724,7 +733,7 @@ async fn error_add_malformed_json_documents() { let document = format!("\"{}\"", long); let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document) .insert_header(("content-type", "application/json")) .to_request(); @@ -745,7 +754,7 @@ async fn error_add_malformed_json_documents() { // add one more char to the long string to test if the truncating works. let document = format!("\"{}m\"", long); let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document) .insert_header(("content-type", "application/json")) .to_request(); @@ -770,10 +779,11 @@ async fn error_add_malformed_ndjson_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -794,7 +804,7 @@ async fn error_add_malformed_ndjson_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -820,10 +830,11 @@ async fn error_add_missing_payload_csv_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -844,7 +855,7 @@ async fn error_add_missing_payload_csv_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "text/csv")) .to_request(); @@ -870,10 +881,11 @@ async fn error_add_missing_payload_json_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -894,7 +906,7 @@ async fn error_add_missing_payload_json_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/json")) .to_request(); @@ -920,10 +932,11 @@ async fn error_add_missing_payload_ndjson_documents() { let server = Server::new_shared(); let app = server.init_web_app().await; + let index_name = Uuid::new_v4().to_string(); // post let req = test::TestRequest::post() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) 
.set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -944,7 +957,7 @@ async fn error_add_missing_payload_ndjson_documents() { // put let req = test::TestRequest::put() - .uri("/indexes/dog/documents") + .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(document.to_string()) .insert_header(("content-type", "application/x-ndjson")) .to_request(); @@ -966,8 +979,8 @@ async fn error_add_missing_payload_ndjson_documents() { #[actix_rt::test] async fn add_documents_no_index_creation() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -989,7 +1002,7 @@ async fn add_documents_no_index_creation() { { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1037,8 +1050,8 @@ async fn error_document_add_create_index_bad_uid() { #[actix_rt::test] async fn document_addition_with_primary_key() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1052,7 +1065,7 @@ async fn document_addition_with_primary_key() { @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -1068,7 +1081,7 @@ async fn document_addition_with_primary_key() { { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1086,10 +1099,10 @@ async fn document_addition_with_primary_key() { let (response, code) = index.get().await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]" }), + snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]", ".uid" => "[uuid]" }), @r###" { - "uid": "test", + "uid": "[uuid]", "createdAt": "[date]", "updatedAt": "[date]", "primaryKey": "primary" @@ -1099,8 +1112,8 @@ async fn document_addition_with_primary_key() { #[actix_rt::test] async fn document_addition_with_huge_int_primary_key() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1117,7 +1130,7 @@ async fn document_addition_with_huge_int_primary_key() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1146,8 +1159,8 @@ async fn document_addition_with_huge_int_primary_key() { #[actix_rt::test] async fn replace_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -1162,7 +1175,7 @@ async fn replace_document() { @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" @@ -1190,7 +1203,7 @@ async fn replace_document() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1219,17 +1232,17 @@ async fn replace_document() { #[actix_rt::test] async fn add_no_documents() { - let 
server = Server::new().await; - let index = server.index("kefir"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.add_documents(json!([]), None).await; snapshot!(code, @"202 Accepted"); - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1246,13 +1259,13 @@ async fn add_no_documents() { "#); let (task, _code) = index.add_documents(json!([]), Some("kefkef")).await; - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1269,13 +1282,13 @@ async fn add_no_documents() { "#); let (task, _code) = index.add_documents(json!([{ "kefkef": 1 }]), None).await; - let task = server.wait_task(task.uid()).await; + let task = server.wait_task(task.uid()).await.succeeded(); let task = task.succeeded(); snapshot!(task, @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "kefir", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1307,8 +1320,8 @@ async fn add_no_documents() { #[actix_rt::test] async fn add_larger_dataset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let update_id = index.load_test_set().await; let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); @@ -1319,12 +1332,11 @@ async fn add_larger_dataset() { let (response, code) = index .get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() }) .await; - assert_eq!(code, 200, "failed with `{}`", response); + assert_eq!(code, 200, "failed with `{response}`"); assert_eq!(response["results"].as_array().unwrap().len(), 77); // x-ndjson add large test - let server = Server::new().await; - let index = server.index("test"); + let index = server.unique_index(); let update_id = index.load_test_set_ndjson().await; let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); @@ -1341,8 +1353,8 @@ async fn add_larger_dataset() { #[actix_rt::test] async fn error_add_documents_bad_document_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("docid")).await; // unsupported characters @@ -1362,7 +1374,7 @@ async fn error_add_documents_bad_document_id() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1399,7 +1411,7 @@ async fn error_add_documents_bad_document_id() { { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1436,7 +1448,7 @@ async fn error_add_documents_bad_document_id() { { "uid": 3, "batchUid": 3, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1460,8 +1472,8 @@ async fn error_add_documents_bad_document_id() { 
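The other mechanical change this commit repeats, shown in isolation below: each hard-coded route such as "/indexes/dog/documents" becomes one built from a per-test UUID index name, which is what lets these tests share a single server without colliding on index state. `test::TestRequest` is the actix-web builder already used throughout this file; the sketch covers only the request construction, with payload and dispatch omitted.

    use actix_web::test;
    use uuid::Uuid;

    // Mint a fresh index name per test; concurrent tests on the shared server
    // then never touch the same index, and snapshots redact the name as "[uuid]".
    let index_name = Uuid::new_v4().to_string();
    let req = test::TestRequest::post()
        .uri(format!("/indexes/{index_name}/documents").as_str())
        .insert_header(("content-type", "application/json"))
        .to_request();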
#[actix_rt::test] async fn error_add_documents_missing_document_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("docid")).await; let documents = json!([ { @@ -1478,7 +1490,7 @@ async fn error_add_documents_missing_document_id() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1502,8 +1514,8 @@ async fn error_add_documents_missing_document_id() { #[actix_rt::test] async fn error_document_field_limit_reached_in_one_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1527,7 +1539,7 @@ async fn error_document_field_limit_reached_in_one_document() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1551,8 +1563,8 @@ async fn error_document_field_limit_reached_in_one_document() { #[actix_rt::test] async fn error_document_field_limit_reached_over_multiple_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1575,7 +1587,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1610,7 +1622,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1634,8 +1646,8 @@ async fn error_document_field_limit_reached_over_multiple_documents() { #[actix_rt::test] async fn error_document_field_limit_reached_in_one_nested_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1660,7 +1672,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1679,8 +1691,8 @@ async fn error_document_field_limit_reached_in_one_nested_document() { #[actix_rt::test] async fn error_document_field_limit_reached_over_multiple_documents_with_nested_fields() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; @@ -1704,7 +1716,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1740,7 +1752,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_ { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1759,8 +1771,8 @@ async fn 
error_document_field_limit_reached_over_multiple_documents_with_nested_ #[actix_rt::test] async fn add_documents_with_geo_field() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; let documents = json!([ @@ -1788,7 +1800,7 @@ async fn add_documents_with_geo_field() { { "uid": 1, "batchUid": 1, - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1883,8 +1895,8 @@ async fn add_documents_with_geo_field() { #[actix_rt::test] async fn update_documents_with_geo_field() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; let documents = json!([ @@ -1912,7 +1924,7 @@ async fn update_documents_with_geo_field() { { "uid": 1, "batchUid": 1, - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -1981,7 +1993,7 @@ async fn update_documents_with_geo_field() { { "uid": 2, "batchUid": 2, - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2075,8 +2087,8 @@ async fn update_documents_with_geo_field() { #[actix_rt::test] async fn add_documents_invalid_geo_field() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; @@ -2092,12 +2104,12 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), @r###" { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2106,7 +2118,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", + "message": "Index `[uuid]`: The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2135,7 +2147,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 3, "batchUid": 3, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2144,7 +2156,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. 
Was expecting `_geo.lat` and `_geo.lng` fields.", + "message": "Index `[uuid]`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2173,7 +2185,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 4, "batchUid": 4, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2182,7 +2194,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "message": "Index `[uuid]`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2211,7 +2223,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 5, "batchUid": 5, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2220,7 +2232,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2249,7 +2261,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 6, "batchUid": 6, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2258,7 +2270,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2287,7 +2299,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 7, "batchUid": 7, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2296,7 +2308,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. 
Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2325,7 +2337,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 8, "batchUid": 8, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2334,7 +2346,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2363,7 +2375,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 9, "batchUid": 9, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2372,7 +2384,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2401,7 +2413,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 10, "batchUid": 10, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2410,7 +2422,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", + "message": "Index `[uuid]`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2439,7 +2451,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 11, "batchUid": 11, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2448,7 +2460,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", + "message": "Index `[uuid]`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2477,7 +2489,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 12, "batchUid": 12, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2486,7 +2498,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. 
Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2515,7 +2527,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 13, "batchUid": 13, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2524,7 +2536,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", + "message": "Index `[uuid]`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2554,7 +2566,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 14, "batchUid": 14, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2563,7 +2575,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2591,7 +2603,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 15, "batchUid": 15, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2600,7 +2612,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2628,7 +2640,7 @@ async fn add_documents_invalid_geo_field() { { "uid": 16, "batchUid": 16, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2637,7 +2649,7 @@ async fn add_documents_invalid_geo_field() { "indexedDocuments": 0 }, "error": { - "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. Was expecting finite numbers but instead got `null` and `null`.", + "message": "Index `[uuid]`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. 
Was expecting finite numbers but instead got `null` and `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2653,8 +2665,8 @@ async fn add_documents_invalid_geo_field() { // Related to #4333 #[actix_rt::test] async fn add_invalid_geo_and_then_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; // _geo is not a correct object @@ -2671,7 +2683,7 @@ async fn add_invalid_geo_and_then_settings() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2694,7 +2706,7 @@ async fn add_invalid_geo_and_then_settings() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "settingsUpdate", "canceledBy": null, @@ -2704,7 +2716,7 @@ async fn add_invalid_geo_and_then_settings() { ] }, "error": { - "message": "Index `test`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `[uuid]`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" @@ -2719,8 +2731,8 @@ async fn add_invalid_geo_and_then_settings() { #[actix_rt::test] async fn error_add_documents_payload_size() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(Some("id")).await; let document = json!( { @@ -2746,8 +2758,8 @@ async fn error_add_documents_payload_size() { #[actix_rt::test] async fn error_primary_key_inference() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -2766,7 +2778,7 @@ async fn error_primary_key_inference() { { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2807,7 +2819,7 @@ async fn error_primary_key_inference() { { "uid": 1, "batchUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2846,7 +2858,7 @@ async fn error_primary_key_inference() { { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -2865,8 +2877,8 @@ async fn error_primary_key_inference() { #[actix_rt::test] async fn add_documents_with_primary_key_twice() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -2888,8 +2900,8 @@ async fn add_documents_with_primary_key_twice() { #[actix_rt::test] async fn batch_several_documents_addition() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let mut documents: Vec<_> = (0..150usize) .map(|id| { From ce9c930d1070dc60f172db146e074bb9099125b8 Mon Sep 17 00:00:00 2001 From: Martin 
Tzvetanov Grigorov
Date: Mon, 26 May 2025 11:33:59 +0300
Subject: [PATCH 054/131] Fix clippy and fmt

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/index-scheduler/src/error.rs                 | 3 +--
 crates/meili-snap/src/lib.rs                        | 1 -
 crates/meilisearch/tests/documents/add_documents.rs | 6 +++---
 3 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs
index a0945d8a3..cbedf827d 100644
--- a/crates/index-scheduler/src/error.rs
+++ b/crates/index-scheduler/src/error.rs
@@ -178,10 +178,9 @@ pub enum Error {
 }
 
 impl Error {
-    #[inline]
     fn index_name(index_name: &str) -> &str {
-        if let Ok(_) = uuid::Uuid::parse_str(index_name) {
+        if uuid::Uuid::parse_str(index_name).is_ok() {
             "[uuid]"
         } else {
             index_name
diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs
index 17a57c082..30769c7c1 100644
--- a/crates/meili-snap/src/lib.rs
+++ b/crates/meili-snap/src/lib.rs
@@ -33,7 +33,6 @@ pub fn default_snapshot_settings_for_test<'a>(
     let filename = path.file_name().unwrap().to_str().unwrap();
     settings.set_omit_expression(true);
 
-<<<<<<< HEAD
     fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content {
         match &content {
             Content::String(s) => {
diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs
index e8ef43b40..efe1bfbf0 100644
--- a/crates/meilisearch/tests/documents/add_documents.rs
+++ b/crates/meilisearch/tests/documents/add_documents.rs
@@ -1,12 +1,12 @@
+use crate::common::encoder::Encoder;
+use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
+use crate::json;
 use actix_web::test;
 use meili_snap::{json_string, snapshot};
 use meilisearch::Opt;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 use uuid::Uuid;
-use crate::common::encoder::Encoder;
-use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
-use crate::json;
 
 /// This is the basic usage of our API and every other tests uses the content-type application/json
 #[actix_rt::test]

From f3d691667d47985347766f134b294a2a8354bbe2 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Mon, 26 May 2025 13:39:15 +0300
Subject: [PATCH 055/131] Use a Regex in insta dynamic redaction to replace Uuids with [uuid]

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/index-scheduler/src/error.rs | 13 +------------
 crates/meili-snap/src/lib.rs        |  8 ++++++++
 2 files changed, 9 insertions(+), 12 deletions(-)

diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs
index cbedf827d..cb798b385 100644
--- a/crates/index-scheduler/src/error.rs
+++ b/crates/index-scheduler/src/error.rs
@@ -126,7 +126,7 @@ pub enum Error {
     #[error(transparent)]
     Heed(#[from] heed::Error),
     #[error("{}", match .index_uid {
-        Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", Error::index_name(uid)),
+        Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", uid),
         _ => format!("{error}")
     })]
     Milli { error: milli::Error, index_uid: Option<String> },
@@ -177,17 +177,6 @@ pub enum Error {
     PlannedFailure,
 }
 
-impl Error {
-    fn index_name(index_name: &str) -> &str {
-        if uuid::Uuid::parse_str(index_name).is_ok() {
-            "[uuid]"
-        } else {
-            index_name
-        }
-    }
-}
-
 #[derive(Debug, thiserror::Error)]
 #[error(
     "{disabled_action} requires enabling the `{feature}` experimental feature. See {issue_link}"
See {issue_link}" diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 30769c7c1..0f709b3de 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -55,6 +55,14 @@ pub fn default_snapshot_settings_for_test<'a>( } }); + settings.add_dynamic_redaction(".error.message", |content, _content_path| match &content { + Content::String(s) => { + let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "$before[uuid]$after"); + Content::String(uuid_replaced.to_string()) + } + _ => content, + }); + let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name); let test_name = test_name.rsplit("::").next().unwrap().to_owned(); From 3e0de6cb83a219a38428a4c68b5b6217c4b8fb91 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 14:39:45 +0300 Subject: [PATCH 056/131] Wait for the batched tasks bu their real uid. Some of them succeed, others fail. Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/documents/add_documents.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index efe1bfbf0..35f3d793a 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -2924,8 +2924,10 @@ async fn batch_several_documents_addition() { } // wait first batch of documents to finish - futures::future::join_all(waiter).await; - index.wait_task(4).await.succeeded(); + let finished_tasks = futures::future::join_all(waiter).await; + for (task, _code) in finished_tasks { + index.wait_task(task.uid()).await; + } // run a second completely failing batch documents[40] = json!({"title": "error", "desc": "error"}); @@ -2936,8 +2938,10 @@ async fn batch_several_documents_addition() { waiter.push(index.add_documents(json!(chunk), Some("id"))); } // wait second batch of documents to finish - futures::future::join_all(waiter).await; - index.wait_task(9).await.failed(); + let finished_tasks = futures::future::join_all(waiter).await; + for (task, _code) in finished_tasks { + index.wait_task(task.uid()).await; + } let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await; From ce65ad213bf85fcc8ec1fb1d7534f30688160eb1 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 27 May 2025 16:27:08 +0300 Subject: [PATCH 057/131] Add dynamic redactions for `uid`, `batchUid` and `taskUid` Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/add_documents.rs | 286 +++++++++--------- 1 file changed, 142 insertions(+), 144 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 35f3d793a..99aa566d7 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -34,16 +34,16 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", 
"indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() @@ -56,10 +56,10 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", @@ -92,16 +92,16 @@ async fn add_single_document_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() @@ -114,10 +114,10 @@ async fn add_single_document_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", @@ -152,16 +152,16 @@ async fn add_single_document_gzip_encoded() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); // put let req = test::TestRequest::put() @@ -175,10 +175,10 @@ async fn add_single_document_gzip_encoded() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); snapshot!(status_code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", 
".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", @@ -255,7 +255,7 @@ async fn add_single_document_with_every_encoding() { // post let document = serde_json::to_string(&document).unwrap(); - for (task_uid, encoder) in Encoder::iterator().enumerate() { + for encoder in Encoder::iterator() { let mut req = test::TestRequest::post() .uri(format!("/indexes/{index_name}/documents").as_str()) .set_payload(encoder.encode(document.clone())) @@ -268,9 +268,8 @@ async fn add_single_document_with_every_encoding() { let res = test::call_service(&app, req).await; let status_code = res.status(); let body = test::read_body(res).await; - let response: Value = serde_json::from_slice(&body).unwrap_or_default(); + let _response: Value = serde_json::from_slice(&body).unwrap_or_default(); assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], task_uid); } } @@ -285,20 +284,20 @@ async fn add_csv_document() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -350,20 +349,20 @@ async fn add_csv_document_with_types() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -426,20 +425,20 @@ async fn add_csv_document_with_custom_delimiter() { let (response, code) = index.raw_update_documents(document, Some("text/csv"), "?csvDelimiter=|").await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + 
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -991,17 +990,16 @@ async fn add_documents_no_index_creation() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); - assert_eq!(response["taskUid"], 0); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_task(0).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1061,26 +1059,26 @@ async fn document_addition_with_primary_key() { ]); let (response, code) = index.add_documents(documents, Some("primary")).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_task(response.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1171,16 +1169,16 @@ async fn replace_document() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code,@"202 Accepted"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - 
"taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "documentAdditionOrUpdate", "enqueuedAt": "[date]" } - "###); + "#); index.wait_task(response.uid()).await.succeeded(); @@ -1198,11 +1196,11 @@ async fn replace_document() { let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1369,11 +1367,11 @@ async fn error_add_documents_bad_document_id() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1393,7 +1391,7 @@ async fn error_add_documents_bad_document_id() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); // More than 512 bytes let documents = json!([ @@ -1406,11 +1404,11 @@ async fn error_add_documents_bad_document_id() { index.wait_task(value.uid()).await.failed(); let (response, code) = index.get_task(value.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1443,11 +1441,11 @@ async fn error_add_documents_bad_document_id() { index.wait_task(value.uid()).await.failed(); let (response, code) = index.get_task(value.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1485,11 +1483,11 @@ async fn error_add_documents_missing_document_id() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - 
@r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -1509,7 +1507,7 @@ async fn error_add_documents_missing_document_id() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); } #[actix_rt::test] @@ -1795,11 +1793,11 @@ async fn add_documents_with_geo_field() { let (task, _status_code) = index.add_documents(documents, None).await; let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1814,7 +1812,7 @@ async fn add_documents_with_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; @@ -1919,11 +1917,11 @@ async fn update_documents_with_geo_field() { let (task, _status_code) = index.add_documents(documents, None).await; let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -1938,7 +1936,7 @@ async fn update_documents_with_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await; snapshot!(code, @"200 OK"); @@ -1988,11 +1986,11 @@ async fn update_documents_with_geo_field() { ]); let (task, _status_code) = index.update_documents(updated_documents, None).await; let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", @@ -2104,11 +2102,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), - @r###" + 
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }), + @r#" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2128,7 +2126,7 @@ async fn add_documents_invalid_geo_field() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); // _geo is an object but is missing both the lat and lng let documents = json!([ @@ -2142,11 +2140,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2180,11 +2178,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 4, - "batchUid": 4, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2218,11 +2216,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 5, - "batchUid": 5, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2256,11 +2254,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 6, - "batchUid": 6, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2294,11 +2292,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let 
(response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 7, - "batchUid": 7, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2332,11 +2330,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 8, - "batchUid": 8, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2370,11 +2368,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 9, - "batchUid": 9, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2408,11 +2406,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 10, - "batchUid": 10, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2446,11 +2444,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 11, - "batchUid": 11, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2484,11 +2482,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, 
code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 12, - "batchUid": 12, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2522,11 +2520,11 @@ async fn add_documents_invalid_geo_field() { index.wait_task(task.uid()).await.failed(); let (response, code) = index.get_task(task.uid()).await; snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 13, - "batchUid": 13, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2561,11 +2559,11 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 14, - "batchUid": 14, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2598,11 +2596,11 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 15, - "batchUid": 15, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2635,11 +2633,11 @@ async fn add_documents_invalid_geo_field() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.failed(); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 16, - "batchUid": 16, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2773,11 +2771,11 @@ 
async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @r###" + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r#" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2797,7 +2795,7 @@ async fn error_primary_key_inference() { "startedAt": "[date]", "finishedAt": "[date]" } - "###); + "#); let documents = json!([ { @@ -2814,11 +2812,11 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentAdditionOrUpdate", @@ -2853,11 +2851,11 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(task.uid()).await; assert_eq!(code, 200); - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", From 36f0a1492ce7f4185381c99b286d03f919d8a0e9 Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Wed, 28 May 2025 14:17:54 +0300 Subject: [PATCH 058/131] Apply suggestions from code review Co-authored-by: Tamo --- crates/meilisearch/tests/documents/add_documents.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 99aa566d7..522fc530b 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -991,11 +991,9 @@ async fn add_documents_no_index_creation() { let (response, code) = index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); - index.wait_task(response.uid()).await.succeeded(); - - let (response, code) = index.get_task(0).await; + let response = index.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"200 OK"); - snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(response, @r###" { "uid": "[uid]", From c13efde04241d496b2eada49c8da39c0f24cbb0b Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 28 May 2025 14:35:50 +0300 Subject: [PATCH 059/131] uuid is a production dependency of meili-snap Signed-off-by: Martin 
Tzvetanov Grigorov --- crates/meili-snap/Cargo.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/meili-snap/Cargo.toml b/crates/meili-snap/Cargo.toml index 9dba56256..be96769ab 100644 --- a/crates/meili-snap/Cargo.toml +++ b/crates/meili-snap/Cargo.toml @@ -16,6 +16,4 @@ insta = { version = "=1.39.0", features = ["json", "redactions"] } md5 = "0.7.0" once_cell = "1.20" regex-lite = "0.1.6" - -[dev-dependencies] uuid = { version = "1.17.0", features = ["v4"] } From 02929e241b8af615f149ea44e0738ab99d33c37d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 28 May 2025 14:36:13 +0300 Subject: [PATCH 060/131] Update the status code Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/documents/add_documents.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index 522fc530b..1cf492fc0 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -992,7 +992,7 @@ async fn add_documents_no_index_creation() { snapshot!(code, @"202 Accepted"); let response = index.wait_task(response.uid()).await.succeeded(); - snapshot!(code, @"200 OK"); + snapshot!(code, @"202 Accepted"); snapshot!(response, @r###" { From 43ec97fe457be2e01fde6a502044037e9bdf4c02 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 28 May 2025 15:01:04 +0300 Subject: [PATCH 061/131] format the code Signed-off-by: Martin Tzvetanov Grigorov --- crates/meili-snap/src/lib.rs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/crates/meili-snap/src/lib.rs b/crates/meili-snap/src/lib.rs index 0f709b3de..1641a6335 100644 --- a/crates/meili-snap/src/lib.rs +++ b/crates/meili-snap/src/lib.rs @@ -45,14 +45,12 @@ pub fn default_snapshot_settings_for_test<'a>( settings.add_dynamic_redaction(".message", uuid_in_message_redaction); settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction); - settings.add_dynamic_redaction(".indexUid", |content, _content_path| { - match &content { - Content::String(s) => match uuid::Uuid::parse_str(s) { - Ok(_) => Content::String("[uuid]".to_owned()), - Err(_) => content, - }, - _ => content, - } + settings.add_dynamic_redaction(".indexUid", |content, _content_path| match &content { + Content::String(s) => match uuid::Uuid::parse_str(s) { + Ok(_) => Content::String("[uuid]".to_owned()), + Err(_) => content, + }, + _ => content, }); settings.add_dynamic_redaction(".error.message", |content, _content_path| match &content { From 7e9909ee45177f14eb39d77fab76f36858b11c99 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 May 2025 16:10:17 +0300 Subject: [PATCH 062/131] perf: Faster index::update_index IT tests Use a shared server where possible. Assert succeeded/failed task waits. 
Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/common/index.rs | 4 ++ .../meilisearch/tests/index/update_index.rs | 55 ++++++++++--------- 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/crates/meilisearch/tests/common/index.rs b/crates/meilisearch/tests/common/index.rs index 09a7d623c..24db6776b 100644 --- a/crates/meilisearch/tests/common/index.rs +++ b/crates/meilisearch/tests/common/index.rs @@ -29,6 +29,10 @@ impl<'a> Index<'a, Owned> { } } + pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> { + Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData } + } + pub async fn load_test_set(&self) -> u64 { let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref())); let (response, code) = self diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index a9b02e7d4..8880314d4 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -7,15 +7,16 @@ use crate::json; #[actix_rt::test] async fn update_primary_key() { - let server = Server::new().await; - let index = server.index("test"); - let (_, code) = index.create(None).await; + let server = Server::new_shared(); + let index = server.unique_index(); + let (task, code) = index.create(None).await; assert_eq!(code, 202); + index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = index.update(Some("primary")).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); assert_eq!(response["status"], "succeeded"); @@ -23,7 +24,7 @@ async fn update_primary_key() { assert_eq!(code, 200); - assert_eq!(response["uid"], "test"); + assert_eq!(response["uid"], index.uid); assert!(response.get("createdAt").is_some()); assert!(response.get("updatedAt").is_some()); @@ -39,24 +40,25 @@ async fn update_primary_key() { #[actix_rt::test] async fn create_and_update_with_different_encoding() { - let server = Server::new().await; - let index = server.index_with_encoder("test", Encoder::Gzip); - let (_, code) = index.create(None).await; + let server = Server::new_shared(); + let index = server.unique_index_with_encoder(Encoder::Gzip); + let (create_task, code) = index.create(None).await; assert_eq!(code, 202); + index.wait_task(create_task.uid()).await.succeeded(); - let index = server.index_with_encoder("test", Encoder::Brotli); + let index = index.with_encoder(Encoder::Brotli); let (task, _status_code) = index.update(Some("primary")).await; - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.succeeded(); assert_eq!(response["status"], "succeeded"); } #[actix_rt::test] async fn update_nothing() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task1, code) = index.create(None).await; assert_eq!(code, 202); @@ -67,18 +69,19 @@ async fn update_nothing() { assert_eq!(code, 202); - let response = index.wait_task(task2.uid()).await; + let response = index.wait_task(task2.uid()).await.succeeded(); assert_eq!(response["status"], "succeeded"); } #[actix_rt::test] async fn error_update_existing_primary_key() { - let server = Server::new().await; - let index = server.index("test"); - let (_response, code) = index.create(Some("id")).await; + let server = Server::new_shared(); + let index = server.unique_index(); + let (create_task, code) = 
index.create(Some("id")).await; assert_eq!(code, 202); + index.wait_task(create_task.uid()).await.succeeded(); let documents = json!([ { @@ -86,16 +89,17 @@ async fn error_update_existing_primary_key() { "content": "foobar" } ]); - index.add_documents(documents, None).await; + let (add_docs_task, add_docs_status_code) = index.add_documents(documents, None).await; + assert_eq!(add_docs_status_code, 202); + index.wait_task(add_docs_task.uid()).await.succeeded(); - let (task, code) = index.update(Some("primary")).await; + let (update_task, code) = index.update(Some("primary")).await; assert_eq!(code, 202); - - let response = index.wait_task(task.uid()).await; + let response = index.wait_task(update_task.uid()).await.failed(); let expected_response = json!({ - "message": "Index `test`: Index already has a primary key: `id`.", + "message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid), "code": "index_primary_key_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists" @@ -106,15 +110,16 @@ async fn error_update_existing_primary_key() { #[actix_rt::test] async fn error_update_unexisting_index() { - let server = Server::new().await; - let (task, code) = server.index("test").update(None).await; + let server = Server::new_shared(); + let index = server.unique_index(); + let (task, code) = index.update(None).await; assert_eq!(code, 202); - let response = server.index("test").wait_task(task.uid()).await; + let response = index.wait_task(task.uid()).await.failed(); let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" From f77abc9dc833a669eb110a3d5fa38f5d6c437fc5 Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Tue, 20 May 2025 11:49:27 +0300 Subject: [PATCH 063/131] Update crates/meilisearch/tests/index/update_index.rs Co-authored-by: Tamo --- crates/meilisearch/tests/index/update_index.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index 8880314d4..bc432fbd9 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -17,9 +17,6 @@ async fn update_primary_key() { let (task, _status_code) = index.update(Some("primary")).await; let response = index.wait_task(task.uid()).await.succeeded(); - - assert_eq!(response["status"], "succeeded"); - let (response, code) = index.get().await; assert_eq!(code, 200); From 025df397c037c0e02bbd2e76f5eddac6c1f7576b Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Tue, 20 May 2025 11:49:38 +0300 Subject: [PATCH 064/131] Update crates/meilisearch/tests/index/update_index.rs Co-authored-by: Tamo --- crates/meilisearch/tests/index/update_index.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index bc432fbd9..7f8325020 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -47,9 +47,7 @@ async fn create_and_update_with_different_encoding() { let index = index.with_encoder(Encoder::Brotli); let (task, _status_code) = index.update(Some("primary")).await; - let response = index.wait_task(task.uid()).await.succeeded(); - - assert_eq!(response["status"], 
"succeeded"); + index.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] From 65354b414a6af43e06f53cb67677c21d4163cabe Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Tue, 20 May 2025 11:49:45 +0300 Subject: [PATCH 065/131] Update crates/meilisearch/tests/index/update_index.rs Co-authored-by: Tamo --- crates/meilisearch/tests/index/update_index.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index 7f8325020..dd7ab9df7 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -64,9 +64,7 @@ async fn update_nothing() { assert_eq!(code, 202); - let response = index.wait_task(task2.uid()).await.succeeded(); - - assert_eq!(response["status"], "succeeded"); + index.wait_task(task2.uid()).await.succeeded(); } #[actix_rt::test] From f87e46cc16fa17f0d9d09150c31e8578b840917c Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 20 May 2025 13:43:48 +0300 Subject: [PATCH 066/131] Ignore the result from #wait_task() Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/update_index.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index dd7ab9df7..c32828c9e 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -15,8 +15,8 @@ async fn update_primary_key() { index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = index.update(Some("primary")).await; - - let response = index.wait_task(task.uid()).await.succeeded(); + index.wait_task(task.uid()).await.succeeded(); + let (response, code) = index.get().await; assert_eq!(code, 200); From b658e38acdbc3c2b52b4eebcffc278b0af605c6e Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 20 May 2025 13:53:51 +0300 Subject: [PATCH 067/131] Fix formatting Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/index/update_index.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index c32828c9e..a17e7c63f 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -16,7 +16,7 @@ async fn update_primary_key() { let (task, _status_code) = index.update(Some("primary")).await; index.wait_task(task.uid()).await.succeeded(); - + let (response, code) = index.get().await; assert_eq!(code, 200); From b4ca0a8c985b07161550a36fe012f834f961aa00 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 23 May 2025 23:44:07 +0300 Subject: [PATCH 068/131] Update the tests related to updating indices Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/common/index.rs | 22 ++++++++++++++++ .../meilisearch/tests/index/update_index.rs | 26 ++++--------------- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/crates/meilisearch/tests/common/index.rs b/crates/meilisearch/tests/common/index.rs index 24db6776b..e324d2ff5 100644 --- a/crates/meilisearch/tests/common/index.rs +++ b/crates/meilisearch/tests/common/index.rs @@ -294,6 +294,20 @@ impl Index<'_, Shared> { } (task, code) } + + pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) { + let (mut task, code) = self._update(primary_key).await; + if 
code.is_success() { + task = self.wait_task(task.uid()).await; + if task.is_success() { + panic!( + "`update_index_fail` succeeded: {}", + serde_json::to_string_pretty(&task).unwrap() + ); + } + } + (task, code) + } } #[allow(dead_code)] @@ -337,6 +351,14 @@ impl Index<'_, State> { self.service.post_encoded("/indexes", body, self.encoder).await } + pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) { + let body = json!({ + "primaryKey": primary_key, + }); + let url = format!("/indexes/{}", urlencode(self.uid.as_ref())); + self.service.patch_encoded(url, body, self.encoder).await + } + pub(super) async fn _delete(&self) -> (Value, StatusCode) { let url = format!("/indexes/{}", urlencode(self.uid.as_ref())); self.service.delete(url).await diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index a17e7c63f..291700728 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -2,7 +2,7 @@ use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; use crate::common::encoder::Encoder; -use crate::common::Server; +use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server}; use crate::json; #[actix_rt::test] @@ -69,24 +69,9 @@ async fn update_nothing() { #[actix_rt::test] async fn error_update_existing_primary_key() { - let server = Server::new_shared(); - let index = server.unique_index(); - let (create_task, code) = index.create(Some("id")).await; + let index = shared_index_with_documents().await; - assert_eq!(code, 202); - index.wait_task(create_task.uid()).await.succeeded(); - - let documents = json!([ - { - "id": "11", - "content": "foobar" - } - ]); - let (add_docs_task, add_docs_status_code) = index.add_documents(documents, None).await; - assert_eq!(add_docs_status_code, 202); - index.wait_task(add_docs_task.uid()).await.succeeded(); - - let (update_task, code) = index.update(Some("primary")).await; + let (update_task, code) = index.update_index_fail(Some("primary")).await; assert_eq!(code, 202); let response = index.wait_task(update_task.uid()).await.failed(); @@ -103,9 +88,8 @@ async fn error_update_existing_primary_key() { #[actix_rt::test] async fn error_update_unexisting_index() { - let server = Server::new_shared(); - let index = server.unique_index(); - let (task, code) = index.update(None).await; + let index = shared_does_not_exists_index().await; + let (task, code) = index.update_index_fail(Some("my-primary-key")).await; assert_eq!(code, 202); From 44f812c36d958a3fd2477a943b649064e6fe8512 Mon Sep 17 00:00:00 2001 From: Nate Nethercott <53127799+nnethercott@users.noreply.github.com> Date: Wed, 28 May 2025 15:38:12 +0200 Subject: [PATCH 069/131] Update crates/milli/src/update/new/extract/faceted/extract_facets.rs Co-authored-by: Many the fish --- crates/milli/src/update/new/extract/faceted/extract_facets.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index a1d9e6553..9e5120bba 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -194,7 +194,7 @@ impl FacetedDocidsExtractor { } // 2. Maybe update geo - if is_geo_enabled && has_changed_for_geo_fields { + if is_geo_enabled && inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)? 
{ extract_geo_document( inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), From b06cc1e0a2bba4f89fab8e80fd6e7e08a5670651 Mon Sep 17 00:00:00 2001 From: Nate Nethercott <53127799+nnethercott@users.noreply.github.com> Date: Wed, 28 May 2025 15:38:23 +0200 Subject: [PATCH 070/131] Update crates/milli/src/update/new/extract/faceted/extract_facets.rs Co-authored-by: Many the fish --- crates/milli/src/update/new/extract/faceted/extract_facets.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 9e5120bba..2160c16eb 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -91,7 +91,8 @@ impl FacetedDocidsExtractor { let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc); let docid = document_change.docid(); - // Macro expanding to an insertion/deletion facet fn + // Macro expanding to an insertion/deletion facet fn, + // using a macro avoids borrowing the parameters as mutable in both closures at the same time by postponing their creation macro_rules! facet_fn { (del) => { |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { From 1811168b965fbfe695bbd5bfec4904d6bc96aab6 Mon Sep 17 00:00:00 2001 From: nnethercott Date: Wed, 28 May 2025 15:45:13 +0200 Subject: [PATCH 071/131] remove duplicated check on geo field changes --- .../src/update/new/extract/faceted/extract_facets.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/milli/src/update/new/extract/faceted/extract_facets.rs b/crates/milli/src/update/new/extract/faceted/extract_facets.rs index 2160c16eb..517ef3f2d 100644 --- a/crates/milli/src/update/new/extract/faceted/extract_facets.rs +++ b/crates/milli/src/update/new/extract/faceted/extract_facets.rs @@ -91,8 +91,8 @@ impl FacetedDocidsExtractor { let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc); let docid = document_change.docid(); - // Macro expanding to an insertion/deletion facet fn, - // using a macro avoids borrowing the parameters as mutable in both closures at the same time by postponing their creation + // Using a macro avoids borrowing the parameters as mutable in both closures at + // the same time by postponing their creation macro_rules! facet_fn { (del) => { |fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| { @@ -168,8 +168,6 @@ impl FacetedDocidsExtractor { index, context.db_fields_ids_map, )?; - let has_changed_for_geo_fields = - inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?; // 1. Maybe update doc if has_changed_for_facets { @@ -195,7 +193,9 @@ } // 2. Maybe update geo - if is_geo_enabled && inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)? { + if is_geo_enabled + && inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?
+ { extract_geo_document( inner.current(rtxn, index, context.db_fields_ids_map)?, inner.external_document_id(), From 3c13feebf7a193a99bf87ce6cc16fbf1c686cf2b Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Wed, 28 May 2025 17:44:38 +0200 Subject: [PATCH 072/131] Test that distinct is applied for hybrid search --- crates/meilisearch/tests/search/hybrid.rs | 86 +++++++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/crates/meilisearch/tests/search/hybrid.rs b/crates/meilisearch/tests/search/hybrid.rs index 3282a357a..3a8fb2b4c 100644 --- a/crates/meilisearch/tests/search/hybrid.rs +++ b/crates/meilisearch/tests/search/hybrid.rs @@ -76,6 +76,48 @@ static SINGLE_DOCUMENT_VEC: Lazy = Lazy::new(|| { }]) }); +static TEST_DISTINCT_DOCUMENTS: Lazy = Lazy::new(|| { + // for query "Captain Marvel" and vector [1.0, 1.0] + json!([ + { + "id": 0, + "search": "Captain Planet", + "desc": "#2 for keyword search, #3 for hybrid search", + "_vectors": { + "default": [-1.0, 0.0], + }, + "distinct": 0 + }, + { + "id": 1, + "search": "Captain Marvel", + "desc": "#1 for keyword search, #4 for hybrid search", + "_vectors": { + "default": [-1.0, -1.0], + }, + "distinct": 1 + }, + { + "id": 2, + "search": "Some Captain at least", + "desc": "#3 for keyword search, #1 for hybrid search", + "_vectors": { + "default": [1.0, 1.0], + }, + "distinct": 0 + }, + { + "id": 3, + "search": "Irrelevant Capitaine", + "desc": "#4 for keyword search, #2 for hybrid search", + "_vectors": { + "default": [1.0, 0.0], + }, + "distinct": 1 + }, + ]) +}); + static SIMPLE_SEARCH_DOCUMENTS: Lazy = Lazy::new(|| { json!([ { @@ -493,6 +535,50 @@ async fn query_combination() { snapshot!(response["semanticHitCount"], @"0"); } +// see +#[actix_rt::test] +async fn distinct_is_applied() { + let server = Server::new().await; + let index = index_with_documents_user_provided(&server, &TEST_DISTINCT_DOCUMENTS).await; + + let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await; + assert_eq!(202, code, "{:?}", response); + index.wait_task(response.uid()).await.succeeded(); + + // pure keyword + let (response, code) = index + .search_post( + json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}), + ) + .await; + snapshot!(code, @"200 OK"); + snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###); + snapshot!(response["semanticHitCount"], @"null"); + snapshot!(response["estimatedTotalHits"], @"2"); + + // pure semantic + let (response, code) = index + .search_post( + json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}), + ) + .await; + snapshot!(code, @"200 OK"); + snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###); + snapshot!(response["semanticHitCount"], @"2"); + snapshot!(response["estimatedTotalHits"], @"2"); + + // hybrid + let (response, code) = index + .search_post( + json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}), + ) + .await; + snapshot!(code, @"200 OK"); + snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at 
least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###); + snapshot!(response["semanticHitCount"], @"1"); + snapshot!(response["estimatedTotalHits"], @"2"); +} + #[actix_rt::test] async fn retrieve_vectors() { let server = Server::new().await; From fd4b192a39372320916f11fd9f5610b3647a6760 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Wed, 28 May 2025 17:57:31 +0200 Subject: [PATCH 073/131] Add distinct_fid function and expose distinct_single_docid --- crates/milli/src/search/new/bucket_sort.rs | 15 ++++----------- crates/milli/src/search/new/distinct.rs | 17 ++++++++++++++++- crates/milli/src/search/new/mod.rs | 4 ++-- 3 files changed, 22 insertions(+), 14 deletions(-) diff --git a/crates/milli/src/search/new/bucket_sort.rs b/crates/milli/src/search/new/bucket_sort.rs index ca7a4a986..3c26cad5c 100644 --- a/crates/milli/src/search/new/bucket_sort.rs +++ b/crates/milli/src/search/new/bucket_sort.rs @@ -4,7 +4,9 @@ use super::logger::SearchLogger; use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait}; use super::SearchContext; use crate::score_details::{ScoreDetails, ScoringStrategy}; -use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput}; +use crate::search::new::distinct::{ + apply_distinct_rule, distinct_fid, distinct_single_docid, DistinctOutput, +}; use crate::{Result, TimeBudget}; pub struct BucketSortOutput { @@ -35,16 +37,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( logger.ranking_rules(&ranking_rules); logger.initial_universe(universe); - let distinct_field = match distinct { - Some(distinct) => Some(distinct), - None => ctx.index.distinct_field(ctx.txn)?, - }; - - let distinct_fid = if let Some(field) = distinct_field { - ctx.index.fields_ids_map(ctx.txn)?.id(field) - } else { - None - }; + let distinct_fid = distinct_fid(distinct, ctx.index, ctx.txn)?; if universe.len() < from as u64 { return Ok(BucketSortOutput { diff --git a/crates/milli/src/search/new/distinct.rs b/crates/milli/src/search/new/distinct.rs index 17859b6f8..36172302a 100644 --- a/crates/milli/src/search/new/distinct.rs +++ b/crates/milli/src/search/new/distinct.rs @@ -9,7 +9,7 @@ use crate::heed_codec::facet::{ FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetCodec, }; use crate::heed_codec::BytesRefCodec; -use crate::{Index, Result, SearchContext}; +use crate::{FieldId, Index, Result, SearchContext}; pub struct DistinctOutput { pub remaining: RoaringBitmap, @@ -121,3 +121,18 @@ pub fn facet_string_values<'a>( fn facet_values_prefix_key(distinct: u16, id: u32) -> [u8; FID_SIZE + DOCID_SIZE] { concat_arrays::concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes()) } + +pub fn distinct_fid( + query_distinct_field: Option<&str>, + index: &Index, + rtxn: &RoTxn<'_>, +) -> Result> { + let distinct_field = match query_distinct_field { + Some(distinct) => Some(distinct), + None => index.distinct_field(rtxn)?, + }; + + let distinct_fid = + if let Some(field) = distinct_field { index.fields_ids_map(rtxn)?.id(field) } else { None }; + Ok(distinct_fid) +} diff --git a/crates/milli/src/search/new/mod.rs b/crates/milli/src/search/new/mod.rs index 0a3bc1b04..a65b4076b 100644 --- a/crates/milli/src/search/new/mod.rs +++ b/crates/milli/src/search/new/mod.rs @@ -28,6 +28,7 @@ use std::time::Duration; use bucket_sort::{bucket_sort, BucketSortOutput}; use charabia::{Language, TokenizerBuilder}; use 
db_cache::DatabaseCache; +pub use distinct::{distinct_fid, distinct_single_docid}; use exact_attribute::ExactAttribute; use graph_based_ranking_rule::{Exactness, Fid, Position, Proximity, Typo}; use heed::RoTxn; @@ -47,8 +48,7 @@ use sort::Sort; use self::distinct::facet_string_values; use self::geo_sort::GeoSort; -pub use self::geo_sort::Parameter as GeoSortParameter; -pub use self::geo_sort::Strategy as GeoSortStrategy; +pub use self::geo_sort::{Parameter as GeoSortParameter, Strategy as GeoSortStrategy}; use self::graph_based_ranking_rule::Words; use self::interner::Interned; use self::vector_sort::VectorSort; From 54f5e74744205f467ddbaa229adc8844c5767f44 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Wed, 28 May 2025 17:58:02 +0200 Subject: [PATCH 074/131] Support distinct in hybrid search --- crates/milli/src/search/hybrid.rs | 69 ++++++++++++++++++++++++++----- 1 file changed, 58 insertions(+), 11 deletions(-) diff --git a/crates/milli/src/search/hybrid.rs b/crates/milli/src/search/hybrid.rs index e07f886c9..b63f6288f 100644 --- a/crates/milli/src/search/hybrid.rs +++ b/crates/milli/src/search/hybrid.rs @@ -1,11 +1,13 @@ use std::cmp::Ordering; +use heed::RoTxn; use itertools::Itertools; use roaring::RoaringBitmap; use crate::score_details::{ScoreDetails, ScoreValue, ScoringStrategy}; +use crate::search::new::{distinct_fid, distinct_single_docid}; use crate::search::SemanticSearch; -use crate::{MatchingWords, Result, Search, SearchResult}; +use crate::{Index, MatchingWords, Result, Search, SearchResult}; struct ScoreWithRatioResult { matching_words: MatchingWords, @@ -91,7 +93,10 @@ impl ScoreWithRatioResult { keyword_results: Self, from: usize, length: usize, - ) -> (SearchResult, u32) { + distinct: Option<&str>, + index: &Index, + rtxn: &RoTxn<'_>, + ) -> Result<(SearchResult, u32)> { #[derive(Clone, Copy)] enum ResultSource { Semantic, @@ -106,8 +111,9 @@ impl ScoreWithRatioResult { vector_results.document_scores.len() + keyword_results.document_scores.len(), ); - let mut documents_seen = RoaringBitmap::new(); - for ((docid, (main_score, _sub_score)), source) in vector_results + let distinct_fid = distinct_fid(distinct, index, rtxn)?; + let mut excluded_documents = RoaringBitmap::new(); + for res in vector_results .document_scores .into_iter() .zip(std::iter::repeat(ResultSource::Semantic)) @@ -121,13 +127,33 @@ impl ScoreWithRatioResult { compare_scores(left, right).is_ge() }, ) - // remove documents we already saw - .filter(|((docid, _), _)| documents_seen.insert(*docid)) + // remove documents we already saw and apply distinct rule + .filter_map(|item @ ((docid, _), _)| { + if !excluded_documents.insert(docid) { + // the document was already added, or is indistinct from an already-added document. 
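+ // (Roaring's `insert` returns false when the id is already present, so this single bitmap both deduplicates the merged hits and carries the ids that the distinct rule excludes below.)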
+ return None; + } + + if let Some(distinct_fid) = distinct_fid { + if let Err(error) = distinct_single_docid( + index, + rtxn, + distinct_fid, + docid, + &mut excluded_documents, + ) { + return Some(Err(error)); + } + } + + Some(Ok(item)) + }) // start skipping **after** the filter .skip(from) // take **after** skipping .take(length) { + let ((docid, (main_score, _sub_score)), source) = res?; if let ResultSource::Semantic = source { semantic_hit_count += 1; } @@ -136,10 +162,24 @@ impl ScoreWithRatioResult { document_scores.push(main_score); } - ( + // compute the set of candidates from both sets + let candidates = vector_results.candidates | keyword_results.candidates; + let must_remove_redundant_candidates = distinct_fid.is_some(); + let candidates = if must_remove_redundant_candidates { + // patch-up the candidates to remove the indistinct documents, then add back the actual hits + let mut candidates = candidates - excluded_documents; + for docid in &documents_ids { + candidates.insert(*docid); + } + candidates + } else { + candidates + }; + + Ok(( SearchResult { matching_words: keyword_results.matching_words, - candidates: vector_results.candidates | keyword_results.candidates, + candidates, documents_ids, document_scores, degraded: vector_results.degraded | keyword_results.degraded, @@ -147,7 +187,7 @@ impl ScoreWithRatioResult { | keyword_results.used_negative_operator, }, semantic_hit_count, - ) + )) } } @@ -226,8 +266,15 @@ impl Search<'_> { let keyword_results = ScoreWithRatioResult::new(keyword_results, 1.0 - semantic_ratio); let vector_results = ScoreWithRatioResult::new(vector_results, semantic_ratio); - let (merge_results, semantic_hit_count) = - ScoreWithRatioResult::merge(vector_results, keyword_results, self.offset, self.limit); + let (merge_results, semantic_hit_count) = ScoreWithRatioResult::merge( + vector_results, + keyword_results, + self.offset, + self.limit, + search.distinct.as_deref(), + search.index, + search.rtxn, + )?; assert!(merge_results.documents_ids.len() <= self.limit); Ok((merge_results, Some(semantic_hit_count))) } From d329e8625064e12c548250b498a9aa720de4470a Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 08:42:10 +0300 Subject: [PATCH 075/131] tests: Use shared server + unique server where possible Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/errors.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/crates/meilisearch/tests/search/errors.rs b/crates/meilisearch/tests/search/errors.rs index 5e54bcff1..ba7853d11 100644 --- a/crates/meilisearch/tests/search/errors.rs +++ b/crates/meilisearch/tests/search/errors.rs @@ -1196,10 +1196,8 @@ async fn search_on_unknown_field_plus_joker() { #[actix_rt::test] async fn distinct_at_search_time() { - let server = Server::new().await; - let index = server.index("test"); - let (task, _) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, _code) = index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await; index.wait_task(response.uid()).await.succeeded(); @@ -1209,7 +1207,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. 
This index does not have configured filterable attributes.", + "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" @@ -1224,7 +1222,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.", + "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" @@ -1239,7 +1237,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.", + "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" From 8b8b0d802c31016e6d02645ed1bdccd584dd906d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 08:53:10 +0300 Subject: [PATCH 076/131] tests: Faster search::facet_search IT tests Use shared server + unique indices where possible. Assert .succeeded() for the waited tasks. Drop usage of dbg!() in the assertions. It caused noise in the logs Signed-off-by: Martin Tzvetanov Grigorov --- .../meilisearch/tests/search/facet_search.rs | 168 +++++++++--------- 1 file changed, 84 insertions(+), 84 deletions(-) diff --git a/crates/meilisearch/tests/search/facet_search.rs b/crates/meilisearch/tests/search/facet_search.rs index 65e204702..87af125f0 100644 --- a/crates/meilisearch/tests/search/facet_search.rs +++ b/crates/meilisearch/tests/search/facet_search.rs @@ -50,13 +50,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + let response = index.wait_task(task.uid()).await.succeeded(); + assert!(response.is_success(), "{response:?}"); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + let response = index.wait_task(task.uid()).await.succeeded(); + assert!(response.is_success(), "{response:?}"); let (response, code) = index.facet_search(query.clone()).await; insta::allow_duplicates! 
{ @@ -65,21 +65,21 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = server.delete_index("test").await; assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + let response = server.wait_task(task.uid()).await.succeeded(); + assert!(response.is_success(), "{response:?}"); eprintln!("Settings -> Documents -> test"); let index = server.index("test"); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + let response = index.wait_task(task.uid()).await.succeeded(); + assert!(response.is_success(), "{response:?}"); let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + let response = index.wait_task(task.uid()).await.succeeded(); + assert!(response.is_success(), "{response:?}"); let (response, code) = index.facet_search(query.clone()).await; insta::allow_duplicates! { @@ -88,14 +88,14 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = server.delete_index("test").await; assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await; - assert!(response.is_success(), "{:?}", response); + let response = server.wait_task(task.uid()).await.succeeded(); + assert!(response.is_success(), "{response:?}"); } #[actix_rt::test] async fn simple_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -105,20 +105,20 @@ async fn simple_facet_search() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 2); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["facetHits"].as_array().unwrap().len(), 1); } #[actix_rt::test] async fn simple_facet_search_on_movies() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -212,23 +212,23 @@ async fn simple_facet_search_on_movies() { ]); let (response, code) = index.update_settings_filterable_attributes(json!(["genres", "color"])).await; - assert_eq!(202, code, "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(response["facetHits"], 
@r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###); } #[actix_rt::test] async fn advanced_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -251,8 +251,8 @@ async fn advanced_facet_search() { #[actix_rt::test] async fn more_advanced_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -275,8 +275,8 @@ async fn more_advanced_facet_search() { #[actix_rt::test] async fn simple_facet_search_with_max_values() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await; @@ -287,14 +287,14 @@ async fn simple_facet_search_with_max_values() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 1); } #[actix_rt::test] async fn simple_facet_search_by_count_with_max_values() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index @@ -309,14 +309,14 @@ async fn simple_facet_search_by_count_with_max_values() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 1); } #[actix_rt::test] async fn non_filterable_facet_search_error() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; @@ -324,17 +324,17 @@ async fn non_filterable_facet_search_error() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); } #[actix_rt::test] async fn facet_search_dont_support_words() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["genres"])).await; @@ -344,14 +344,14 @@ async fn facet_search_dont_support_words() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await; - 
assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["facetHits"].as_array().unwrap().len(), 0); } #[actix_rt::test] async fn simple_facet_search_with_sort_by_count() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await; @@ -362,7 +362,7 @@ async fn simple_facet_search_with_sort_by_count() { let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); let hits = response["facetHits"].as_array().unwrap(); assert_eq!(hits.len(), 2); assert_eq!(hits[0], json!({ "value": "Action", "count": 3 })); @@ -371,25 +371,25 @@ async fn simple_facet_search_with_sort_by_count() { #[actix_rt::test] async fn add_documents_and_deactivate_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index .update_settings(json!({ "facetSearch": false, "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(response, @r###" { "message": "The facet search is disabled for this index", @@ -402,8 +402,8 @@ async fn add_documents_and_deactivate_facet_search() { #[actix_rt::test] async fn deactivate_facet_search_and_add_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -411,16 +411,16 @@ async fn deactivate_facet_search_and_add_documents() { "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); snapshot!(response, @r###" { "message": "The facet search is disabled for this index", @@ -433,8 +433,8 @@ async fn deactivate_facet_search_and_add_documents() { #[actix_rt::test] async fn deactivate_facet_search_add_documents_and_activate_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -442,31 +442,31 @@ async fn 
deactivate_facet_search_add_documents_and_activate_facet_search() { "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index .update_settings(json!({ "facetSearch": true, })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn deactivate_facet_search_add_documents_and_reset_facet_search() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -474,25 +474,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() { "filterableAttributes": ["genres"], })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (response, _code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index .update_settings(json!({ "facetSearch": serde_json::Value::Null, })) .await; - assert_eq!("202", code.as_str(), "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!("202", code.as_str(), "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2); + assert_eq!(code, 200, "{response}"); + assert_eq!(response["facetHits"].as_array().unwrap().len(), 2); } #[actix_rt::test] @@ -618,8 +618,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() { #[actix_rt::test] async fn distinct_facet_search_on_movies() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -925,26 +925,26 @@ async fn distinct_facet_search_on_movies() { ]); let (response, code) = index.update_settings_filterable_attributes(json!(["genres", "color"])).await; - assert_eq!(202, code, "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.update_settings_distinct_attribute(json!("color")).await; - assert_eq!(202, code, "{:?}", response); - index.wait_task(response.uid()).await; + assert_eq!(202, code, "{response:?}"); + index.wait_task(response.uid()).await.succeeded(); let (response, 
_code) = index.add_documents(documents, None).await; - index.wait_task(response.uid()).await; + index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await; // non-exhaustive facet count is counting 27 documents with the facet query "blob" but there are only 23 documents with a distinct color. - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###); let (response, code) = index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await; // exhaustive facet count is counting 23 documents with the facet query "blob" which is the number of distinct colors. - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###); } From bd95503eba5aa79a0b32c2fb035c4cc136dd1855 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 09:03:23 +0300 Subject: [PATCH 077/131] tests: Faster search::locales IT tests Use a shared server + unique indices where possible Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/locales.rs | 82 +++++++++++----------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/crates/meilisearch/tests/search/locales.rs b/crates/meilisearch/tests/search/locales.rs index 282589d6a..aaa65b7ea 100644 --- a/crates/meilisearch/tests/search/locales.rs +++ b/crates/meilisearch/tests/search/locales.rs @@ -89,9 +89,9 @@ static DOCUMENTS: Lazy = Lazy::new(|| { #[actix_rt::test] async fn simple_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); index .update_settings( @@ -196,9 +196,9 @@ async fn simple_search() { #[actix_rt::test] async fn force_locales() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -214,7 +214,7 @@ async fn force_locales() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -274,9 +274,9 @@ async fn force_locales() { #[actix_rt::test] async fn force_locales_with_pattern() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -292,7 +292,7 @@ async fn force_locales_with_pattern() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -352,9 +352,9 @@ async fn force_locales_with_pattern() { #[actix_rt::test] async fn force_locales_with_pattern_nested() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = NESTED_DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -368,7 +368,7 @@ async fn force_locales_with_pattern_nested() { snapshot!(response, @r###" { "taskUid": 0, - 
"indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -423,9 +423,9 @@ async fn force_locales_with_pattern_nested() { } #[actix_rt::test] async fn force_different_locales_with_pattern() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -443,7 +443,7 @@ async fn force_different_locales_with_pattern() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -499,9 +499,9 @@ async fn force_different_locales_with_pattern() { #[actix_rt::test] async fn auto_infer_locales_at_search_with_attributes_to_search_on() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -521,7 +521,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -577,9 +577,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() { #[actix_rt::test] async fn auto_infer_locales_at_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings( @@ -595,7 +595,7 @@ async fn auto_infer_locales_at_search() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -676,9 +676,9 @@ async fn auto_infer_locales_at_search() { #[actix_rt::test] async fn force_different_locales_with_pattern_nested() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = NESTED_DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -694,7 +694,7 @@ async fn force_different_locales_with_pattern_nested() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -774,9 +774,9 @@ async fn force_different_locales_with_pattern_nested() { #[actix_rt::test] async fn settings_change() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = NESTED_DOCUMENTS.clone(); let (task, _status_code) = index.add_documents(documents, None).await; index.wait_task(task.uid()).await.succeeded(); @@ -792,7 +792,7 @@ async fn settings_change() { snapshot!(response, @r###" { "taskUid": 1, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -855,7 +855,7 @@ async fn settings_change() { snapshot!(response, @r###" { "taskUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -906,9 +906,9 @@ async fn settings_change() { #[actix_rt::test] async fn invalid_locales() { - let server = Server::new().await; + let server = 
Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); index .update_settings( @@ -945,9 +945,9 @@ async fn invalid_locales() { #[actix_rt::test] async fn invalid_localized_attributes_rules() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let (response, _) = index .update_settings(json!({ "localizedAttributes": [ @@ -1015,9 +1015,9 @@ async fn invalid_localized_attributes_rules() { #[actix_rt::test] async fn simple_facet_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -1027,7 +1027,7 @@ async fn simple_facet_search() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -1073,9 +1073,9 @@ async fn simple_facet_search() { #[actix_rt::test] async fn facet_search_with_localized_attributes() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = DOCUMENTS.clone(); let (response, _) = index .update_settings(json!({ @@ -1089,7 +1089,7 @@ async fn facet_search_with_localized_attributes() { snapshot!(response, @r###" { "taskUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", "enqueuedAt": "[date]" @@ -1146,9 +1146,9 @@ async fn facet_search_with_localized_attributes() { #[actix_rt::test] async fn swedish_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = json!([ {"id": "tra1-1", "product": "trä"}, {"id": "tra2-1", "product": "traktor"}, @@ -1269,9 +1269,9 @@ async fn swedish_search() { #[actix_rt::test] async fn german_search() { - let server = Server::new().await; + let server = Server::new_shared(); + let index = server.unique_index(); - let index = server.index("test"); let documents = json!([ {"id": 1, "product": "Interkulturalität"}, {"id": 2, "product": "Wissensorganisation"}, From 17207b54057adae3afc4908ed164e12529616495 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 09:09:02 +0300 Subject: [PATCH 078/131] tests: Faster search::matching_strategy IT tests Use shared server + unique indices for all tests Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- .../meilisearch/tests/search/matching_strategy.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/meilisearch/tests/search/matching_strategy.rs b/crates/meilisearch/tests/search/matching_strategy.rs index 3b4325c10..7131ab9cc 100644 --- a/crates/meilisearch/tests/search/matching_strategy.rs +++ b/crates/meilisearch/tests/search/matching_strategy.rs @@ -2,11 +2,11 @@ use meili_snap::snapshot; use once_cell::sync::Lazy; use crate::common::index::Index; -use crate::common::{Server, Value}; +use crate::common::{Server, Shared, Value}; use crate::json; -async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> { - let index = server.index("test"); +async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> { + 
let index = server.unique_index(); let (task, _status_code) = index.add_documents(documents.clone(), None).await; index.wait_task(task.uid()).await.succeeded(); @@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy = Lazy::new(|| { #[actix_rt::test] async fn simple_search() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| { @@ -75,7 +75,7 @@ async fn simple_search() { #[actix_rt::test] async fn search_with_typo() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; index @@ -102,7 +102,7 @@ async fn search_with_typo() { #[actix_rt::test] async fn search_with_unknown_word() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; index From fb8b8321922e40b2cc647bfe71e813cc0c8a40f6 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 10:54:31 +0300 Subject: [PATCH 079/131] Trigger build Signed-off-by: Martin Tzvetanov Grigorov From 58207da9346f2cf965927d5b67c495fada9e3aa8 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 10:56:33 +0300 Subject: [PATCH 080/131] Trigger build Signed-off-by: Martin Tzvetanov Grigorov From 27e7c00622860149698dc43ba514df959608e85b Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 11:33:10 +0300 Subject: [PATCH 081/131] Add dynamic redactions for taskUid and enqueuedAt properties Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/locales.rs | 44 +++++++++++----------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/crates/meilisearch/tests/search/locales.rs b/crates/meilisearch/tests/search/locales.rs index aaa65b7ea..b1c9b2bc2 100644 --- a/crates/meilisearch/tests/search/locales.rs +++ b/crates/meilisearch/tests/search/locales.rs @@ -211,9 +211,9 @@ async fn force_locales() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -289,9 +289,9 @@ async fn force_locales_with_pattern() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -365,9 +365,9 @@ async fn force_locales_with_pattern_nested() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -440,9 +440,9 @@ async fn force_different_locales_with_pattern() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -518,9 +518,9 @@ async fn 
auto_infer_locales_at_search_with_attributes_to_search_on() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -592,9 +592,9 @@ async fn auto_infer_locales_at_search() { }), ) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -691,9 +691,9 @@ async fn force_different_locales_with_pattern_nested() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -789,9 +789,9 @@ async fn settings_change() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 1, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -852,9 +852,9 @@ async fn settings_change() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 2, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -1024,9 +1024,9 @@ async fn simple_facet_search() { "filterableAttributes": ["name_en", "name_ja", "name_zh"], })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", @@ -1086,9 +1086,9 @@ async fn facet_search_with_localized_attributes() { ] })) .await; - snapshot!(response, @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 0, + "taskUid": "[task_uid]", "indexUid": "[uuid]", "status": "enqueued", "type": "settingsUpdate", From 308fd7128e60907b7ce30364993fb6165ad622c0 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Thu, 29 May 2025 11:36:56 +0300 Subject: [PATCH 082/131] Fix clippy errors Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/matching_strategy.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/search/matching_strategy.rs b/crates/meilisearch/tests/search/matching_strategy.rs index 7131ab9cc..ece320b2a 100644 --- a/crates/meilisearch/tests/search/matching_strategy.rs +++ b/crates/meilisearch/tests/search/matching_strategy.rs @@ -76,7 +76,7 @@ async fn simple_search() { #[actix_rt::test] async fn search_with_typo() { let server = Server::new_shared(); - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| { @@ -103,7 +103,7 @@ async fn search_with_typo() { #[actix_rt::test] async fn search_with_unknown_word() { let server = Server::new_shared(); - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let index 
= index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; index .search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| { From ae9899f179cc2a75794457f729647aa69089f499 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 30 May 2025 15:26:55 +0300 Subject: [PATCH 083/131] tests: search::pagination IT tests Minor cleanup. Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/pagination.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/meilisearch/tests/search/pagination.rs b/crates/meilisearch/tests/search/pagination.rs index ff601bd5b..f8b698a95 100644 --- a/crates/meilisearch/tests/search/pagination.rs +++ b/crates/meilisearch/tests/search/pagination.rs @@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() { let index = shared_index_with_documents().await; index .search(json!({}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert!(response.get("estimatedTotalHits").is_some()); assert!(response.get("limit").is_some()); assert!(response.get("offset").is_some()); @@ -25,7 +25,7 @@ async fn simple_search() { let index = shared_index_with_documents().await; index .search(json!({"page": 1}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 5); assert!(response.get("totalHits").is_some()); assert_eq!(response["page"], 1); @@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() { let index = shared_index_with_documents().await; index .search(json!({"page": 0}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 0); assert!(response.get("totalHits").is_some()); assert_eq!(response["page"], 0); @@ -58,7 +58,7 @@ async fn hits_per_page_1() { let index = shared_index_with_documents().await; index .search(json!({"hitsPerPage": 1}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); assert_eq!(response["totalHits"], 5); assert_eq!(response["page"], 1); @@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() { let index = shared_index_with_documents().await; index .search(json!({"hitsPerPage": 0}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 0); assert_eq!(response["totalHits"], 5); assert_eq!(response["page"], 1); @@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() { for page in 0..=4 { index .search(json!({"page": page, "hitsPerPage": 1}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["totalHits"], 4); assert_eq!(response["totalPages"], 4); }) From d9b0463a0b23e7a7a44bdddf065ad6a65675a542 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 30 May 2025 15:37:27 +0300 Subject: [PATCH 084/131] tests: Faster search::restricted_searchable IT tests Use shared server + unique indices Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/search/restrict_searchable.rs | 104 +++++++++--------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git 
a/crates/meilisearch/tests/search/restrict_searchable.rs b/crates/meilisearch/tests/search/restrict_searchable.rs
index 8ef5db26d..e5408a210 100644
--- a/crates/meilisearch/tests/search/restrict_searchable.rs
+++ b/crates/meilisearch/tests/search/restrict_searchable.rs
@@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot};
 use once_cell::sync::Lazy;
 
 use crate::common::index::Index;
-use crate::common::{Server, Value};
+use crate::common::{Server, Shared, Value};
 use crate::json;
 
-async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
-    let index = server.index("test");
+async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
+    let index = server.unique_index();
     let (task, _code) = index.add_documents(documents.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
 
@@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
 
 #[actix_rt::test]
 async fn simple_search_on_title() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
 
     // simple search should return 2 documents (ids: 2 and 3).
     index
@@ -51,8 +51,8 @@ async fn simple_search_on_title() {
 
 #[actix_rt::test]
 async fn search_no_searchable_attribute_set() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
 
     index
         .search(
@@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() {
 
 #[actix_rt::test]
 async fn search_on_all_attributes() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
 
     index
         .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@@ -106,8 +106,8 @@ async fn search_on_all_attributes() {
 
 #[actix_rt::test]
 async fn search_on_all_attributes_restricted_set() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
     let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
     index.wait_task(task.uid()).await.succeeded();
 
@@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() {
 
 #[actix_rt::test]
 async fn simple_prefix_search_on_title() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
 
     // simple search should return 2 documents (ids: 2 and 3).
     index
@@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() {
 
 #[actix_rt::test]
 async fn simple_search_on_title_matching_strategy_all() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
     // simple search matching strategy all should only return 1 document (ids: 2).
index .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| { @@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() { #[actix_rt::test] async fn simple_search_on_no_field() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // simple search on no field shouldn't return any document. index .search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| { @@ -161,8 +161,8 @@ async fn simple_search_on_no_field() { #[actix_rt::test] async fn word_ranking_rule_order() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; // Document 3 should appear before document 2. index @@ -189,8 +189,8 @@ async fn word_ranking_rule_order() { #[actix_rt::test] async fn word_ranking_rule_order_exact_words() { - let server = Server::new().await; - let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (task, _status_code) = index .update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]})) .await; @@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() { #[actix_rt::test] async fn typo_ranking_rule_order() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Capitain Marivel", @@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() { #[actix_rt::test] async fn attributes_ranking_rule_order() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Captain Marvel", @@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() { #[actix_rt::test] async fn exactness_ranking_rule_order() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Captain Marvel", @@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() { #[actix_rt::test] async fn search_on_exact_field() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents( - &server, + server, &json!([ { "title": "Captain Marvel", @@ -359,7 +359,7 @@ async fn search_on_exact_field() { let (response, code) = index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); // Searching on an exact attribute should only return the document matching without typo. 
     index
@@ -372,7 +372,7 @@ async fn search_on_exact_field() {
 
 #[actix_rt::test]
 async fn phrase_search_on_title() {
-    let server = Server::new().await;
+    let server = Server::new_shared();
     let documents = json!([
         { "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" },
         { "id": 5, "desc": "Document Review", "title": "Document Review Attorney" },
         { "id": 4, "desc": "Document Review", "title": "Document Review Manager - Cyber Incident Response (Remote)" },
         { "id": 3, "desc": "Document Review", "title": "Document Review Paralegal" },
         { "id": 2, "desc": "Document Review", "title": "Document Controller (Saudi National)" },
         { "id": 1, "desc": "Document Review", "title": "Document Reviewer in New York" },
         { "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" },
         { "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" }
     ]);
-    let index = index_with_documents(&server, &documents).await;
+    let index = index_with_documents(server, &documents).await;
 
     index
         .search(
@@ -460,8 +460,8 @@ static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
 
 #[actix_rt::test]
 async fn nested_search_on_title_with_prefix_wildcard() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
 
     // Wildcard should match to 'details.' attribute
     index
@@ -486,8 +486,8 @@ async fn nested_search_on_title_with_prefix_wildcard() {
 
 #[actix_rt::test]
 async fn nested_search_with_suffix_wildcard() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
 
     // Wildcard should match to any attribute inside 'details.'
     // It's worth noting the difference between 'details.*' and '*.title'
@@ -553,8 +553,8 @@ async fn nested_search_with_suffix_wildcard() {
 
 #[actix_rt::test]
 async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
     let (task, _status_code) =
         index.update_settings_searchable_attributes(json!(["details.title"])).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -581,8 +581,8 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
 
 #[actix_rt::test]
 async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
 
     index
         .search(
@@ -632,8 +632,8 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
 
 #[actix_rt::test]
 async fn nested_prefix_search_on_title_with_prefix_wildcard() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
+    let server = Server::new_shared();
+    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
 
     // Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3).
index @@ -658,8 +658,8 @@ async fn nested_prefix_search_on_title_with_prefix_wildcard() { #[actix_rt::test] async fn nested_prefix_search_on_details_with_suffix_wildcard() { - let server = Server::new().await; - let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; index .search( @@ -686,8 +686,8 @@ async fn nested_prefix_search_on_details_with_suffix_wildcard() { #[actix_rt::test] async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() { - let server = Server::new().await; - let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; // Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3) index @@ -712,8 +712,8 @@ async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() { #[actix_rt::test] async fn nested_search_on_title_matching_strategy_all() { - let server = Server::new().await; - let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; // Nested search matching strategy all should only return 1 document (ids: 3) index @@ -735,8 +735,8 @@ async fn nested_search_on_title_matching_strategy_all() { #[actix_rt::test] async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() { - let server = Server::new().await; - let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; // Document 3 should appear before documents 1 and 2 index @@ -766,8 +766,8 @@ async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() { #[actix_rt::test] async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() { - let server = Server::new().await; - let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await; // Document 3 should appear before documents 1 and 2 index From 1158d6689f0a787b4b171105bbaf3809fd771e9d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 30 May 2025 15:41:31 +0300 Subject: [PATCH 085/131] tests: Faster settings::distinct IT tests Use shared server + unique indices Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/settings/distinct.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/meilisearch/tests/settings/distinct.rs b/crates/meilisearch/tests/settings/distinct.rs index 2c5b7517f..a3b1b5276 100644 --- a/crates/meilisearch/tests/settings/distinct.rs +++ b/crates/meilisearch/tests/settings/distinct.rs @@ -3,8 +3,8 @@ use crate::json; #[actix_rt::test] async fn set_and_reset_distinct_attribute() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await; index.wait_task(task1.uid()).await.succeeded(); @@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() { #[actix_rt::test] async fn set_and_reset_distinct_attribute_with_dedicated_route() { - let server = 
Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
     index.wait_task(update_task1.uid()).await.succeeded();

From 3ea167bade0103b837c713dd41f11b6301a6696a Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Fri, 30 May 2025 16:33:27 +0300
Subject: [PATCH 086/131] tests: Faster settings::get_settings IT tests

Use shared server + unique indices

Signed-off-by: Martin Tzvetanov Grigorov
---
 .../tests/settings/get_settings.rs            | 169 +++++++++---------
 1 file changed, 84 insertions(+), 85 deletions(-)

diff --git a/crates/meilisearch/tests/settings/get_settings.rs b/crates/meilisearch/tests/settings/get_settings.rs
index 5c0f89ed3..a52d4fa90 100644
--- a/crates/meilisearch/tests/settings/get_settings.rs
+++ b/crates/meilisearch/tests/settings/get_settings.rs
@@ -11,59 +11,62 @@ macro_rules! test_setting_routes {
 
             #[actix_rt::test]
             async fn get_unexisting_index() {
-                let server = Server::new().await;
-                let url = format!("/indexes/test/settings/{}",
-                    stringify!($setting)
-                    .chars()
-                    .map(|c| if c == '_' { '-' } else { c })
-                    .collect::<String>());
-                let (_response, code) = server.service.get(url).await;
-                assert_eq!(code, 404);
-            }
-
-            #[actix_rt::test]
-            async fn update_unexisting_index() {
-                let server = Server::new().await;
-                let url = format!("/indexes/test/settings/{}",
-                    stringify!($setting)
-                    .chars()
-                    .map(|c| if c == '_' { '-' } else { c })
-                    .collect::<String>());
-                let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
-                assert_eq!(code, 202, "{}", response);
-                server.index("").wait_task(0).await;
-                let (response, code) = server.index("test").get().await;
-                assert_eq!(code, 200, "{}", response);
-            }
-
-            #[actix_rt::test]
-            async fn delete_unexisting_index() {
-                let server = Server::new().await;
-                let url = format!("/indexes/test/settings/{}",
-                    stringify!($setting)
-                    .chars()
-                    .map(|c| if c == '_' { '-' } else { c })
-                    .collect::<String>());
-                let (_, code) = server.service.delete(url).await;
-                assert_eq!(code, 202);
-                let response = server.index("").wait_task(0).await;
-                assert_eq!(response["status"], "failed");
-            }
-
-            #[actix_rt::test]
-            async fn get_default() {
-                let server = Server::new().await;
-                let index = server.index("test");
-                let (response, code) = index.create(None).await;
-                assert_eq!(code, 202, "{}", response);
-                index.wait_task(0).await;
-                let url = format!("/indexes/test/settings/{}",
+                let server = Server::new_shared();
+                let index_name = uuid::Uuid::new_v4().to_string();
+                let url = format!("/indexes/{index_name}/settings/{}",
                     stringify!($setting)
                     .chars()
                     .map(|c| if c == '_' { '-' } else { c })
                     .collect::<String>());
                 let (response, code) = server.service.get(url).await;
-                assert_eq!(code, 200, "{}", response);
+                assert_eq!(code, 404, "{response}");
+            }
+
+            #[actix_rt::test]
+            async fn update_unexisting_index() {
+                let server = Server::new_shared();
+                let index_name = uuid::Uuid::new_v4().to_string();
+                let url = format!("/indexes/{index_name}/settings/{}",
+                    stringify!($setting)
+                    .chars()
+                    .map(|c| if c == '_' { '-' } else { c })
+                    .collect::<String>());
+                let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
+                assert_eq!(code, 202, "{response}");
+                let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
+                assert_eq!(code, 404, "{response}");
+            }
+
+            #[actix_rt::test]
+            async fn delete_unexisting_index() {
+                let server = Server::new_shared();
+                let index_name = uuid::Uuid::new_v4().to_string();
+                let url = format!("/indexes/{index_name}/settings/{}",
+                    stringify!($setting)
+                    .chars()
+                    .map(|c| if c == '_' { '-' } else { c })
+                    .collect::<String>());
+                let (response, code) = server.service.delete(url).await;
+                assert_eq!(code, 202, "{response}");
+                let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
+                assert_eq!(code, 404, "{response}");
+            }
+
+            #[actix_rt::test]
+            async fn get_default() {
+                let server = Server::new_shared();
+                let index = server.unique_index();
+                let (response, code) = index.create(None).await;
+                assert_eq!(code, 202, "{response}");
+                index.wait_task(response.uid()).await.succeeded();
+                let url = format!("/indexes/{}/settings/{}",
+                    index.uid,
+                    stringify!($setting)
+                    .chars()
+                    .map(|c| if c == '_' { '-' } else { c })
+                    .collect::<String>());
+                let (response, code) = server.service.get(url).await;
+                assert_eq!(code, 200, "{response}");
                 let expected = crate::json!($default_value);
                 assert_eq!(expected, response);
             }
@@ -185,15 +188,16 @@ test_setting_routes!(
 
 #[actix_rt::test]
 async fn get_settings_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").settings().await;
-    assert_eq!(code, 404, "{}", response)
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (response, code) = index.settings().await;
+    assert_eq!(code, 404, "{response}")
 }
 
 #[actix_rt::test]
 async fn get_settings() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (response, _code) = index.create(None).await;
     index.wait_task(response.uid()).await.succeeded();
     let (response, code) = index.settings().await;
@@ -237,9 +241,8 @@ async fn get_settings() {
 
 #[actix_rt::test]
 async fn secrets_are_hidden_in_settings() {
-    let server = Server::new().await;
-
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (response, _code) = index.create(None).await;
     index.wait_task(response.uid()).await.succeeded();
 
@@ -259,11 +262,11 @@ async fn secrets_are_hidden_in_settings() {
         .await;
 
     meili_snap::snapshot!(code, @"202 Accepted");
-    meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
+    meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
     @r###"
     {
-      "taskUid": 1,
-      "indexUid": "test",
+      "taskUid": "[task_uid]",
+      "indexUid": "[uuid]",
       "status": "enqueued",
       "type": "settingsUpdate",
       "enqueuedAt": "[date]"
@@ -272,7 +275,7 @@ async fn secrets_are_hidden_in_settings() {
 
     let settings_update_uid = response.uid();
 
-    index.wait_task(settings_update_uid).await;
+    index.wait_task(settings_update_uid).await.succeeded();
 
     let (response, code) = index.settings().await;
     meili_snap::snapshot!(code, @"200 OK");
@@ -360,16 +363,16 @@ async fn secrets_are_hidden_in_settings() {
 
 #[actix_rt::test]
 async fn error_update_settings_unknown_field() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (_response, code) = index.update_settings(json!({"foo": 12})).await;
     assert_eq!(code, 400);
 }
 
 #[actix_rt::test]
 async fn test_partial_update() {
-    let server = 
Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await; index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.settings().await; @@ -388,20 +391,18 @@ async fn test_partial_update() { #[actix_rt::test] async fn error_delete_settings_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.delete_settings().await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + index.wait_task(task.uid()).await.failed(); } #[actix_rt::test] async fn reset_all_settings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = json!([ { @@ -413,7 +414,7 @@ async fn reset_all_settings() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["taskUid"], 0); + assert!(response["taskUid"].as_i64().unwrap() >= 0); index.wait_task(response.uid()).await.succeeded(); let (update_task,_status_code) = index @@ -446,17 +447,15 @@ async fn reset_all_settings() { #[actix_rt::test] async fn update_setting_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.update_settings(json!({})).await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - assert_eq!(response["status"], "succeeded"); + index.wait_task(task.uid()).await.succeeded(); let (_response, code) = index.get().await; assert_eq!(code, 200); let (task, _status_code) = index.delete_settings().await; - let response = index.wait_task(task.uid()).await; - assert_eq!(response["status"], "succeeded"); + index.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] @@ -477,8 +476,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() { #[actix_rt::test] async fn error_set_invalid_ranking_rules() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(None).await; let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await; @@ -495,8 +494,8 @@ async fn error_set_invalid_ranking_rules() { #[actix_rt::test] async fn set_and_reset_distinct_attribute_with_dedicated_route() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _code) = index.update_distinct_attribute(json!("test")).await; index.wait_task(task.uid()).await.succeeded(); @@ -516,8 +515,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() { #[actix_rt::test] async fn granular_filterable_attributes() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(None).await; let (response, code) = @@ -535,7 +534,7 @@ async fn granular_filterable_attributes() { index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.settings().await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); 
     snapshot!(json_string!(response["filterableAttributes"]), @r###"
     [
       {

From b54ece690b5d57d07f86917e5f181ecd3dcf4967 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Mon, 2 Jun 2025 08:20:05 +0300
Subject: [PATCH 087/131] tests: Faster settings::proximity_settings IT tests

Use shared server + unique indices

Related-to: https://github.com/meilisearch/meilisearch/issues/4840

Signed-off-by: Martin Tzvetanov Grigorov
---
 .../tests/settings/proximity_settings.rs      | 20 +++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/crates/meilisearch/tests/settings/proximity_settings.rs b/crates/meilisearch/tests/settings/proximity_settings.rs
index c5897bc51..6de1ffe0e 100644
--- a/crates/meilisearch/tests/settings/proximity_settings.rs
+++ b/crates/meilisearch/tests/settings/proximity_settings.rs
@@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
 
 #[actix_rt::test]
 async fn attribute_scale_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -38,7 +38,7 @@ async fn attribute_scale_search() {
             "rankingRules": ["words", "typo", "proximity"],
         }))
         .await;
-    assert_eq!("202", code.as_str(), "{:?}", response);
+    assert_eq!("202", code.as_str(), "{response:?}");
     index.wait_task(response.uid()).await.succeeded();
 
     // the expected order is [1, 3, 2] instead of [3, 1, 2]
@@ -99,8 +99,8 @@ async fn attribute_scale_phrase_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -167,8 +167,8 @@ async fn word_scale_set_and_reset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -282,8 +282,8 @@ async fn attribute_scale_default_ranking_rules() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() {
             "proximityPrecision": "byAttribute"
         }))
         .await;
-    assert_eq!("202", code.as_str(), "{:?}", response);
+    assert_eq!("202", code.as_str(), "{response:?}");
     index.wait_task(response.uid()).await.succeeded();
 
     // the expected order is [3, 1, 2]

From 6b1a345dce44d4aa4b8975cb754eea2710f61aab Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Mon, 2 Jun 2025 08:23:09 +0300
Subject: [PATCH 088/131] tests: Faster settings::tokenizer_customization IT
 tests

Use shared server + unique indices

Related-to: https://github.com/meilisearch/meilisearch/issues/4840

Signed-off-by: Martin Tzvetanov Grigorov
---
 .../tests/settings/tokenizer_customization.rs | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git 
a/crates/meilisearch/tests/settings/tokenizer_customization.rs b/crates/meilisearch/tests/settings/tokenizer_customization.rs
index 190918b34..7c58368f7 100644
--- a/crates/meilisearch/tests/settings/tokenizer_customization.rs
+++ b/crates/meilisearch/tests/settings/tokenizer_customization.rs
@@ -5,8 +5,8 @@ use crate::json;
 
 #[actix_rt::test]
 async fn set_and_reset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _code) = index
         .update_settings(json!({
@@ -70,8 +70,8 @@ async fn set_and_search() {
         },
     ]);
 
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (add_task, _status_code) = index.add_documents(documents, None).await;
     index.wait_task(add_task.uid()).await.succeeded();
@@ -224,8 +224,8 @@ async fn advanced_synergies() {
         },
     ]);
 
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (add_task, _status_code) = index.add_documents(documents, None).await;
     index.wait_task(add_task.uid()).await.succeeded();

From 790621dc29f95b1332fd6e8f00b63fbab021bae6 Mon Sep 17 00:00:00 2001
From: Martin Grigorov
Date: Mon, 2 Jun 2025 10:55:28 +0300
Subject: [PATCH 089/131] Remove useless assert

Co-authored-by: Many the fish
---
 crates/meilisearch/tests/settings/get_settings.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/crates/meilisearch/tests/settings/get_settings.rs b/crates/meilisearch/tests/settings/get_settings.rs
index a52d4fa90..2dc2b175f 100644
--- a/crates/meilisearch/tests/settings/get_settings.rs
+++ b/crates/meilisearch/tests/settings/get_settings.rs
@@ -414,7 +414,6 @@ async fn reset_all_settings() {
 
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert!(response["taskUid"].as_i64().unwrap() >= 0);
     index.wait_task(response.uid()).await.succeeded();
 
     let (update_task,_status_code) = index

From 5a7cfc57fd304c25332ba438f8fa7a52728568a4 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Mon, 2 Jun 2025 10:56:43 +0300
Subject: [PATCH 090/131] tests: Faster tasks::mod IT tests

Use shared server + unique indices

Related-to: https://github.com/meilisearch/meilisearch/issues/4840

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/tasks/mod.rs | 359 +++++++++++++-------------
 1 file changed, 180 insertions(+), 179 deletions(-)

diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index f432ef7db..fcaf5f576 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -1,8 +1,8 @@
 mod errors;
 mod webhook;
 
-use meili_snap::insta::assert_json_snapshot;
-use meili_snap::snapshot;
+use insta::assert_json_snapshot;
+use meili_snap::{json_string, snapshot};
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
@@ -11,11 +11,11 @@ use crate::json;
 
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
-    let (response, code) = index.get_task(1).await;
+    let (response, code) = index.get_task(u32::MAX as u64).await;
 
     let expected_response = json!({
-        "message": "Task `1` not found.",
+        "message": "Task `4294967295` not found.",
@@ 
-30,8 +30,8 @@ async fn error_get_unexisting_task_status() { #[actix_rt::test] async fn get_task_status() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (create_task, _status_code) = index.create(None).await; let (add_task, _status_code) = index .add_documents( @@ -50,8 +50,8 @@ async fn get_task_status() { #[actix_rt::test] async fn list_tasks() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index @@ -64,14 +64,14 @@ async fn list_tasks() { #[actix_rt::test] async fn list_tasks_pagination_and_reverse() { - let server = Server::new().await; + let server = Server::new_shared(); // First of all we want to create a lot of tasks very quickly. The fastest way is to delete a lot of unexisting indexes let mut last_task = None; - for i in 0..10 { - let index = server.index(format!("test-{i}")); + for _ in 0..10 { + let index = server.unique_index(); last_task = Some(index.create(None).await.0.uid()); } - server.wait_task(last_task.unwrap()).await; + server.wait_task(last_task.unwrap()).await.succeeded(); let (response, code) = server.tasks_filter("limit=3").await; assert_eq!(code, 200); @@ -102,14 +102,14 @@ async fn list_tasks_pagination_and_reverse() { #[actix_rt::test] async fn list_tasks_with_star_filters() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; - let (response, code) = index.service.get("/tasks?indexUids=test").await; + let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); @@ -127,50 +127,50 @@ async fn list_tasks_with_star_filters() { let (response, code) = index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await; - assert_eq!(code, 200, "{:?}", response); + assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = index .service - .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test") + .get(format!("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}", index.uid)) .await; - assert_eq!(code, 200, "{:?}", response); + assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = index .service .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*") .await; - assert_eq!(code, 200, "{:?}", response); + assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn list_tasks_status_filtered() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.failed(); let (response, code) = index.filtered_tasks(&[], 
&["succeeded"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn list_tasks_type_filtered() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index @@ -178,19 +178,19 @@ async fn list_tasks_type_filtered() { .await; let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] async fn list_tasks_invalid_canceled_by_filter() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index @@ -198,14 +198,14 @@ async fn list_tasks_invalid_canceled_by_filter() { .await; let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 0); } #[actix_rt::test] async fn list_tasks_status_and_type_filtered() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index @@ -213,7 +213,7 @@ async fn list_tasks_status_and_type_filtered() { .await; let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 0); let (response, code) = index @@ -223,12 +223,12 @@ async fn list_tasks_status_and_type_filtered() { &[], ) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); } macro_rules! assert_valid_summarized_task { - ($response:expr, $task_type:literal, $index:literal) => {{ + ($response:expr, $task_type:literal, $index:tt) => {{ assert_eq!($response.as_object().unwrap().len(), 5); assert!($response["taskUid"].as_u64().is_some()); assert_eq!($response["indexUid"], $index); @@ -242,49 +242,49 @@ macro_rules! 
assert_valid_summarized_task { #[actix_web::test] async fn test_summarized_task_view() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); + let index_uid = index.uid.clone(); let (response, _) = index.create(None).await; - assert_valid_summarized_task!(response, "indexCreation", "test"); + assert_valid_summarized_task!(response, "indexCreation", index_uid); let (response, _) = index.update(None).await; - assert_valid_summarized_task!(response, "indexUpdate", "test"); + assert_valid_summarized_task!(response, "indexUpdate", index_uid); let (response, _) = index.update_settings(json!({})).await; - assert_valid_summarized_task!(response, "settingsUpdate", "test"); + assert_valid_summarized_task!(response, "settingsUpdate", index_uid); let (response, _) = index.update_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test"); + assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid); let (response, _) = index.add_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test"); + assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid); let (response, _) = index.delete_document(1).await; - assert_valid_summarized_task!(response, "documentDeletion", "test"); + assert_valid_summarized_task!(response, "documentDeletion", index_uid); let (response, _) = index.clear_all_documents().await; - assert_valid_summarized_task!(response, "documentDeletion", "test"); + assert_valid_summarized_task!(response, "documentDeletion", index_uid); let (response, _) = index.delete().await; - assert_valid_summarized_task!(response, "indexDeletion", "test"); + assert_valid_summarized_task!(response, "indexDeletion", index_uid); } #[actix_web::test] async fn test_summarized_document_addition_or_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await; index.wait_task(task.uid()).await.succeeded(); - let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -329,18 +329,17 @@ async fn test_summarized_document_addition_or_update() { #[actix_web::test] async fn test_summarized_delete_documents_by_batch() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await; index.wait_task(task.uid()).await.failed(); - let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", 
".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { "uid": 0, "batchUid": 0, - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -366,13 +365,13 @@ async fn test_summarized_delete_documents_by_batch() { let (del_task, _status_code) = index.delete_batch(vec![42]).await; index.wait_task(del_task.uid()).await.succeeded(); let (task, _) = index.get_task(del_task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { "uid": 2, "batchUid": 2, - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -392,20 +391,20 @@ async fn test_summarized_delete_documents_by_batch() { #[actix_web::test] async fn test_summarized_delete_documents_by_filter() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -415,7 +414,7 @@ async fn test_summarized_delete_documents_by_filter() { "originalFilter": "\"doggo = bernese\"" }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -432,13 +431,13 @@ async fn test_summarized_delete_documents_by_filter() { index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -448,7 +447,7 @@ async fn test_summarized_delete_documents_by_filter() { "originalFilter": "\"doggo = bernese\"" }, "error": { - "message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", + "message": "Index `[uuid]`: Attribute `doggo` is not filterable. 
This index does not have configured filterable attributes.\n1:6 doggo = bernese", "code": "invalid_document_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_filter" @@ -465,13 +464,13 @@ async fn test_summarized_delete_documents_by_filter() { index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 4, - "batchUid": 4, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -491,18 +490,18 @@ async fn test_summarized_delete_documents_by_filter() { #[actix_web::test] async fn test_summarized_delete_document_by_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.delete_document(1).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", "canceledBy": null, @@ -512,7 +511,7 @@ async fn test_summarized_delete_document_by_id() { "originalFilter": null }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -528,13 +527,13 @@ async fn test_summarized_delete_document_by_id() { let (task, _status_code) = index.delete_document(42).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -554,12 +553,12 @@ async fn test_summarized_delete_document_by_id() { #[actix_web::test] async fn test_summarized_settings_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // here we should find my payload even in the failed task. 
let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await; - meili_snap::snapshot!(code, @"400 Bad Request"); - meili_snap::snapshot!(meili_snap::json_string!(response), @r###" + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" { "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.", "code": "invalid_settings_ranking_rules", @@ -571,13 +570,13 @@ async fn test_summarized_settings_update() { let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, @@ -605,18 +604,18 @@ async fn test_summarized_settings_update() { #[actix_web::test] async fn test_summarized_index_creation() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexCreation", "canceledBy": null, @@ -634,13 +633,13 @@ async fn test_summarized_index_creation() { let (task, _status_code) = index.create(Some("doggos")).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "indexCreation", "canceledBy": null, @@ -648,7 +647,7 @@ async fn test_summarized_index_creation() { "primaryKey": "doggos" }, "error": { - "message": "Index `test` already exists.", + "message": "Index `[uuid]` already exists.", "code": "index_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_already_exists" @@ -663,8 +662,8 @@ async fn test_summarized_index_creation() { #[actix_web::test] async fn 
test_summarized_index_deletion() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (ret, _code) = index.delete().await; let task = index.wait_task(ret.uid()).await; snapshot!(task, @@ -672,7 +671,7 @@ async fn test_summarized_index_deletion() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "indexDeletion", "canceledBy": null, @@ -680,7 +679,7 @@ async fn test_summarized_index_deletion() { "deletedDocuments": 0 }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -703,7 +702,7 @@ async fn test_summarized_index_deletion() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -726,7 +725,7 @@ async fn test_summarized_index_deletion() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexDeletion", "canceledBy": null, @@ -749,7 +748,7 @@ async fn test_summarized_index_deletion() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "indexDeletion", "canceledBy": null, @@ -757,7 +756,7 @@ async fn test_summarized_index_deletion() { "deletedDocuments": 0 }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -772,19 +771,19 @@ async fn test_summarized_index_deletion() { #[actix_web::test] async fn test_summarized_index_update() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); // If the index doesn't exist yet, we should get errors with or without the primary key. 
let (task, _status_code) = index.update(None).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "indexUpdate", "canceledBy": null, @@ -792,7 +791,7 @@ async fn test_summarized_index_update() { "primaryKey": null }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -807,13 +806,13 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(Some("bones")).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "failed", "type": "indexUpdate", "canceledBy": null, @@ -821,7 +820,7 @@ async fn test_summarized_index_update() { "primaryKey": "bones" }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -839,13 +838,13 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexUpdate", "canceledBy": null, @@ -863,13 +862,13 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(Some("bones")).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 4, - "batchUid": 4, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexUpdate", "canceledBy": null, @@ -887,7 
+886,7 @@ async fn test_summarized_index_update() { #[actix_web::test] async fn test_summarized_index_swap() { - let server = Server::new().await; + let server = Server::new_shared(); let (task, _status_code) = server .index_swap(json!([ { "indexes": ["doggos", "cattos"] } @@ -895,12 +894,12 @@ async fn test_summarized_index_swap() { .await; server.wait_task(task.uid()).await.failed(); let (task, _) = server.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "failed", "type": "indexSwap", @@ -928,23 +927,25 @@ async fn test_summarized_index_swap() { } "###); - let (task, _code) = server.index("doggos").create(None).await; + let doggos_index = server.unique_index(); + let (task, _code) = doggos_index.create(None).await; server.wait_task(task.uid()).await.succeeded(); - let (task, _code) = server.index("cattos").create(None).await; + let cattos_index = server.unique_index(); + let (task, _code) = cattos_index.create(None).await; server.wait_task(task.uid()).await.succeeded(); let (task, _code) = server .index_swap(json!([ - { "indexes": ["doggos", "cattos"] } + { "indexes": [doggos_index.uid, cattos_index.uid] } ])) .await; server.wait_task(task.uid()).await.succeeded(); let (task, _) = server.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 3, - "batchUid": 3, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "indexSwap", @@ -970,20 +971,20 @@ async fn test_summarized_index_swap() { #[actix_web::test] async fn test_summarized_task_cancelation() { - let server = Server::new().await; - let index = server.index("doggos"); + let server = Server::new_shared(); + let index = server.unique_index(); // to avoid being flaky we're only going to cancel an already finished task :( let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = server.cancel_tasks("uids=0").await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "taskCancelation", @@ -1004,20 +1005,20 @@ async fn test_summarized_task_cancelation() { #[actix_web::test] async fn test_summarized_task_deletion() { - let server = Server::new().await; - let index = server.index("doggos"); + let server = 
Server::new_shared(); + let index = server.unique_index(); // to avoid being flaky we're only going to delete an already finished task :( let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = server.delete_tasks("uids=0").await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "taskDeletion", @@ -1038,16 +1039,16 @@ async fn test_summarized_task_deletion() { #[actix_web::test] async fn test_summarized_dump_creation() { - let server = Server::new().await; + let server = Server::new_shared(); let (task, _status_code) = server.create_dump().await; server.wait_task(task.uid()).await; let (task, _) = server.get_task(task.uid()).await; - assert_json_snapshot!(task, - { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": null, "status": "succeeded", "type": "dumpCreation", From db28d13cb11e788aeded1c38e4460e679b7fb73b Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 10:59:46 +0300 Subject: [PATCH 091/131] Remove useless assertion. .succeeded() does the same Signed-off-by: Martin Tzvetanov Grigorov --- .../meilisearch/tests/search/facet_search.rs | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/crates/meilisearch/tests/search/facet_search.rs b/crates/meilisearch/tests/search/facet_search.rs index 87af125f0..57d2cfcd2 100644 --- a/crates/meilisearch/tests/search/facet_search.rs +++ b/crates/meilisearch/tests/search/facet_search.rs @@ -50,13 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await.succeeded(); - assert!(response.is_success(), "{response:?}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await.succeeded(); - assert!(response.is_success(), "{response:?}"); + index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.facet_search(query.clone()).await; insta::allow_duplicates! 
{ @@ -65,21 +63,18 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = server.delete_index("test").await; assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await.succeeded(); - assert!(response.is_success(), "{response:?}"); + server.wait_task(task.uid()).await.succeeded(); eprintln!("Settings -> Documents -> test"); let index = server.index("test"); let (task, code) = index.update_settings(settings.clone()).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await.succeeded(); - assert!(response.is_success(), "{response:?}"); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.add_documents(documents.clone(), None).await; assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await.succeeded(); - assert!(response.is_success(), "{response:?}"); + index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.facet_search(query.clone()).await; insta::allow_duplicates! { @@ -88,8 +83,7 @@ async fn test_settings_documents_indexing_swapping_and_facet_search( let (task, code) = server.delete_index("test").await; assert_eq!(code, 202, "{}", task); - let response = server.wait_task(task.uid()).await.succeeded(); - assert!(response.is_success(), "{response:?}"); + server.wait_task(task.uid()).await.succeeded(); } #[actix_rt::test] From 8d8fcb9846640f383573387503dd764de55dd1bb Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 11:44:21 +0300 Subject: [PATCH 092/131] Revert to unique server + named index for some tests Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/tasks/mod.rs | 64 ++++++++++++++++----------- 1 file changed, 38 insertions(+), 26 deletions(-) diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index fcaf5f576..57cb87364 100644 --- a/crates/meilisearch/tests/tasks/mod.rs +++ b/crates/meilisearch/tests/tasks/mod.rs @@ -1,7 +1,6 @@ mod errors; mod webhook; -use insta::assert_json_snapshot; use meili_snap::{json_string, snapshot}; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; @@ -18,7 +17,7 @@ async fn error_get_unexisting_task_status() { let (response, code) = index.get_task(u32::MAX as u64).await; let expected_response = json!({ - "message": "Task `1` not found.", + "message": "Task `4294967295` not found.", "code": "task_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#task_not_found" @@ -64,11 +63,12 @@ async fn list_tasks() { #[actix_rt::test] async fn list_tasks_pagination_and_reverse() { - let server = Server::new_shared(); + // do not use a shared server here, as we want to assert tasks ids and we need them to be stable + let server = Server::new().await; // First of all we want to create a lot of tasks very quickly. The fastest way is to delete a lot of unexisting indexes let mut last_task = None; - for _ in 0..10 { - let index = server.unique_index(); + for i in 0..10 { + let index = server.index(format!("test-{i}")); last_task = Some(index.create(None).await.0.uid()); } server.wait_task(last_task.unwrap()).await.succeeded(); @@ -102,8 +102,9 @@ async fn list_tasks_pagination_and_reverse() { #[actix_rt::test] async fn list_tasks_with_star_filters() { - let server = Server::new_shared(); - let index = server.unique_index(); + let server = Server::new().await; + // Do not use a unique index here, as we want to test the `indexUids=*` filter. 
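+    // A fixed, human-readable uid also keeps the `indexUids=...` query string
+    // used further down deterministic across runs of this dedicated server.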
+ let index = server.index("test"); let (task, _code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index @@ -132,7 +133,10 @@ async fn list_tasks_with_star_filters() { let (response, code) = index .service - .get(format!("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}", index.uid)) + .get(format!( + "/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}", + index.uid + )) .await; assert_eq!(code, 200, "{response:?}"); assert_eq!(response["results"].as_array().unwrap().len(), 2); @@ -278,6 +282,7 @@ async fn test_summarized_document_addition_or_update() { let (task, _status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await; index.wait_task(task.uid()).await.succeeded(); + let (task, _) = index.get_task(task.uid()).await; snapshot!(json_string!(task, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" @@ -303,14 +308,14 @@ async fn test_summarized_document_addition_or_update() { let (task, _status_code) = index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await; index.wait_task(task.uid()).await.succeeded(); - let (task, _) = index.get_task(1).await; - assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + let (task, _) = index.get_task(task.uid()).await; + snapshot!(json_string!(task, + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 1, - "batchUid": 1, - "indexUid": "test", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, @@ -331,14 +336,20 @@ async fn test_summarized_document_addition_or_update() { async fn test_summarized_delete_documents_by_batch() { let server = Server::new_shared(); let index = server.unique_index(); - let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await; + let non_existing_task_id1 = u32::MAX as u64; + let non_existing_task_id2 = non_existing_task_id1 - 1; + let non_existing_task_id3 = non_existing_task_id1 - 2; + let (task, _status_code) = index + .delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3]) + .await; index.wait_task(task.uid()).await.failed(); + let (task, _) = index.get_task(task.uid()).await; snapshot!(json_string!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 0, - "batchUid": 0, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "failed", "type": "documentDeletion", @@ -349,7 +360,7 @@ async fn test_summarized_delete_documents_by_batch() { "originalFilter": null }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -366,11 +377,11 @@ async fn test_summarized_delete_documents_by_batch() { index.wait_task(del_task.uid()).await.succeeded(); let (task, _) = index.get_task(del_task.uid()).await; 
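     // Re-read the task once it has settled so the snapshot captures its final state.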
snapshot!(json_string!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { - "uid": 2, - "batchUid": 2, + "uid": "[uid]", + "batchUid": "[batch_uid]", "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", @@ -975,12 +986,13 @@ async fn test_summarized_task_cancelation() { let index = server.unique_index(); // to avoid being flaky we're only going to cancel an already finished task :( let (task, _status_code) = index.create(None).await; - index.wait_task(task.uid()).await.succeeded(); - let (task, _status_code) = server.cancel_tasks("uids=0").await; + let task_uid = task.uid(); + index.wait_task(task_uid).await.succeeded(); + let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { "uid": "[uid]", @@ -992,7 +1004,7 @@ async fn test_summarized_task_cancelation() { "details": { "matchedTasks": 1, "canceledTasks": 0, - "originalFilter": "?uids=0" + "originalFilter": "[of]" }, "error": null, "duration": "[duration]", From cffbe3fcb68c3d8fa9cf238c9f20602d9f82496d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 14:17:19 +0300 Subject: [PATCH 093/131] Trigger build Signed-off-by: Martin Tzvetanov Grigorov From 5efc78db550f82776c701b430f2469d99ae67179 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 14:47:18 +0300 Subject: [PATCH 094/131] tests: Faster vector::binary_quantized IT tests Use shared server + unique indices where possible Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/vector/binary_quantized.rs | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/meilisearch/tests/vector/binary_quantized.rs b/crates/meilisearch/tests/vector/binary_quantized.rs index 96e32c1a3..89d32cc50 100644 --- a/crates/meilisearch/tests/vector/binary_quantized.rs +++ b/crates/meilisearch/tests/vector/binary_quantized.rs @@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents; #[actix_rt::test] async fn retrieve_binary_quantize_status_in_the_settings() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() { #[actix_rt::test] async fn binary_quantize_before_sending_documents() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() { #[actix_rt::test] async fn binary_quantize_after_sending_documents() { - 
let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() { #[actix_rt::test] async fn try_to_disable_binary_quantization() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() { .await; snapshot!(code, @"202 Accepted"); let ret = server.wait_task(response.uid()).await; - snapshot!(ret, @r#" + snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#" { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "doggo", + "indexUid": "[uuid]", "status": "failed", "type": "settingsUpdate", "canceledBy": null, @@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() { } }, "error": { - "message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", + "message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", "code": "invalid_settings_embedders", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_settings_embedders" From 3b30b6a57abe5044b4f97929fb001c671eab9bc0 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:04:48 +0300 Subject: [PATCH 095/131] tests: Faster documents::delete_documents IT tests Use shared server + unique indices Assert .succeeded()/.failed() for the waited tasks Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- .../tests/documents/delete_documents.rs | 141 +++++++++--------- 1 file changed, 67 insertions(+), 74 deletions(-) diff --git a/crates/meilisearch/tests/documents/delete_documents.rs b/crates/meilisearch/tests/documents/delete_documents.rs index 060f17958..26dfba8c8 100644 --- a/crates/meilisearch/tests/documents/delete_documents.rs +++ b/crates/meilisearch/tests/documents/delete_documents.rs @@ -5,35 +5,32 @@ use crate::json; #[actix_rt::test] async fn delete_one_document_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.delete_document(0).await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + index.wait_task(task.uid()).await.failed(); } #[actix_rt::test] async fn delete_one_unexisting_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index.create(None).await; let (response, code) = index.delete_document(0).await; - assert_eq!(code, 202, "{}", response); - let update = index.wait_task(response.uid()).await; - assert_eq!(update["status"], "succeeded"); + assert_eq!(code, 
202, "{response}"); + index.wait_task(response.uid()).await.succeeded(); } #[actix_rt::test] async fn delete_one_document() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await; index.wait_task(task.uid()).await.succeeded(); - let (task, status_code) = server.index("test").delete_document(0).await; + let (task, status_code) = index.delete_document(0).await; assert_eq!(status_code, 202); index.wait_task(task.uid()).await.succeeded(); @@ -43,20 +40,18 @@ async fn delete_one_document() { #[actix_rt::test] async fn clear_all_documents_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.clear_all_documents().await; assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + index.wait_task(task.uid()).await.failed(); } #[actix_rt::test] async fn clear_all_documents() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index .add_documents( json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]), @@ -67,7 +62,7 @@ async fn clear_all_documents() { let (task, code) = index.clear_all_documents().await; assert_eq!(code, 202); - let _update = index.wait_task(task.uid()).await; + let _update = index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert!(response["results"].as_array().unwrap().is_empty()); @@ -75,14 +70,14 @@ async fn clear_all_documents() { #[actix_rt::test] async fn clear_all_documents_empty_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.clear_all_documents().await; assert_eq!(code, 202); - let _update = index.wait_task(task.uid()).await; + let _update = index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert!(response["results"].as_array().unwrap().is_empty()); @@ -90,33 +85,31 @@ async fn clear_all_documents_empty_index() { #[actix_rt::test] async fn error_delete_batch_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.delete_batch(vec![]).await; let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(code, 202); - let response = index.wait_task(task.uid()).await; - - assert_eq!(response["status"], "failed"); + let response = index.wait_task(task.uid()).await.failed(); assert_eq!(response["error"], expected_response); } #[actix_rt::test] async fn delete_batch() { - let server = Server::new().await; - let index = server.index("test"); + let server = 
Server::new_shared(); + let index = server.unique_index(); let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await; index.wait_task(task.uid()).await.succeeded(); let (task, code) = index.delete_batch(vec![1, 0]).await; assert_eq!(code, 202); - let _update = index.wait_task(task.uid()).await; + let _update = index.wait_task(task.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 1); @@ -125,14 +118,14 @@ async fn delete_batch() { #[actix_rt::test] async fn delete_no_document_batch() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await; index.wait_task(task.uid()).await.succeeded(); - let (_response, code) = index.delete_batch(vec![]).await; - assert_eq!(code, 202, "{}", _response); + let (response, code) = index.delete_batch(vec![]).await; + assert_eq!(code, 202, "{response}"); - let _update = index.wait_task(_response.uid()).await; + let _update = index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 3); @@ -140,8 +133,8 @@ async fn delete_no_document_batch() { #[actix_rt::test] async fn delete_document_by_filter() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings_filterable_attributes(json!(["color"])).await; let (task, _status_code) = index .add_documents( @@ -178,22 +171,22 @@ async fn delete_document_by_filter() { let (response, code) = index.delete_document_by_filter(json!({ "filter": "color = blue"})).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 2, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -251,22 +244,22 @@ async fn delete_document_by_filter() { let (response, code) = index.delete_document_by_filter(json!({ "filter": "color NOT EXISTS"})).await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => 
"[duration]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "taskUid": 3, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -321,8 +314,8 @@ async fn delete_document_by_filter() { #[actix_rt::test] async fn delete_document_by_complex_filter() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); index.update_settings_filterable_attributes(json!(["color"])).await; let (task, _status_code) = index .add_documents( @@ -343,22 +336,22 @@ async fn delete_document_by_complex_filter() { ) .await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###" { - "taskUid": 2, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 2, - "batchUid": 2, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, @@ -402,22 +395,22 @@ async fn delete_document_by_complex_filter() { .delete_document_by_filter(json!({ "filter": [["color = green", "color NOT EXISTS"]] })) .await; snapshot!(code, @"202 Accepted"); - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "taskUid": 3, - "indexUid": "doggo", + "taskUid": "[task_uid]", + "indexUid": "[uuid]", "status": "enqueued", "type": "documentDeletion", "enqueuedAt": "[date]" } "###); - let response = index.wait_task(response.uid()).await; - snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" + let response = 
index.wait_task(response.uid()).await.succeeded(); + snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###" { - "uid": 3, - "batchUid": 3, - "indexUid": "doggo", + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "[uuid]", "status": "succeeded", "type": "documentDeletion", "canceledBy": null, From f80182f0a91dfe6cb40340cfedca4dc4ebdfa1d2 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:20:17 +0300 Subject: [PATCH 096/131] tests: Faster similar::errors IT tests Use shared server + unique indices Related to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/similar/errors.rs | 136 ++++++++++----------- 1 file changed, 68 insertions(+), 68 deletions(-) diff --git a/crates/meilisearch/tests/similar/errors.rs b/crates/meilisearch/tests/similar/errors.rs index 30ff5b145..fa4118fe3 100644 --- a/crates/meilisearch/tests/similar/errors.rs +++ b/crates/meilisearch/tests/similar/errors.rs @@ -6,11 +6,11 @@ use crate::json; #[actix_rt::test] async fn similar_unexisting_index() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let expected_response = json!({ - "message": "Index `test` not found.", + "message": format!("Index `{}` not found.", index.uid), "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -26,12 +26,12 @@ async fn similar_unexisting_index() { #[actix_rt::test] async fn similar_unexisting_parameter() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); index .similar(json!({"id": 287947, "marin": "hello"}), |response, code| { - assert_eq!(code, 400, "{}", response); + assert_eq!(code, 400, "{response}"); assert_eq!(response["code"], "bad_request"); }) .await; @@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() { #[actix_rt::test] async fn similar_bad_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -53,7 +53,7 @@ async fn similar_bad_id() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await; snapshot!(code, @"400 Bad Request"); @@ -69,8 +69,8 @@ async fn similar_bad_id() { #[actix_rt::test] async fn similar_bad_ranking_score_threshold() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await; snapshot!(code, @"400 Bad Request"); @@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() { #[actix_rt::test] async 
fn similar_invalid_ranking_score_threshold() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await; snapshot!(code, @"400 Bad Request"); @@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() { #[actix_rt::test] async fn similar_invalid_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -143,7 +143,7 @@ async fn similar_invalid_id() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await; @@ -160,8 +160,8 @@ async fn similar_invalid_id() { #[actix_rt::test] async fn similar_not_found_id() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -174,7 +174,7 @@ async fn similar_not_found_id() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await; @@ -191,8 +191,8 @@ async fn similar_not_found_id() { #[actix_rt::test] async fn similar_bad_offset() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -205,7 +205,7 @@ async fn similar_bad_offset() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await; @@ -233,8 +233,8 @@ async fn similar_bad_offset() { #[actix_rt::test] async fn similar_bad_limit() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -247,7 +247,7 @@ async fn similar_bad_limit() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let (response, code) = index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await; @@ -277,8 +277,8 @@ async fn similar_bad_limit() { async fn similar_bad_filter() { // Since a filter is deserialized as a json Value it will never fail to deserialize. // Thus the error message is not generated by deserr but written by us. 
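     // (Any JSON shape deserializes successfully here; validation only happens
     // later, when the filter expression itself is parsed.)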
- let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -291,7 +291,7 @@ async fn similar_bad_filter() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); snapshot!(code, @"202 Accepted"); @@ -316,8 +316,8 @@ async fn similar_bad_filter() { #[actix_rt::test] async fn filter_invalid_syntax_object() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() { #[actix_rt::test] async fn filter_invalid_syntax_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() { #[actix_rt::test] async fn filter_invalid_syntax_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() { #[actix_rt::test] async fn filter_invalid_attribute_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() { #[actix_rt::test] async fn filter_invalid_attribute_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); 
- server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() { #[actix_rt::test] async fn filter_reserved_geo_attribute_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() { #[actix_rt::test] async fn filter_reserved_geo_attribute_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() { #[actix_rt::test] async fn filter_reserved_attribute_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() { #[actix_rt::test] async fn filter_reserved_attribute_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() { #[actix_rt::test] async fn filter_reserved_geo_point_array() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, 
None).await; @@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() { #[actix_rt::test] async fn filter_reserved_geo_point_string() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; @@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() { #[actix_rt::test] async fn similar_bad_retrieve_vectors() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await; @@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() { #[actix_rt::test] async fn similar_bad_embedder() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -820,7 +820,7 @@ async fn similar_bad_embedder() { "filterableAttributes": ["title"]})) .await; snapshot!(code, @"202 Accepted"); - server.wait_task(response.uid()).await; + server.wait_task(response.uid()).await.succeeded(); let documents = DOCUMENTS.clone(); let (value, code) = index.add_documents(documents, None).await; From 52591761af4c0ec73f05a88ab4bf163cccf7e512 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:32:32 +0300 Subject: [PATCH 097/131] tests: Faster search::geo IT tests Use shared server + unique indices Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/geo.rs | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/crates/meilisearch/tests/search/geo.rs b/crates/meilisearch/tests/search/geo.rs index a314ca241..b74e4e9e4 100644 --- a/crates/meilisearch/tests/search/geo.rs +++ b/crates/meilisearch/tests/search/geo.rs @@ -43,8 +43,8 @@ static DOCUMENTS: Lazy = Lazy::new(|| { #[actix_rt::test] async fn geo_sort_with_geo_strings() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["_geo"])).await; @@ -59,7 +59,7 @@ async fn geo_sort_with_geo_strings() { "sort": ["_geoPoint(0.0, 0.0):asc"] }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); }, ) .await; @@ -67,8 +67,8 @@ async fn geo_sort_with_geo_strings() { #[actix_rt::test] async fn geo_bounding_box_with_string_and_number() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.update_settings_filterable_attributes(json!(["_geo"])).await; @@ -82,7 +82,7 @@ async fn geo_bounding_box_with_string_and_number() { "filter": "_geoBoundingBox([89, 179], [-89, -179])", }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, 
"{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -124,8 +124,8 @@ async fn geo_bounding_box_with_string_and_number() { #[actix_rt::test] async fn bug_4640() { // https://github.com/meilisearch/meilisearch/issues/4640 - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.add_documents(documents, None).await; @@ -140,7 +140,7 @@ async fn bug_4640() { "sort": ["_geoPoint(45.4777599, 9.1967508):asc"], }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -203,7 +203,7 @@ async fn geo_asc_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}), &json!({"q": "jean"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -248,7 +248,7 @@ async fn geo_asc_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}), &json!({"q": "bob"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -285,7 +285,7 @@ async fn geo_asc_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}), &json!({"q": "intel"}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ @@ -325,7 +325,7 @@ async fn geo_sort_with_words() { &json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "sort"], "sortableAttributes": [RESERVED_GEO_FIELD_NAME]}), &json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###" { "hits": [ From 921e3c4ffebf6b0e67498cec4dd6bde6aa45c7b0 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:36:08 +0300 Subject: [PATCH 098/131] tests: Faster documents::get_documents IT tests Use shared server + unique index Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/documents/get_documents.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/documents/get_documents.rs b/crates/meilisearch/tests/documents/get_documents.rs index f87a18b9f..4f82faf99 100644 --- a/crates/meilisearch/tests/documents/get_documents.rs +++ b/crates/meilisearch/tests/documents/get_documents.rs @@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() { #[actix_rt::test] async fn get_document_with_vectors() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ From 5cb75d1f2a79eba35bbceb92482046d2d0d1f8fd Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 2 Jun 2025 17:06:53 +0200 Subject: [PATCH 099/131] ignore flaky test --- 
crates/meilisearch/tests/search/multi/proxy.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/meilisearch/tests/search/multi/proxy.rs b/crates/meilisearch/tests/search/multi/proxy.rs index d267ee153..55736d058 100644 --- a/crates/meilisearch/tests/search/multi/proxy.rs +++ b/crates/meilisearch/tests/search/multi/proxy.rs @@ -2296,6 +2296,7 @@ async fn error_remote_500_once() { } #[actix_rt::test] +#[ignore] async fn error_remote_timeout() { let ms0 = Server::new().await; let ms1 = Server::new().await; From a3d2f64725f7bb0c40460e70f78f8823a1f6e6e8 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:12:09 +0300 Subject: [PATCH 100/131] tests: Faster search::distinct IT tests Use shared server + unique indices Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/distinct.rs | 40 +++++++++---------- .../distinct_at_search_time/succeed.snap | 2 +- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/crates/meilisearch/tests/search/distinct.rs b/crates/meilisearch/tests/search/distinct.rs index 094ef7bbf..bdc5875e0 100644 --- a/crates/meilisearch/tests/search/distinct.rs +++ b/crates/meilisearch/tests/search/distinct.rs @@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str = "product_id"; /// testing: https://github.com/meilisearch/meilisearch/issues/4078 #[actix_rt::test] async fn distinct_search_with_offset_no_ranking() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await; @@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#); + snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#); snapshot!(response["estimatedTotalHits"] , @"11"); let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#); + snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#); snapshot!(response["estimatedTotalHits"], @"10"); let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#); snapshot!(response["estimatedTotalHits"], @"6"); let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"1"); - snapshot!(format!("{:?}", hits), @r#"["345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["345678"]"#); snapshot!(response["estimatedTotalHits"], @"6"); let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["estimatedTotalHits"], @"6"); let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await; let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + 
snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["estimatedTotalHits"], @"6"); } /// testing: https://github.com/meilisearch/meilisearch/issues/4130 #[actix_rt::test] async fn distinct_search_with_pagination_no_ranking() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = DOCUMENTS.clone(); index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await; @@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["page"], @"0"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#); + snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#); snapshot!(response["page"], @"1"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#); + snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#); snapshot!(response["page"], @"2"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"2"); - snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#); snapshot!(response["page"], @"3"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"0"); - snapshot!(format!("{:?}", hits), @r#"[]"#); + snapshot!(format!("{hits:?}"), @r#"[]"#); snapshot!(response["page"], @"4"); snapshot!(response["totalPages"], @"3"); snapshot!(response["totalHits"], @"6"); @@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() { let hits = get_hits(&response); snapshot!(code, @"200 OK"); snapshot!(hits.len(), @"3"); - snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#); + snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#); snapshot!(response["page"], @"2"); snapshot!(response["totalPages"], @"2"); snapshot!(response["totalHits"], @"6"); @@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() { #[actix_rt::test] async fn distinct_at_search_time() { - let server = Server::new().await; - let index = server.index("tamo"); + let server = Server::new_shared(); + let index = server.unique_index(); let documents = NESTED_DOCUMENTS.clone(); index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await; let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await; - let task = index.wait_task(task.uid()).await; + let task = index.wait_task(task.uid()).await.succeeded(); snapshot!(task, name: "succeed"); fn get_hits(response: &Value) -> Vec { @@ -299,7 +299,7 @@ 
async fn distinct_at_search_time() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"3");
-    snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
+    snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###);
     snapshot!(response["page"], @"1");
     snapshot!(response["totalPages"], @"1");
     snapshot!(response["totalHits"], @"3");
diff --git a/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap b/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap
index ea55d9c61..c1b01a5e2 100644
--- a/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap
+++ b/crates/meilisearch/tests/search/snapshots/distinct.rs/distinct_at_search_time/succeed.snap
@@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs
 {
   "uid": "[uid]",
   "batchUid": "[batch_uid]",
-  "indexUid": "tamo",
+  "indexUid": "[uuid]",
   "status": "succeeded",
   "type": "settingsUpdate",
   "canceledBy": null,

From a3d2f64725f7bb0c40460e70f78f8823a1f6e6e8 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Tue, 3 Jun 2025 13:08:12 +0300
Subject: [PATCH 101/131] Extract GEO_DOCUMENTS static variable and shared
 index with these docs

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/common/mod.rs | 54 ++++++++++++++++++++++
 crates/meilisearch/tests/search/geo.rs | 63 ++------------------------
 2 files changed, 58 insertions(+), 59 deletions(-)

diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs
index 046519a0e..9823bf483 100644
--- a/crates/meilisearch/tests/common/mod.rs
+++ b/crates/meilisearch/tests/common/mod.rs
@@ -453,3 +453,57 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
         })
         .await
 }
+
+pub static GEO_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+    json!([
+        {
+            "id": 1,
+            "name": "Taco Truck",
+            "address": "444 Salsa Street, Burritoville",
+            "type": "Mexican",
+            "rating": 9,
+            "_geo": {
+                "lat": 34.0522,
+                "lng": -118.2437
+            }
+        },
+        {
+            "id": 2,
+            "name": "La Bella Italia",
+            "address": "456 Elm Street, Townsville",
+            "type": "Italian",
+            "rating": 9,
+            "_geo": {
+                "lat": "45.4777599",
+                "lng": "9.1967508"
+            }
+        },
+        {
+            "id": 3,
+            "name": "Crêpe Truck",
+            "address": "2 Billig Avenue, Rouenville",
+            "type": "French",
+            "rating": 10
+        }
+    ])
+});
+
+pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared> {
+    static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
+    INDEX
+        .get_or_init(|| async {
+            let server = Server::new_shared();
+            let index = server._index("SHARED_GEO_DOCUMENTS").to_shared();
+            let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await;
+            index.wait_task(response.uid()).await.succeeded();
+
+            let (response, _code) = index
+                ._update_settings(
+                    json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}),
+                )
+                .await;
+            index.wait_task(response.uid()).await.succeeded();
+            index
+        })
+        .await
+}
diff --git a/crates/meilisearch/tests/search/geo.rs b/crates/meilisearch/tests/search/geo.rs
index b74e4e9e4..5e731ffde 100644
--- a/crates/meilisearch/tests/search/geo.rs
+++ b/crates/meilisearch/tests/search/geo.rs
@@ -2,55 +2,14 @@ use meili_snap::{json_string, snapshot};
 use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME;
 use once_cell::sync::Lazy;
 
-use crate::common::{Server, Value};
+use crate::common::shared_index_with_geo_documents;
 use crate::json;
 
 use super::test_settings_documents_indexing_swapping_and_search;
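The `OnceCell`-based helper added above is tokio's async once-initialization pattern: the first test to call it pays the setup cost, and every later test reuses the cached index. A minimal self-contained sketch of the same idea, with a hypothetical `build_fixture` standing in for the real server and index setup:

    use tokio::sync::OnceCell;

    // First caller runs the async initializer; every later caller gets the
    // cached &'static value. `build_fixture` is a hypothetical stand-in.
    async fn shared_fixture() -> &'static String {
        static FIXTURE: OnceCell<String> = OnceCell::const_new();
        FIXTURE.get_or_init(|| async { build_fixture().await }).await
    }

    async fn build_fixture() -> String {
        String::from("expensive setup, done once per test binary")
    }
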
-static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
-    json!([
-        {
-            "id": 1,
-            "name": "Taco Truck",
-            "address": "444 Salsa Street, Burritoville",
-            "type": "Mexican",
-            "rating": 9,
-            "_geo": {
-                "lat": 34.0522,
-                "lng": -118.2437
-            }
-        },
-        {
-            "id": 2,
-            "name": "La Bella Italia",
-            "address": "456 Elm Street, Townsville",
-            "type": "Italian",
-            "rating": 9,
-            "_geo": {
-                "lat": "45.4777599",
-                "lng": "9.1967508"
-            }
-        },
-        {
-            "id": 3,
-            "name": "Crêpe Truck",
-            "address": "2 Billig Avenue, Rouenville",
-            "type": "French",
-            "rating": 10
-        }
-    ])
-});
-
 #[actix_rt::test]
 async fn geo_sort_with_geo_strings() {
-    let server = Server::new_shared();
-    let index = server.unique_index();
-
-    let documents = DOCUMENTS.clone();
-    index.update_settings_filterable_attributes(json!(["_geo"])).await;
-    index.update_settings_sortable_attributes(json!(["_geo"])).await;
-    let (task, _status_code) = index.add_documents(documents, None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    let index = shared_index_with_geo_documents().await;
 
     index
         .search(
@@ -67,14 +26,7 @@ async fn geo_sort_with_geo_strings()
 
 #[actix_rt::test]
 async fn geo_bounding_box_with_string_and_number() {
-    let server = Server::new_shared();
-    let index = server.unique_index();
-
-    let documents = DOCUMENTS.clone();
-    index.update_settings_filterable_attributes(json!(["_geo"])).await;
-    index.update_settings_sortable_attributes(json!(["_geo"])).await;
-    let (ret, _code) = index.add_documents(documents, None).await;
-    index.wait_task(ret.uid()).await.succeeded();
+    let index = shared_index_with_geo_documents().await;
 
     index
         .search(
@@ -124,14 +76,7 @@ async fn geo_bounding_box_with_string_and_number()
 
 #[actix_rt::test]
 async fn bug_4640() {
     // https://github.com/meilisearch/meilisearch/issues/4640
-    let server = Server::new_shared();
-    let index = server.unique_index();
-
-    let documents = DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.update_settings_filterable_attributes(json!(["_geo"])).await;
-    let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
-    index.wait_task(ret.uid()).await.succeeded();
+    let index = shared_index_with_geo_documents().await;
 
     // Sort the document with the second one first
     index

From 54fdf379bb7da4849e57fb92f638934c8271baf1 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Tue, 3 Jun 2025 13:41:13 +0300
Subject: [PATCH 102/131] Use shared_does_not_exists_index() index for
 delete_one_document_unexisting_index() test case

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/documents/delete_documents.rs | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/crates/meilisearch/tests/documents/delete_documents.rs b/crates/meilisearch/tests/documents/delete_documents.rs
index 26dfba8c8..5ea122bd0 100644
--- a/crates/meilisearch/tests/documents/delete_documents.rs
+++ b/crates/meilisearch/tests/documents/delete_documents.rs
@@ -1,13 +1,12 @@
 use meili_snap::{json_string, snapshot};
 
-use crate::common::{GetAllDocumentsOptions, Server};
+use crate::common::{shared_does_not_exists_index, GetAllDocumentsOptions, Server};
 use crate::json;
 
 #[actix_rt::test]
 async fn delete_one_document_unexisting_index() {
-    let server = Server::new_shared();
-    let index = server.unique_index();
-    let (task, code) = index.delete_document(0).await;
+    let index = shared_does_not_exists_index().await;
+    let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
     assert_eq!(code, 202);
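One pattern worth noting across these conversions: tests that keep their own data still run against the shared server and isolate themselves through `server.unique_index()`. A rough sketch of the naming idea behind it, using the `uuid` crate; this is hypothetical, the real helper lives in tests/common, but the `[uuid]` redactions in the snapshots above reflect it:

    use uuid::Uuid;

    // Isolation on a shared server comes from never reusing an index uid.
    fn unique_index_uid() -> String {
        Uuid::new_v4().to_string()
    }
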
index.wait_task(task.uid()).await.failed(); From 3668f5f021a74db5c08fbceca83f1eeb3b3a4096 Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Tue, 3 Jun 2025 13:44:38 +0300 Subject: [PATCH 103/131] Use unique server+index for `list_tasks()` test case Co-authored-by: Tamo --- crates/meilisearch/tests/tasks/mod.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index 57cb87364..303e0aa91 100644 --- a/crates/meilisearch/tests/tasks/mod.rs +++ b/crates/meilisearch/tests/tasks/mod.rs @@ -49,8 +49,9 @@ async fn get_task_status() { #[actix_rt::test] async fn list_tasks() { - let server = Server::new_shared(); - let index = server.unique_index(); + // Do not use a shared server because we want to assert stuff against the global list of tasks + let server = Server::new().await; + let index = server.index("test"); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index From a50b69b868f07bda7c28376aadd8c510c162bce9 Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Tue, 3 Jun 2025 13:45:17 +0300 Subject: [PATCH 104/131] Use unique server+index for `list_tasks_status_filtered()` test case Co-authored-by: Tamo --- crates/meilisearch/tests/tasks/mod.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index 303e0aa91..9f60d3caf 100644 --- a/crates/meilisearch/tests/tasks/mod.rs +++ b/crates/meilisearch/tests/tasks/mod.rs @@ -152,8 +152,9 @@ async fn list_tasks_with_star_filters() { #[actix_rt::test] async fn list_tasks_status_filtered() { - let server = Server::new_shared(); - let index = server.unique_index(); + // Do not use a shared server because we want to assert stuff against the global list of tasks + let server = Server::new().await; + let index = server.index("test"); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = index.create(None).await; From f95f29c492606f37024300185fbcf6e6c61b3967 Mon Sep 17 00:00:00 2001 From: Martin Grigorov Date: Tue, 3 Jun 2025 13:45:46 +0300 Subject: [PATCH 105/131] Use unique server+index for `list_tasks_type_filtered()` test case Co-authored-by: Tamo --- crates/meilisearch/tests/tasks/mod.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index 9f60d3caf..093817476 100644 --- a/crates/meilisearch/tests/tasks/mod.rs +++ b/crates/meilisearch/tests/tasks/mod.rs @@ -175,8 +175,9 @@ async fn list_tasks_status_filtered() { #[actix_rt::test] async fn list_tasks_type_filtered() { - let server = Server::new_shared(); - let index = server.unique_index(); + // Do not use a shared server because we want to assert stuff against the global list of tasks + let server = Server::new().await; + let index = server.index("test"); let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); index From d3d501585422a57a45c7fdf65411c7cff92a1a6d Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 3 Jun 2025 13:50:04 +0300 Subject: [PATCH 106/131] Use the cancelled task uid Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/tasks/mod.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index 
093817476..2bf8f0aa8 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -200,11 +200,12 @@ async fn list_tasks_invalid_canceled_by_filter() {
     let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
-    index
+    let (task, _code) = index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
-    let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
+    let (response, code) =
+        index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
     assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 }

From 0b1f634afa82db8d5c2efb8ebd2701c13a8efced Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Tue, 3 Jun 2025 13:52:55 +0300
Subject: [PATCH 107/131] Remove useless code

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/tasks/mod.rs | 2 --
 1 file changed, 2 deletions(-)

diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index 2bf8f0aa8..16948c571 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -12,8 +12,6 @@ use crate::json;
 async fn error_get_unexisting_task_status() {
     let server = Server::new_shared();
     let index = server.unique_index();
-    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
 
     let (response, code) = index.get_task(u32::MAX as u64).await;
     let expected_response = json!({

From 96c81762ed7b8ad228e71e7dda8e11706310e754 Mon Sep 17 00:00:00 2001
From: Martin Grigorov
Date: Tue, 3 Jun 2025 14:00:38 +0300
Subject: [PATCH 108/131] Apply suggestions from code review

Do not use redactions for the snapshot assertions

Co-authored-by: Tamo
---
 crates/meilisearch/tests/tasks/mod.rs | 62 ++++++++++-----------------
 1 file changed, 22 insertions(+), 40 deletions(-)

diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index 16948c571..fb45223e6 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -210,8 +210,9 @@ async fn list_tasks_invalid_canceled_by_filter() {
 
 #[actix_rt::test]
 async fn list_tasks_status_and_type_filtered() {
-    let server = Server::new_shared();
-    let index = server.unique_index();
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
+    let server = Server::new().await;
+    let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
     index
@@ -285,8 +286,7 @@ async fn test_summarized_document_addition_or_update() {
     index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    snapshot!(json_string!(task,
-        { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
+    snapshot!(task,
     @r###"
 {
   "uid": "[uid]",
@@ -311,8 +311,7 @@
     index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    snapshot!(json_string!(task,
-        { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" =>
"[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -346,8 +345,7 @@ async fn test_summarized_delete_documents_by_batch() { .await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -378,8 +376,7 @@ async fn test_summarized_delete_documents_by_batch() { let (del_task, _status_code) = index.delete_batch(vec![42]).await; index.wait_task(del_task.uid()).await.succeeded(); let (task, _) = index.get_task(del_task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -411,8 +408,7 @@ async fn test_summarized_delete_documents_by_filter() { index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -444,8 +440,7 @@ async fn test_summarized_delete_documents_by_filter() { index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -477,8 +472,7 @@ async fn test_summarized_delete_documents_by_filter() { index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -508,8 +502,7 @@ async fn test_summarized_delete_document_by_id() { let (task, _status_code) = index.delete_document(1).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -540,8 +533,7 @@ async fn test_summarized_delete_document_by_id() { let (task, _status_code) = index.delete_document(42).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -583,8 +575,7 @@ async fn test_summarized_settings_update() { let (task,_status_code) = 
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -622,8 +613,7 @@ async fn test_summarized_index_creation() { let (task, _status_code) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -646,8 +636,7 @@ async fn test_summarized_index_creation() { let (task, _status_code) = index.create(Some("doggos")).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -790,8 +779,7 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(None).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -819,8 +807,7 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(Some("bones")).await; index.wait_task(task.uid()).await.failed(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -851,8 +838,7 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(None).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -875,8 +861,7 @@ async fn test_summarized_index_update() { let (task, _status_code) = index.update(Some("bones")).await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -907,8 +892,7 @@ async fn test_summarized_index_swap() { .await; server.wait_task(task.uid()).await.failed(); let (task, _) = server.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => 
"[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -1027,8 +1011,7 @@ async fn test_summarized_task_deletion() { let (task, _status_code) = server.delete_tasks("uids=0").await; index.wait_task(task.uid()).await.succeeded(); let (task, _) = index.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", @@ -1057,8 +1040,7 @@ async fn test_summarized_dump_creation() { let (task, _status_code) = server.create_dump().await; server.wait_task(task.uid()).await; let (task, _) = server.get_task(task.uid()).await; - snapshot!(json_string!(task, - { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + snapshot!(task, @r###" { "uid": "[uid]", From 70aa78a2c2c5aa0bff68f2d5caa81f8de82ef724 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 3 Jun 2025 14:04:15 +0300 Subject: [PATCH 109/131] Remove unused import Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/geo.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/meilisearch/tests/search/geo.rs b/crates/meilisearch/tests/search/geo.rs index 5e731ffde..5f4eff947 100644 --- a/crates/meilisearch/tests/search/geo.rs +++ b/crates/meilisearch/tests/search/geo.rs @@ -1,6 +1,5 @@ use meili_snap::{json_string, snapshot}; use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME; -use once_cell::sync::Lazy; use crate::common::shared_index_with_geo_documents; use crate::json; From d6ae39bf0f51ee0f9e5e0059a22289ba4ce1459e Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:56:03 +0300 Subject: [PATCH 110/131] tests: Faster search::hybrid IT tests Use shared server + unique indices Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/hybrid.rs | 62 +++++++++++------------ 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/crates/meilisearch/tests/search/hybrid.rs b/crates/meilisearch/tests/search/hybrid.rs index 3a8fb2b4c..01de9a057 100644 --- a/crates/meilisearch/tests/search/hybrid.rs +++ b/crates/meilisearch/tests/search/hybrid.rs @@ -2,31 +2,31 @@ use meili_snap::snapshot; use once_cell::sync::Lazy; use crate::common::index::Index; -use crate::common::{Server, Value}; +use crate::common::{Server, Shared, Value}; use crate::json; async fn index_with_documents_user_provided<'a>( - server: &'a Server, + server: &'a Server, documents: &Value, ) -> Index<'a> { - let index = server.index("test"); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ "embedders": {"default": { "source": "userProvided", "dimensions": 2}}} )) .await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); index } -async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> { - let index = server.index("test"); +async fn index_with_documents_hf<'a>(server: &'a Server, 
documents: &Value) -> Index<'a> { + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ "embedders": {"default": { @@ -36,11 +36,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> I "documentTemplate": "{{doc.title}}, {{doc.desc}}" }}} )) .await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index.add_documents(documents.clone(), None).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); index } @@ -139,8 +139,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy = Lazy::new(|| { #[actix_rt::test] async fn simple_search() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -172,8 +172,8 @@ async fn simple_search() { #[actix_rt::test] async fn limit_offset() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -185,8 +185,8 @@ async fn limit_offset() { snapshot!(response["semanticHitCount"], @"0"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -201,8 +201,8 @@ async fn limit_offset() { #[actix_rt::test] async fn simple_search_hf() { - let server = Server::new().await; - let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (response, code) = index .search_post( @@ -253,8 +253,8 @@ async fn simple_search_hf() { #[actix_rt::test] async fn distribution_shift() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true}); let (response, code) = index.search_post(search.clone()).await; @@ -275,7 +275,7 @@ async fn distribution_shift() { .await; snapshot!(code, @"202 Accepted"); - let response = server.wait_task(response.uid()).await; + let response = server.wait_task(response.uid()).await.succeeded(); snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#); let (response, code) = index.search_post(search).await; @@ -285,8 +285,8 @@ async fn distribution_shift() { #[actix_rt::test] async fn highlighter() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = 
Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0], @@ -340,8 +340,8 @@ async fn highlighter() { #[actix_rt::test] async fn invalid_semantic_ratio() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let (response, code) = index .search_post( @@ -412,8 +412,8 @@ async fn invalid_semantic_ratio() { #[actix_rt::test] async fn single_document() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SINGLE_DOCUMENT_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SINGLE_DOCUMENT_VEC).await; let (response, code) = index .search_post( @@ -428,8 +428,8 @@ async fn single_document() { #[actix_rt::test] async fn query_combination() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; // search without query and vector, but with hybrid => still placeholder let (response, code) = index @@ -581,7 +581,7 @@ async fn distinct_is_applied() { #[actix_rt::test] async fn retrieve_vectors() { - let server = Server::new().await; + let server = Server::new_shared(); let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await; let (response, code) = index @@ -632,7 +632,7 @@ async fn retrieve_vectors() { let (response, code) = index .update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} )) .await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index @@ -682,7 +682,7 @@ async fn retrieve_vectors() { // remove `_vectors` from displayed attributes let (response, code) = index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await; - assert_eq!(202, code, "{:?}", response); + assert_eq!(202, code, "{response:?}"); index.wait_task(response.uid()).await.succeeded(); let (response, code) = index From 7c6162f0bf65039fd20db63c32e4472f295fdcae Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 3 Jun 2025 08:25:33 +0300 Subject: [PATCH 111/131] Fix clippy error Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/hybrid.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/meilisearch/tests/search/hybrid.rs b/crates/meilisearch/tests/search/hybrid.rs index 01de9a057..96a7ee1c9 100644 --- a/crates/meilisearch/tests/search/hybrid.rs +++ b/crates/meilisearch/tests/search/hybrid.rs @@ -582,7 +582,7 @@ async fn distinct_is_applied() { #[actix_rt::test] async fn retrieve_vectors() { let server = Server::new_shared(); - let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await; + let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await; let (response, code) = index .search_post( From 733175359a073686b021d8cf03e2152933eb1481 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 3 Jun 2025 15:29:45 +0300 Subject: [PATCH 112/131] Update the new test case to use the new 
signature of index_with_documents_user_provided() Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/hybrid.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/search/hybrid.rs b/crates/meilisearch/tests/search/hybrid.rs index 96a7ee1c9..c6eb39a3a 100644 --- a/crates/meilisearch/tests/search/hybrid.rs +++ b/crates/meilisearch/tests/search/hybrid.rs @@ -538,8 +538,8 @@ async fn query_combination() { // see #[actix_rt::test] async fn distinct_is_applied() { - let server = Server::new().await; - let index = index_with_documents_user_provided(&server, &TEST_DISTINCT_DOCUMENTS).await; + let server = Server::new_shared(); + let index = index_with_documents_user_provided(server, &TEST_DISTINCT_DOCUMENTS).await; let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await; assert_eq!(202, code, "{:?}", response); From 693fcd5752b1fc59ee0e56101bf11f063c020e11 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 3 Jun 2025 15:40:40 +0300 Subject: [PATCH 113/131] Try with GITHUB_TOKEN Signed-off-by: Martin Tzvetanov Grigorov --- .github/workflows/db-change-missing.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/db-change-missing.yml b/.github/workflows/db-change-missing.yml index 12a616f88..1ae48527f 100644 --- a/.github/workflows/db-change-missing.yml +++ b/.github/workflows/db-change-missing.yml @@ -4,22 +4,22 @@ on: pull_request: types: [opened, synchronize, reopened, labeled, unlabeled] -env: - GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }} - jobs: check-labels: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Check db change labels id: check_labels + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels echo ${{ github.event.pull_request.number }} echo $URL LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name) + echo "Labels: $LABELS" if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then echo "::error::Pull request must contain either the 'db change' or 'no db change' label." 
exit 1

From af54c8381e7d6986b7ce0710b5cea755fa910617 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Tue, 3 Jun 2025 15:46:16 +0300
Subject: [PATCH 114/131] Use ${{ github.repository }} instead of hardcoding
 the repo/owner

Signed-off-by: Martin Tzvetanov Grigorov
---
 .github/workflows/db-change-missing.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/db-change-missing.yml b/.github/workflows/db-change-missing.yml
index 1ae48527f..6d15afd69 100644
--- a/.github/workflows/db-change-missing.yml
+++ b/.github/workflows/db-change-missing.yml
@@ -18,7 +18,7 @@ jobs:
           URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
           echo ${{ github.event.pull_request.number }}
           echo $URL
-          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
+          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
           echo "Labels: $LABELS"
           if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
             echo "::error::Pull request must contain either the 'db change' or 'no db change' label."

From c9efdf8c880d4a28aaa55070e5d538825a76c7d9 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Wed, 4 Jun 2025 13:00:47 +0300
Subject: [PATCH 115/131] Render details.dumpUid as [dump_uid] in Value's
 Display

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/common/mod.rs | 3 ++-
 crates/meilisearch/tests/tasks/mod.rs  | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs
index 046519a0e..e5f910589 100644
--- a/crates/meilisearch/tests/common/mod.rs
+++ b/crates/meilisearch/tests/common/mod.rs
@@ -128,7 +128,8 @@ impl Display for Value {
                 ".finishedAt" => "[date]",
                 ".duration" => "[duration]",
                 ".processingTimeMs" => "[duration]",
-                ".details.embedders.*.url" => "[url]"
+                ".details.embedders.*.url" => "[url]",
+                ".details.dumpUid" => "[dump_uid]",
             })
         )
     }
diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index fb45223e6..c31e43883 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -1050,7 +1050,7 @@ async fn test_summarized_dump_creation() {
         "type": "dumpCreation",
         "canceledBy": null,
         "details": {
-          "dumpUid": "[dumpUid]"
+          "dumpUid": "[dump_uid]"
         },
         "error": null,
         "duration": "[duration]",

From 8b0c4291ae81d3616090914a3d02ae4c7c4e1c48 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Mon, 2 Jun 2025 08:29:15 +0300
Subject: [PATCH 116/131] tests: Faster stats::mod IT tests

Use shared server + unique indices

Related-to: https://github.com/meilisearch/meilisearch/issues/4840

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/stats/mod.rs | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/crates/meilisearch/tests/stats/mod.rs b/crates/meilisearch/tests/stats/mod.rs
index aee626460..f448d893f 100644
--- a/crates/meilisearch/tests/stats/mod.rs
+++ b/crates/meilisearch/tests/stats/mod.rs
@@ -6,8 +6,8 @@ use crate::common::Server;
 use crate::json;
 
 #[actix_rt::test]
-async fn get_settings_unexisting_index() {
-    let server = Server::new().await;
+async fn get_version() {
+    let server =
Server::new_shared(); let (response, code) = server.version().await; assert_eq!(code, 200); let version = response.as_object().unwrap(); @@ -18,7 +18,7 @@ async fn get_settings_unexisting_index() { #[actix_rt::test] async fn test_healthyness() { - let server = Server::new().await; + let server = Server::new_shared(); let (response, status_code) = server.service.get("/health").await; assert_eq!(status_code, 200); @@ -27,8 +27,8 @@ async fn test_healthyness() { #[actix_rt::test] async fn stats() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (task, code) = index.create(Some("id")).await; assert_eq!(code, 202); @@ -55,7 +55,7 @@ async fn stats() { ]); let (response, code) = index.add_documents(documents, None).await; - assert_eq!(code, 202, "{}", response); + assert_eq!(code, 202, "{response}"); assert_eq!(response["taskUid"], 1); index.wait_task(response.uid()).await.succeeded(); @@ -78,8 +78,8 @@ async fn stats() { #[actix_rt::test] async fn add_remove_embeddings() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -216,8 +216,8 @@ async fn add_remove_embeddings() { #[actix_rt::test] async fn add_remove_embedded_documents() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); let (response, code) = index .update_settings(json!({ @@ -293,8 +293,8 @@ async fn add_remove_embedded_documents() { #[actix_rt::test] async fn update_embedder_settings() { - let server = Server::new().await; - let index = server.index("doggo"); + let server = Server::new_shared(); + let index = server.unique_index(); // 2 embedded documents for 3 embeddings in total // but no embedders are added in the settings yet so we expect 0 embedded documents for 0 embeddings in total From 930d5a09a842a453edb655759576c3ab195e6203 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 11:51:52 +0300 Subject: [PATCH 117/131] Use unique server + its own index for #stats() test Using a shared server will make this test fragile Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/stats/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/stats/mod.rs b/crates/meilisearch/tests/stats/mod.rs index f448d893f..f44812014 100644 --- a/crates/meilisearch/tests/stats/mod.rs +++ b/crates/meilisearch/tests/stats/mod.rs @@ -27,8 +27,8 @@ async fn test_healthyness() { #[actix_rt::test] async fn stats() { - let server = Server::new_shared(); - let index = server.unique_index(); + let server = Server::new().await; + let index = server.index("test"); let (task, code) = index.create(Some("id")).await; assert_eq!(code, 202); From 0557a4dd2f3eaac2867384c269c0c9a7e2f23759 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 14:28:30 +0300 Subject: [PATCH 118/131] Trigger build Signed-off-by: Martin Tzvetanov Grigorov From 61b0f50d4d8f0ece1db3a2efe0242869a6d62322 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 4 Jun 2025 13:37:42 +0300 Subject: [PATCH 119/131] Trigger build Signed-off-by: Martin Tzvetanov Grigorov From 3ebff65ef313b3ef4f54d5ea8c920f924e573654 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:28:30 +0300 Subject: [PATCH 
120/131] tests: Faster search::filters IT tests Use shared server + unique indices Related-to: https://github.com/meilisearch/meilisearch/issues/4840 Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/filters.rs | 62 +++++++++++----------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs index 49409efa6..1e2fac998 100644 --- a/crates/meilisearch/tests/search/filters.rs +++ b/crates/meilisearch/tests/search/filters.rs @@ -10,17 +10,17 @@ use crate::{ #[actix_rt::test] async fn search_with_filter_string_notation() { - let server = Server::new().await; - let index = server.index("test"); + let server = Server::new_shared(); + let index = server.unique_index(); let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await; - meili_snap::snapshot!(code, @"202 Accepted"); + snapshot!(code, @"202 Accepted"); let documents = DOCUMENTS.clone(); let (task, code) = index.add_documents(documents, None).await; - meili_snap::snapshot!(code, @"202 Accepted"); - let res = index.wait_task(task.uid()).await; - meili_snap::snapshot!(res["status"], @r###""succeeded""###); + snapshot!(code, @"202 Accepted"); + let res = index.wait_task(task.uid()).await.succeeded(); + snapshot!(res["status"], @r###""succeeded""###); index .search( @@ -28,44 +28,44 @@ async fn search_with_filter_string_notation() { "filter": "title = Gläss" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); }, ) .await; - let index = server.index("nested"); + let nested_index = server.unique_index(); let (_, code) = - index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await; - meili_snap::snapshot!(code, @"202 Accepted"); + nested_index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await; + snapshot!(code, @"202 Accepted"); let documents = NESTED_DOCUMENTS.clone(); - let (task, code) = index.add_documents(documents, None).await; - meili_snap::snapshot!(code, @"202 Accepted"); - let res = index.wait_task(task.uid()).await; - meili_snap::snapshot!(res["status"], @r###""succeeded""###); + let (task, code) = nested_index.add_documents(documents, None).await; + snapshot!(code, @"202 Accepted"); + let res = nested_index.wait_task(task.uid()).await.succeeded(); + snapshot!(res["status"], @r###""succeeded""###); - index + nested_index .search( json!({ "filter": "cattos = pésti" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); assert_eq!(response["hits"][0]["id"], json!(852)); }, ) .await; - index + nested_index .search( json!({ "filter": "doggos.age > 5" }), |response, code| { - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); assert_eq!(response["hits"][0]["id"], json!(654)); assert_eq!(response["hits"][1]["id"], json!(951)); @@ -82,7 +82,7 @@ async fn search_with_filter_array_notation() { "filter": ["title = Gläss"] })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 1); let (response, code) = index @@ -90,7 +90,7 @@ async fn search_with_filter_array_notation() { "filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]] 
})) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 3); } @@ -116,7 +116,7 @@ async fn search_with_contains_filter() { "filter": "title CONTAINS cap" })) .await; - assert_eq!(code, 200, "{}", response); + assert_eq!(code, 200, "{response}"); assert_eq!(response["hits"].as_array().unwrap().len(), 2); } @@ -276,8 +276,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { let index = server.index("test"); let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; + assert_eq!(code, 202, "{task}"); + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(response["status"], @r###""succeeded""###); let (task, code) = index @@ -289,8 +289,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; + assert_eq!(code, 202, "{task}"); + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(response["status"], @r###""succeeded""###); // Check if the Equality filter works @@ -355,8 +355,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; + assert_eq!(code, 202, "{task}"); + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(response["status"], @r###""succeeded""###); // Check if the Equality filter works @@ -467,8 +467,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; + assert_eq!(code, 202, "{task}"); + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(response["status"], @r###""succeeded""###); // Check if the Equality filter returns an error @@ -567,8 +567,8 @@ async fn search_with_pattern_filter_settings_scenario_1() { } }]})) .await; - assert_eq!(code, 202, "{}", task); - let response = index.wait_task(task.uid()).await; + assert_eq!(code, 202, "{task}"); + let response = index.wait_task(task.uid()).await.succeeded(); snapshot!(response["status"], @r###""succeeded""###); // Check if the Equality filter works From b41af0d0f6cb1e315d002b2fd70bb472274d56c0 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 2 Jun 2025 15:57:43 +0300 Subject: [PATCH 121/131] Formatting Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/filters.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs index 1e2fac998..19a6ad775 100644 --- a/crates/meilisearch/tests/search/filters.rs +++ b/crates/meilisearch/tests/search/filters.rs @@ -36,8 +36,9 @@ async fn search_with_filter_string_notation() { let nested_index = server.unique_index(); - let (_, code) = - nested_index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await; + let (_, code) = nested_index + .update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})) + .await; snapshot!(code, @"202 Accepted"); let documents = NESTED_DOCUMENTS.clone(); From 3f256a79592e4e6527da67855eaf6cad9b5650a0 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Wed, 4 Jun 2025 13:51:34 +0300 Subject: [PATCH 122/131] Use the shared index with DOCUMENTS where 
possible Remove useless assertion that is covered by the earlier call of .succeeded() Signed-off-by: Martin Tzvetanov Grigorov --- crates/meilisearch/tests/search/filters.rs | 32 ++++++---------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs index 19a6ad775..f6ce5d0b7 100644 --- a/crates/meilisearch/tests/search/filters.rs +++ b/crates/meilisearch/tests/search/filters.rs @@ -3,6 +3,7 @@ use meilisearch::Opt; use tempfile::TempDir; use super::test_settings_documents_indexing_swapping_and_search; +use crate::common::shared_index_with_nested_documents; use crate::{ common::{default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS}, json, @@ -10,17 +11,7 @@ use crate::{ #[actix_rt::test] async fn search_with_filter_string_notation() { - let server = Server::new_shared(); - let index = server.unique_index(); - - let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await; - snapshot!(code, @"202 Accepted"); - - let documents = DOCUMENTS.clone(); - let (task, code) = index.add_documents(documents, None).await; - snapshot!(code, @"202 Accepted"); - let res = index.wait_task(task.uid()).await.succeeded(); - snapshot!(res["status"], @r###""succeeded""###); + let index = shared_index_with_documents().await; index .search( @@ -34,6 +25,7 @@ async fn search_with_filter_string_notation() { ) .await; + let server = Server::new_shared(); let nested_index = server.unique_index(); let (_, code) = nested_index @@ -44,8 +36,7 @@ async fn search_with_filter_string_notation() { let documents = NESTED_DOCUMENTS.clone(); let (task, code) = nested_index.add_documents(documents, None).await; snapshot!(code, @"202 Accepted"); - let res = nested_index.wait_task(task.uid()).await.succeeded(); - snapshot!(res["status"], @r###""succeeded""###); + nested_index.wait_task(task.uid()).await.succeeded(); nested_index .search( @@ -278,8 +269,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await; assert_eq!(code, 202, "{task}"); - let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(response["status"], @r###""succeeded""###); + index.wait_task(task.uid()).await.succeeded(); let (task, code) = index .update_settings(json!({"filterableAttributes": [{ @@ -291,8 +281,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { }]})) .await; assert_eq!(code, 202, "{task}"); - let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(response["status"], @r###""succeeded""###); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter works index @@ -357,8 +346,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { }]})) .await; assert_eq!(code, 202, "{task}"); - let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(response["status"], @r###""succeeded""###); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter works index @@ -469,8 +457,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { }]})) .await; assert_eq!(code, 202, "{task}"); - let response = index.wait_task(task.uid()).await.succeeded(); - snapshot!(response["status"], @r###""succeeded""###); + index.wait_task(task.uid()).await.succeeded(); // Check if the Equality filter returns an error index @@ -569,8 +556,7 @@ async fn search_with_pattern_filter_settings_scenario_1() { }]})) .await; 
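The `.succeeded()` calls these hunks converge on both wait for the task and assert its terminal status, which is what makes the separate `snapshot!(response["status"], ...)` lines redundant. A rough sketch of that chainable assertion style; this is a hypothetical helper, not the real tests/common implementation:

    #[derive(Debug)]
    struct TaskResult {
        status: String,
    }

    impl TaskResult {
        // Panics unless the task reached "succeeded", then hands the value
        // back so further assertions can be chained on it.
        fn succeeded(self) -> Self {
            assert_eq!(self.status, "succeeded", "task did not reach `succeeded`");
            self
        }
    }
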
     assert_eq!(code, 202, "{task}");
-    let response = index.wait_task(task.uid()).await.succeeded();
-    snapshot!(response["status"], @r###""succeeded""###);
+    index.wait_task(task.uid()).await.succeeded();
 
     // Check if the Equality filter works
     index

From a15ebb283f2b7087be3c5247698a6176d200d864 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Wed, 4 Jun 2025 14:17:01 +0300
Subject: [PATCH 123/131] Remove unused import

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/search/filters.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs
index f6ce5d0b7..26f7c88e4 100644
--- a/crates/meilisearch/tests/search/filters.rs
+++ b/crates/meilisearch/tests/search/filters.rs
@@ -3,7 +3,6 @@ use meilisearch::Opt;
 use tempfile::TempDir;
 
 use super::test_settings_documents_indexing_swapping_and_search;
-use crate::common::shared_index_with_nested_documents;
 use crate::{
     common::{default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS},
     json,

From e497008161b4f1e4f213d1baa0088c40fb98e239 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Thu, 5 Jun 2025 11:28:29 +0300
Subject: [PATCH 124/131] Add `cattos` to the shared_index_with_nested_documents() as a filterable attribute

This allows converting more search::filters IT tests to use a shared
server with unique/shared indices.

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/common/mod.rs     |  2 +-
 crates/meilisearch/tests/search/filters.rs | 25 ++++++----------------
 2 files changed, 8 insertions(+), 19 deletions(-)

diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs
index 00e4732d0..373f89f78 100644
--- a/crates/meilisearch/tests/common/mod.rs
+++ b/crates/meilisearch/tests/common/mod.rs
@@ -352,7 +352,7 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Shared> {
     index.wait_task(response.uid()).await.succeeded();
     let (response, _code) = index
         ._update_settings(
-            json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}),
+            json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}),
         )
         .await;
     index.wait_task(response.uid()).await.succeeded();
diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs
index 26f7c88e4..3df80b90c 100644
--- a/crates/meilisearch/tests/search/filters.rs
+++ b/crates/meilisearch/tests/search/filters.rs
@@ -3,6 +3,7 @@ use meilisearch::Opt;
 use tempfile::TempDir;
 
 use super::test_settings_documents_indexing_swapping_and_search;
+use crate::common::shared_index_with_nested_documents;
 use crate::{
     common::{default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS},
     json,
@@ -24,18 +25,7 @@ async fn search_with_filter_string_notation() {
         )
         .await;
 
-    let server = Server::new_shared();
-    let nested_index = server.unique_index();
-
-    let (_, code) = nested_index
-        .update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]}))
-        .await;
-    snapshot!(code, @"202 Accepted");
-
-    let documents = NESTED_DOCUMENTS.clone();
-    let (task, code) = nested_index.add_documents(documents, None).await;
-    snapshot!(code, @"202 Accepted");
-    nested_index.wait_task(task.uid()).await.succeeded();
+    let nested_index = shared_index_with_nested_documents().await;
 
     nested_index
         .search(
@@ -260,11 +250,10 @@ async fn search_with_pattern_filter_settings() {
 
 #[actix_rt::test]
 async fn search_with_pattern_filter_settings_scenario_1() {
-    let temp = TempDir::new().unwrap();
-    let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap();
+    let server = Server::new_shared();
 
     eprintln!("Documents -> Settings -> test");
-    let index = server.index("test");
+    let index = server.unique_index();
 
     let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
     assert_eq!(code, 202, "{task}");
@@ -324,7 +313,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -468,7 +457,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -599,7 +588,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"

From 3770e705818cce2735ba4fa544249d78a752c80b Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Thu, 5 Jun 2025 11:30:39 +0300
Subject: [PATCH 125/131] Optimize the imports

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/search/filters.rs | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/crates/meilisearch/tests/search/filters.rs b/crates/meilisearch/tests/search/filters.rs
index 3df80b90c..9670a036c 100644
--- a/crates/meilisearch/tests/search/filters.rs
+++ b/crates/meilisearch/tests/search/filters.rs
@@ -3,9 +3,11 @@ use meilisearch::Opt;
 use tempfile::TempDir;
 
 use super::test_settings_documents_indexing_swapping_and_search;
-use crate::common::shared_index_with_nested_documents;
 use crate::{
-    common::{default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS},
+    common::{
+        default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
+        DOCUMENTS, NESTED_DOCUMENTS,
+    },
     json,
 };

From 89c0cf9b12159932951914bffa9b3d7eb6020bb0 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Fri, 6 Jun 2025 09:51:18 +0300
Subject: [PATCH 126/131] temporary: Dump the threads' stack traces when .wait_task() times out

Signed-off-by: Martin Tzvetanov Grigorov
---
 .cargo/config.toml                        |  3 +++
 .github/workflows/test-suite.yml          |  2 ++
 crates/meilisearch/tests/common/server.rs | 16 ++++++++++------
 3 files changed, 15 insertions(+), 6 deletions(-)

diff --git a/.cargo/config.toml b/.cargo/config.toml
index e11d56a31..b172ee2af 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -1,2 +1,5 @@
 [alias]
 xtask = "run --release --package xtask --"
+
+[build]
+rustflags = ["--cfg", "tokio_unstable", "--cfg", "tokio_taskdump"]
diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index 6cf8bfa0f..100068965 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -158,6 +158,8 @@ jobs:
         uses: Swatinem/rust-cache@v2.7.8
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
+        env:
+          RUSTFLAGS: "--cfg tokio_unstable --cfg tokio_taskdump"
         with:
           command: test
           args: --locked --all
diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs
index 7e30c5d17..02ea2f79a 100644
--- a/crates/meilisearch/tests/common/server.rs
+++ b/crates/meilisearch/tests/common/server.rs
@@ -400,12 +400,8 @@ impl Server {
         // try several times to get status, or panic to not wait forever
         let url = format!("/tasks/{}", update_id);
         // Increase timeout for vector-related tests
-        let max_attempts = if url.contains("/tasks/") {
-            if update_id > 1000 {
-                400 // 200 seconds for vector tests
-            } else {
-                100 // 50 seconds for other tests
-            }
+        let max_attempts = if update_id > 1000 {
+            400 // 200 seconds for vector tests
         } else {
             100 // 50 seconds for other tests
         };
 
         for _ in 0..max_attempts {
@@ -421,6 +417,14 @@ impl Server {
             // wait 0.5 second.
             sleep(Duration::from_millis(500)).await;
         }
+        let handle = tokio::runtime::Handle::current();
+        if let Ok(dump) = tokio::time::timeout(Duration::from_secs(2), handle.dump()).await {
+            for (i, task) in dump.tasks().iter().enumerate() {
+                let trace = task.trace();
+                println!("TASK {i}:");
+                println!("{trace}\n");
+            }
+        }
         panic!("Timeout waiting for update id");
     }
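Note on PATCH 126: `tokio::runtime::Handle::dump()` only exists when tokio is compiled with the `tokio_unstable` and `tokio_taskdump` cfg flags, which is why the patch has to touch `.cargo/config.toml` and the CI workflow as well. A minimal sketch of how such a helper could instead be cfg-gated so builds without the flags still compile; the function name is illustrative, not part of the patch:

```rust
// Hypothetical cfg-gated variant of the taskdump helper; PATCH 126 instead
// enables the cfgs globally through rustflags.
#[cfg(all(tokio_unstable, tokio_taskdump))]
async fn print_task_traces() {
    use std::time::Duration;
    let handle = tokio::runtime::Handle::current();
    // Bound the dump with a timeout in case the runtime itself is wedged.
    if let Ok(dump) = tokio::time::timeout(Duration::from_secs(2), handle.dump()).await {
        for (i, task) in dump.tasks().iter().enumerate() {
            println!("TASK {i}:\n{}\n", task.trace());
        }
    }
}

#[cfg(not(all(tokio_unstable, tokio_taskdump)))]
async fn print_task_traces() {}
```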
From 1b4d344e18bf58f3202bcbe706863f16598ca558 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Fri, 6 Jun 2025 11:42:54 +0300
Subject: [PATCH 127/131] Increase the wait time in the tests

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/common/server.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs
index 02ea2f79a..1bfbf7641 100644
--- a/crates/meilisearch/tests/common/server.rs
+++ b/crates/meilisearch/tests/common/server.rs
@@ -403,7 +403,7 @@ impl Server {
         let max_attempts = if update_id > 1000 {
             400 // 200 seconds for vector tests
         } else {
-            100 // 50 seconds for other tests
+            1000 // 500 seconds for other tests
         };
 
         for _ in 0..max_attempts {
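The comment fixed above had drifted in the original patch: 1000 attempts at 0.5s each is 500 seconds, not 50, and the hand-written totals are rewritten again in PATCHES 128, 129, and 131. A sketch of one way to keep the value and its documentation from diverging, assuming nothing about the test harness beyond the 0.5s polling interval:

```rust
use std::time::Duration;

// Sketch only, not the repository's helper: derive the total wait from the
// polling constants so the "total seconds" comment can never go stale.
const POLL_INTERVAL_MS: u64 = 500;
const MAX_ATTEMPTS: u64 = 400;
const TOTAL_WAIT: Duration = Duration::from_millis(POLL_INTERVAL_MS * MAX_ATTEMPTS); // = 200s
```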
From 63ccd19ab16ac7b336531d7485f06d749185987c Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Fri, 6 Jun 2025 13:45:05 +0300
Subject: [PATCH 128/131] Use Server::wait_task() instead of Index::wait_task() for tasks IT tests

Revert the debugging helper that dumped the thread stack traces.

Try with 400 max attempts for the task success/failure (200 secs)

Signed-off-by: Martin Tzvetanov Grigorov
---
 .cargo/config.toml                        |  3 --
 .github/workflows/test-suite.yml          |  2 --
 crates/meilisearch/tests/common/server.rs | 19 ++-----
 crates/meilisearch/tests/tasks/mod.rs     | 66 ++++++++++++-----------
 4 files changed, 39 insertions(+), 51 deletions(-)

diff --git a/.cargo/config.toml b/.cargo/config.toml
index b172ee2af..e11d56a31 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -1,5 +1,2 @@
 [alias]
 xtask = "run --release --package xtask --"
-
-[build]
-rustflags = ["--cfg", "tokio_unstable", "--cfg", "tokio_taskdump"]
diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index 100068965..6cf8bfa0f 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -158,8 +158,6 @@ jobs:
         uses: Swatinem/rust-cache@v2.7.8
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
-        env:
-          RUSTFLAGS: "--cfg tokio_unstable --cfg tokio_taskdump"
         with:
           command: test
           args: --locked --all
diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs
index 1bfbf7641..431972983 100644
--- a/crates/meilisearch/tests/common/server.rs
+++ b/crates/meilisearch/tests/common/server.rs
@@ -399,14 +399,9 @@ impl Server {
     pub async fn wait_task(&self, update_id: u64) -> Value {
         // try several times to get status, or panic to not wait forever
         let url = format!("/tasks/{}", update_id);
-        // Increase timeout for vector-related tests
-        let max_attempts = if update_id > 1000 {
-            400 // 200 seconds for vector tests
-        } else {
-            1000 // 500 seconds for other tests
-        };
+        let max_attempts = 400; // 200 seconds total, 0.5s per attempt
 
-        for _ in 0..max_attempts {
+        for i in 0..max_attempts {
             let (response, status_code) = self.service.get(&url).await;
             assert_eq!(200, status_code, "response: {}", response);
@@ -416,13 +411,9 @@ impl Server {
             // wait 0.5 second.
             sleep(Duration::from_millis(500)).await;
-        }
-        let handle = tokio::runtime::Handle::current();
-        if let Ok(dump) = tokio::time::timeout(Duration::from_secs(2), handle.dump()).await {
-            for (i, task) in dump.tasks().iter().enumerate() {
-                let trace = task.trace();
-                println!("TASK {i}:");
-                println!("{trace}\n");
+
+            if i == max_attempts - 1 {
+                dbg!(response);
             }
         }
         panic!("Timeout waiting for update id");
     }
diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index c31e43883..064b52a1f 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -39,7 +39,7 @@ async fn get_task_status() {
             None,
         )
         .await;
-    index.wait_task(create_task.uid()).await.succeeded();
+    server.wait_task(create_task.uid()).await.succeeded();
    let (_response, code) = index.get_task(add_task.uid()).await;
    assert_eq!(code, 200);
    // TODO check response format, as per #48
@@ -51,7 +51,7 @@ async fn list_tasks() {
    let server = Server::new().await;
    let index = server.index("test");
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    index
        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
@@ -105,7 +105,7 @@ async fn list_tasks_with_star_filters() {
    // Do not use a unique index here, as we want to test the `indexUids=*` filter.
    let index = server.index("test");
    let (task, _code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    index
        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
@@ -154,9 +154,9 @@ async fn list_tasks_status_filtered() {
    let server = Server::new().await;
    let index = server.index("test");
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
 
    let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
    assert_eq!(code, 200, "{response}");
@@ -177,7 +177,7 @@ async fn list_tasks_type_filtered() {
    let server = Server::new().await;
    let index = server.index("test");
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    index
        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
@@ -197,10 +197,12 @@ async fn list_tasks_invalid_canceled_by_filter() {
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
+
+    let (task, _code) = index
        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
+    server.wait_task(task.uid()).await.succeeded();
 
    let (response, code) =
        index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
@@ -214,7 +216,7 @@ async fn list_tasks_status_and_type_filtered() {
    let server = Server::new().await;
    let index = server.index("test");
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    index
        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
@@ -284,7 +286,7 @@ async fn test_summarized_document_addition_or_update() {
    let index = server.unique_index();
    let (task, _status_code) =
        index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -309,7 +311,7 @@ async fn test_summarized_document_addition_or_update() {
    let (task, _status_code) =
        index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -343,7 +345,7 @@ async fn test_summarized_delete_documents_by_batch() {
    let (task, _status_code) = index
        .delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3])
        .await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -374,7 +376,7 @@ async fn test_summarized_delete_documents_by_batch() {
    index.create(None).await;
    let (del_task, _status_code) = index.delete_batch(vec![42]).await;
-    index.wait_task(del_task.uid()).await.succeeded();
+    server.wait_task(del_task.uid()).await.succeeded();
    let (task, _) = index.get_task(del_task.uid()).await;
    snapshot!(task, @r###"
@@ -406,7 +408,7 @@ async fn test_summarized_delete_documents_by_filter() {
    let (task, _status_code) =
        index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -438,7 +440,7 @@ async fn test_summarized_delete_documents_by_filter() {
    index.create(None).await;
    let (task, _status_code) =
        index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -470,7 +472,7 @@ async fn test_summarized_delete_documents_by_filter() {
    index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
    let (task, _status_code) =
        index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -500,7 +502,7 @@ async fn test_summarized_delete_document_by_id() {
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _status_code) = index.delete_document(1).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -531,7 +533,7 @@ async fn test_summarized_delete_document_by_id() {
    index.create(None).await;
    let (task, _status_code) = index.delete_document(42).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -573,7 +575,7 @@ async fn test_summarized_settings_update() {
    "###);
 
    let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -611,7 +613,7 @@ async fn test_summarized_index_creation() {
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -634,7 +636,7 @@ async fn test_summarized_index_creation() {
    "###);
 
    let (task, _status_code) = index.create(Some("doggos")).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -667,7 +669,7 @@ async fn test_summarized_index_deletion() {
    let server = Server::new_shared();
    let index = server.unique_index();
    let (ret, _code) = index.delete().await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
    snapshot!(task, @r###"
    {
@@ -698,7 +700,7 @@ async fn test_summarized_index_deletion() {
    // both tasks may get autobatched and the deleted documents count will be wrong.
    let (ret, _code) =
        index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
    snapshot!(task, @r###"
    {
@@ -721,7 +723,7 @@ async fn test_summarized_index_deletion() {
    "###);
 
    let (ret, _code) = index.delete().await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
    snapshot!(task, @r###"
    {
@@ -744,7 +746,7 @@ async fn test_summarized_index_deletion() {
    // What happens when you delete an index that doesn't exists.
    let (ret, _code) = index.delete().await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
    snapshot!(task, @r###"
    {
@@ -777,7 +779,7 @@ async fn test_summarized_index_update() {
    let index = server.unique_index();
    // If the index doesn't exist yet, we should get errors with or without the primary key.
    let (task, _status_code) = index.update(None).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -805,7 +807,7 @@ async fn test_summarized_index_update() {
    "###);
 
    let (task, _status_code) = index.update(Some("bones")).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -836,7 +838,7 @@ async fn test_summarized_index_update() {
    index.create(None).await;
 
    let (task, _status_code) = index.update(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -859,7 +861,7 @@ async fn test_summarized_index_update() {
    "###);
 
    let (task, _status_code) = index.update(Some("bones")).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
@@ -973,9 +975,9 @@ async fn test_summarized_task_cancelation() {
    // to avoid being flaky we're only going to cancel an already finished task :(
    let (task, _status_code) = index.create(None).await;
    let task_uid = task.uid();
-    index.wait_task(task_uid).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(json_string!(task, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1007,9 +1009,9 @@ async fn test_summarized_task_deletion() {
    let index = server.unique_index();
    // to avoid being flaky we're only going to delete an already finished task :(
    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = server.delete_tasks("uids=0").await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
    let (task, _) = index.get_task(task.uid()).await;
    snapshot!(task, @r###"
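PATCH 128's mechanical `index.wait_task` → `server.wait_task` rewrite relies on the returned JSON task being chainable with `.succeeded()` / `.failed()`. The real helpers live in the tests' `common` module; the following is a simplified sketch of that contract with assumed names, not the repository's code:

```rust
use serde_json::Value;

// Illustrative trait mirroring the assertion helpers chained onto wait_task().
trait TaskAssertions: Sized {
    fn succeeded(self) -> Self;
    fn failed(self) -> Self;
}

impl TaskAssertions for Value {
    fn succeeded(self) -> Self {
        assert_eq!(self["status"], "succeeded", "task did not succeed: {}", self);
        self
    }

    fn failed(self) -> Self {
        assert_eq!(self["status"], "failed", "task did not fail: {}", self);
        self
    }
}
```

Waiting through the server rather than the index also means the helper never needs to know the uuid-based name of a unique index, which is what makes the snapshot redactions to `Index `[uuid]`` possible.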
From 10028515acca965be2b37c6d488c2231edb15205 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Fri, 6 Jun 2025 14:52:05 +0300
Subject: [PATCH 129/131] Use a unique server for the summarized dump creation test

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/common/server.rs | 2 +-
 crates/meilisearch/tests/tasks/mod.rs     | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs
index 431972983..e6f2aca2c 100644
--- a/crates/meilisearch/tests/common/server.rs
+++ b/crates/meilisearch/tests/common/server.rs
@@ -399,7 +399,7 @@ impl Server {
     pub async fn wait_task(&self, update_id: u64) -> Value {
         // try several times to get status, or panic to not wait forever
         let url = format!("/tasks/{}", update_id);
-        let max_attempts = 400; // 200 seconds total, 0.5s per attempt
+        let max_attempts = 100; // 50 seconds total, 0.5s per attempt
 
         for i in 0..max_attempts {
             let (response, status_code) = self.service.get(&url).await;
diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index 064b52a1f..09700d3c5 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -1038,7 +1038,8 @@ async fn test_summarized_task_deletion() {
 
 #[actix_web::test]
 async fn test_summarized_dump_creation() {
-    let server = Server::new_shared();
+    // Do not use a shared server because it takes too long to create a dump
+    let server = Server::new().await;
     let (task, _status_code) = server.create_dump().await;
     server.wait_task(task.uid()).await;
     let (task, _) = server.get_task(task.uid()).await;

From 2ec966487850e8327d2a8c48895d9b4c77cca543 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Fri, 6 Jun 2025 13:49:29 +0300
Subject: [PATCH 130/131] chore: Fix English grammar in SearchQueue's comments

No functional changes!

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/src/search_queue.rs | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/crates/meilisearch/src/search_queue.rs b/crates/meilisearch/src/search_queue.rs
index 6ab910164..9ff8da36c 100644
--- a/crates/meilisearch/src/search_queue.rs
+++ b/crates/meilisearch/src/search_queue.rs
@@ -1,5 +1,5 @@
 //! This file implements a queue of searches to process and the ability to control how many searches can be run in parallel.
-//! We need this because we don't want to process more search requests than we have cores.
+//! We need this because we don't want to process more search requests than the available CPU cores.
 //! That slows down everything and consumes RAM for no reason.
 //! The steps to do a search are to get the `SearchQueue` data structure and try to get a search permit.
 //! This can fail if the queue is full, and we need to drop your search request to register a new one.
@@ -8,7 +8,7 @@
 //!
 //! In order to do a search request you should try to get a search permit.
 //! Retrieve the `SearchQueue` structure from actix-web (`search_queue: Data<SearchQueue>`)
-//! and right before processing the search, calls the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
+//! and right before processing the search, call the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
 //!
 //! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
 //! Then, the queue/scheduler is going to either:
@@ -121,12 +121,12 @@ impl SearchQueue {
         let mut queue: Vec<oneshot::Sender<Permit>> = Default::default();
         let mut rng: StdRng = StdRng::from_entropy();
         let mut searches_running: usize = 0;
-        // By having a capacity of parallelism we ensures that every time a search finish it can release its RAM asap
+        // By having a capacity of parallelism we ensure that every time a search finishes it can release its RAM asap
         let (sender, mut search_finished) = mpsc::channel(parallelism.into());
         loop {
             tokio::select! {
-                // biased select because we wants to free up space before trying to register new tasks
+                // biased select because we want to free up space before trying to register new tasks
                 biased;
                 _ = search_finished.recv() => {
                     searches_running = searches_running.saturating_sub(1);
@@ -148,11 +148,11 @@ impl SearchQueue {
 
                     if searches_running < usize::from(parallelism) && queue.is_empty() {
                         searches_running += 1;
-                        // if the search requests die it's not a hard error on our side
+                        // if the search requests die, it's not a hard error on our side
                         let _ = search_request.send(Permit { sender: sender.clone() });
                         continue;
                     } else if capacity == 0 {
-                        // in the very specific case where we have a capacity of zero
+                        // in the very specific case where we have a capacity of zero,
                         // we must refuse the request straight away without going through
                         // the queue stuff.
                         drop(search_request);
@@ -183,7 +183,7 @@ impl SearchQueue {
             .map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?;
 
         // If we've been for more than one minute to get a search permit, it's better to simply
-        // abort the search request than spending time processing something were the client
+        // abort the search request than spending time processing something where the client
         // most certainly exited or got a timeout a long time ago.
         // We may find a better solution in https://github.com/actix/actix-web/issues/3462.
         if now.elapsed() > self.time_to_abort {

From 8f96724adf54ef6da06dae274ddef414a58ec1c8 Mon Sep 17 00:00:00 2001
From: Martin Grigorov
Date: Mon, 9 Jun 2025 14:03:49 +0300
Subject: [PATCH 131/131] Set max_attempts to 400 for Server::wait_task()

Co-authored-by: Tamo
---
 crates/meilisearch/tests/common/server.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs
index e6f2aca2c..431972983 100644
--- a/crates/meilisearch/tests/common/server.rs
+++ b/crates/meilisearch/tests/common/server.rs
@@ -399,7 +399,7 @@ impl Server {
     pub async fn wait_task(&self, update_id: u64) -> Value {
         // try several times to get status, or panic to not wait forever
         let url = format!("/tasks/{}", update_id);
-        let max_attempts = 100; // 50 seconds total, 0.5s per attempt
+        let max_attempts = 400; // 200 seconds total, 0.5s per attempt
 
         for i in 0..max_attempts {
             let (response, status_code) = self.service.get(&url).await;
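The doc comments polished in PATCH 130 describe a permit hand-off: a caller sends a `oneshot::Sender` over an mpsc channel, the scheduler loop answers with a `Permit` carrying a clone of `sender`, and releasing the permit notifies the loop so it can free the slot. A stripped-down sketch of that shape (types and names are simplified, not Meilisearch's actual implementation):

```rust
use tokio::sync::{mpsc, oneshot};

// The permit carries the channel used to tell the scheduler a search finished.
pub struct Permit {
    sender: mpsc::Sender<()>,
}

impl Permit {
    // Explicit release; a production version would also cover the case where
    // the permit is dropped without being released.
    pub async fn release(self) {
        let _ = self.sender.send(()).await;
    }
}

// Caller side: register a oneshot with the scheduler, then wait for the grant.
pub async fn try_get_search_permit(
    queue: &mpsc::Sender<oneshot::Sender<Permit>>,
) -> Option<Permit> {
    let (tx, rx) = oneshot::channel();
    queue.send(tx).await.ok()?;
    rx.await.ok() // None if the scheduler dropped the request (queue full)
}
```

Bounding the completion channel's capacity at the parallelism level, as the comments above explain, is what lets every finished search release its slot (and its RAM) without blocking.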