Fix some of the edition 2024 warnings

Clément Renault 2025-04-01 12:21:33 +02:00
parent 2762d5a32a
commit aa87064a13
82 changed files with 323 additions and 317 deletions

View File

@@ -30,7 +30,7 @@ authors = [
description = "Meilisearch HTTP server"
homepage = "https://meilisearch.com"
readme = "README.md"
-edition = "2021"
+edition = "2024"
license = "MIT"
[profile.release]
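Editor's note: this edition bump is what triggers everything else in the commit. Once a crate's Cargo.toml declares edition = "2024", the edition-migration lints start firing, and the remaining hunks below appear to be the largely mechanical fixes those lints suggest: `if let ... else` rewritten to `match`, `expr` macro fragments pinned to `expr_2021`, precise `+ use<..>` capture bounds on `impl Trait` return types, and `unsafe` blocks around `std::env::set_var`. Most of them look like what `cargo fix --edition` would emit, which explains the unformatted `} _ => {` style.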

View File

@@ -275,19 +275,19 @@ impl From<Task> for TaskView {
match (result, &mut details) {
(
TaskResult::DocumentAddition { indexed_documents: num, .. },
-Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }),
+Some(TaskDetails::DocumentAddition { indexed_documents, .. }),
) => {
indexed_documents.replace(*num);
}
(
TaskResult::DocumentDeletion { deleted_documents: docs, .. },
-Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }),
+Some(TaskDetails::DocumentDeletion { deleted_documents, .. }),
) => {
deleted_documents.replace(*docs);
}
(
TaskResult::ClearAll { deleted_documents: docs },
-Some(TaskDetails::ClearAll { ref mut deleted_documents }),
+Some(TaskDetails::ClearAll { deleted_documents }),
) => {
deleted_documents.replace(*docs);
}
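Editor's note: the `ref mut` removals here (and the similar `ref` removals later in the commit) line up with the Rust 2024 match-ergonomics rule that rejects an explicit `ref`/`ref mut` binding modifier when the scrutinee is already matched by reference, since the default binding mode is then already by-reference. A minimal sketch of the pattern, using hypothetical names rather than Meilisearch types:

// Rust 2021 tolerated `Some(ref mut count)` here; under the 2024 rules the
// modifier must go, and `count` still binds as `&mut u64` because the
// scrutinee `opt` is itself a `&mut Option<u64>`.
fn bump(opt: &mut Option<u64>, n: u64) {
    match opt {
        Some(count) => *count += n,
        None => *opt = Some(n),
    }
}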

View File

@@ -170,14 +170,14 @@ impl UpdateFile {
}
pub fn push_document(&mut self, document: &Document) -> Result<()> {
-if let Some(mut writer) = self.writer.as_mut() {
+match self.writer.as_mut() { Some(mut writer) => {
serde_json::to_writer(&mut writer, &document)?;
writer.write_all(b"\n")?;
-} else {
+} _ => {
let file = File::create(&self.path).unwrap();
self.writer = Some(BufWriter::new(file));
self.push_document(document)?;
-}
+}}
Ok(())
}
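Editor's note: this `if let ... else` to `match` rewrite is the shape that recurs throughout the rest of the commit. In Rust 2024 the temporaries produced while evaluating an `if let` scrutinee are dropped before the `else` branch runs, whereas a `match` keeps them alive for the whole expression; rewriting to `match` therefore preserves the 2021 drop order, which is what the migration tooling proposes when it cannot prove the new timing is harmless. A minimal sketch with hypothetical names, not Meilisearch code:

use std::sync::Mutex;

// With `if let ... else`, the MutexGuard temporary would be dropped before the
// fallback arm under Rust 2024; with `match` it lives until the end of the
// whole expression, as it did in Rust 2021.
fn read_or_default(slot: &Mutex<Option<String>>) -> String {
    match slot.lock().unwrap().as_deref() {
        Some(s) => s.to_owned(),
        _ => String::new(), // the guard is still held here
    }
}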

View File

@@ -111,7 +111,7 @@ impl FileStore {
}
/// List the Uuids of the files in the FileStore
-pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
+pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>> + use<>> {
Ok(self.path.read_dir()?.filter_map(|entry| {
let file_name = match entry {
Ok(entry) => entry.file_name(),
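Editor's note: the `+ use<>` added to `all_uuids` is Rust's precise-capturing syntax. In the 2024 edition a return-position `impl Trait` captures all in-scope lifetimes by default, including the elided `&self` lifetime, which would needlessly tie the returned iterator to the borrow of `self`; `use<>` declares that nothing is captured, keeping the 2021 behaviour. A minimal sketch with made-up types:

struct Counter {
    up_to: u32,
}

impl Counter {
    // Without `+ use<>`, the 2024 rules would capture the `&self` lifetime even
    // though the iterator only owns a copied `u32` and never borrows from self.
    fn values(&self) -> impl Iterator<Item = u32> + use<> {
        0..self.up_to
    }
}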
@@ -158,19 +158,19 @@ impl File {
impl Write for File {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
-if let Some(file) = self.file.as_mut() {
+match self.file.as_mut() { Some(file) => {
file.write(buf)
-} else {
+} _ => {
Ok(buf.len())
-}
+}}
}
fn flush(&mut self) -> std::io::Result<()> {
-if let Some(file) = self.file.as_mut() {
+match self.file.as_mut() { Some(file) => {
file.flush()
-} else {
+} _ => {
Ok(())
-}
+}}
}
}

View File

@ -3,7 +3,7 @@ name = "filter-parser-fuzz"
version = "0.0.0" version = "0.0.0"
authors = ["Automatically generated"] authors = ["Automatically generated"]
publish = false publish = false
edition = "2018" edition = "2024"
[package.metadata] [package.metadata]
cargo-fuzz = true cargo-fuzz = true

View File

@@ -198,7 +198,7 @@ impl Display for Error<'_> {
f,
"Encountered an internal `{:?}` error while parsing your filter. Please fill an issue", kind
)?,
-ErrorKind::External(ref error) => writeln!(f, "{}", error)?,
+ErrorKind::External(error) => writeln!(f, "{}", error)?,
}
let base_column = self.context.get_utf8_column();
let size = self.context.fragment().chars().count();

View File

@ -3,7 +3,7 @@ name = "flatten-serde-json-fuzz"
version = "0.0.0" version = "0.0.0"
authors = ["Automatically generated"] authors = ["Automatically generated"]
publish = false publish = false
edition = "2018" edition = "2024"
[package.metadata] [package.metadata]
cargo-fuzz = true cargo-fuzz = true

View File

@@ -272,11 +272,11 @@ impl IndexMapper {
if tries >= 100 {
panic!("Too many attempts to close index {name} prior to deletion.")
}
-let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
+let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
reopen
-} else {
+} _ => {
continue;
-};
+}};
reopen.close(&mut self.index_map.write().unwrap());
continue;
}
@@ -382,11 +382,11 @@ impl IndexMapper {
Available(index) => break index,
Closing(reopen) => {
// Avoiding deadlocks: no lock taken while doing this operation.
-let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
+let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
reopen
-} else {
+} _ => {
continue;
-};
+}};
let index_path = self.base_path.join(uuid.to_string());
// take the lock to reopen the environment.
reopen

View File

@@ -355,19 +355,19 @@ impl IndexScheduler {
}
fn is_good_heed(tasks_path: &Path, map_size: usize) -> bool {
-if let Ok(env) = unsafe {
+match unsafe {
heed::EnvOpenOptions::new().map_size(clamp_to_page_size(map_size)).open(tasks_path)
-} {
+} { Ok(env) => {
env.prepare_for_closing().wait();
true
-} else {
+} _ => {
// We're treating all errors equally here, not only allocation errors.
// This means there's a possiblity for the budget to lower due to errors different from allocation errors.
// For persistent errors, this is OK as long as the task db is then reopened normally without ignoring the error this time.
// For transient errors, this could lead to an instance with too low a budget.
// However transient errors are: 1) less likely than persistent errors 2) likely to cause other issues down the line anyway.
false
-}
+}}
}
pub fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {

View File

@@ -10,7 +10,7 @@ use crate::TaskId;
#[macro_export]
macro_rules! debug_snapshot {
-($value:expr, @$snapshot:literal) => {{
+($value:expr_2021, @$snapshot:literal) => {{
let value = format!("{:?}", $value);
meili_snap::snapshot!(value, @$snapshot);
}};
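Editor's note: `$value:expr` becoming `$value:expr_2021` is the macro-fragment migration. In the 2024 edition the `expr` fragment specifier also matches `const { ... }` blocks and `_`, so the tooling swaps in `expr_2021` to keep each matcher accepting exactly what it accepted before. A minimal sketch with a made-up macro (assumes a toolchain recent enough to know `expr_2021`):

macro_rules! describe {
    ($e:expr_2021) => { "an ordinary expression" };
    (_) => { "a placeholder" };
}

fn main() {
    // Under 2024, a plain `expr` in the first rule would already match `_`;
    // `expr_2021` lets `_` fall through to the dedicated rule below.
    println!("{} / {}", describe!(_), describe!(1 + 1));
}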

View File

@@ -499,13 +499,13 @@ impl IndexScheduler {
// create the batch directly. Otherwise, get the index name associated with the task
// and use the autobatcher to batch the enqueued tasks associated with it
-let index_name = if let Some(&index_name) = task.indexes().first() {
+let index_name = match task.indexes().first() { Some(&index_name) => {
index_name
-} else {
+} _ => {
assert!(matches!(&task.kind, KindWithContent::IndexSwap { swaps } if swaps.is_empty()));
current_batch.processing(Some(&mut task));
return Ok(Some((Batch::IndexSwap { task }, current_batch)));
-};
+}};
let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
let mut primary_key = None;

View File

@@ -47,11 +47,11 @@ impl IndexScheduler {
Batch::TaskCancelation { mut task } => {
// 1. Retrieve the tasks that matched the query at enqueue-time.
let matched_tasks =
-if let KindWithContent::TaskCancelation { tasks, query: _ } = &task.kind {
+match &task.kind { KindWithContent::TaskCancelation { tasks, query: _ } => {
tasks
-} else {
+} _ => {
unreachable!()
-};
+}};
let rtxn = self.env.read_txn()?;
let mut canceled_tasks = self.cancel_matched_tasks(
@@ -83,11 +83,11 @@ impl IndexScheduler {
let mut matched_tasks = RoaringBitmap::new();
for task in tasks.iter() {
-if let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind {
+match &task.kind { KindWithContent::TaskDeletion { tasks, query: _ } => {
matched_tasks |= tasks;
-} else {
+} _ => {
unreachable!()
-}
+}}
}
let mut wtxn = self.env.write_txn()?;
@@ -279,11 +279,11 @@ impl IndexScheduler {
progress.update_progress(SwappingTheIndexes::EnsuringCorrectnessOfTheSwap);
let mut wtxn = self.env.write_txn()?;
-let swaps = if let KindWithContent::IndexSwap { swaps } = &task.kind {
+let swaps = match &task.kind { KindWithContent::IndexSwap { swaps } => {
swaps
-} else {
+} _ => {
unreachable!()
-};
+}};
let mut not_found_indexes = BTreeSet::new();
for IndexSwap { indexes: (lhs, rhs) } in swaps {
for index in [lhs, rhs] {
@@ -532,7 +532,7 @@ impl IndexScheduler {
// We must remove the batch entirely
if tasks.is_empty() {
if let Some(batch) = self.queue.batches.get_batch(wtxn, batch_id)? {
-if let Some(BatchEnqueuedAt { earliest, oldest }) = batch.enqueued_at {
+match batch.enqueued_at { Some(BatchEnqueuedAt { earliest, oldest }) => {
remove_task_datetime(
wtxn,
self.queue.batches.enqueued_at,
@@ -545,7 +545,7 @@ impl IndexScheduler {
oldest,
batch_id,
)?;
-} else {
+} _ => {
// If we don't have the enqueued at in the batch it means the database comes from the v1.12
// and we still need to find the date by scrolling the database
remove_n_tasks_datetime_earlier_than(
@@ -555,7 +555,7 @@ impl IndexScheduler {
batch.stats.total_nb_tasks.clamp(1, 2) as usize,
batch_id,
)?;
-}
+}}
remove_task_datetime(
wtxn,
self.queue.batches.started_at,

View File

@@ -26,11 +26,11 @@ impl IndexScheduler {
progress.update_progress(DumpCreationProgress::StartTheDumpCreation);
let started_at = OffsetDateTime::now_utc();
let (keys, instance_uid) =
-if let KindWithContent::DumpCreation { keys, instance_uid } = &task.kind {
+match &task.kind { KindWithContent::DumpCreation { keys, instance_uid } => {
(keys, instance_uid)
-} else {
+} _ => {
unreachable!();
-};
+}};
let dump = dump::DumpWriter::new(*instance_uid)?;
// 1. dump the keys
@@ -206,14 +206,14 @@ impl IndexScheduler {
let user_err =
milli::Error::UserError(milli::UserError::InvalidVectorsMapType {
document_id: {
-if let Ok(Some(Ok(index))) = index
+match index
.external_id_of(&rtxn, std::iter::once(id))
.map(|it| it.into_iter().next())
-{
+{ Ok(Some(Ok(index))) => {
index
-} else {
+} _ => {
format!("internal docid={id}")
-}
+}}
},
value: vectors.clone(),
});

View File

@@ -206,17 +206,17 @@ impl IndexScheduler {
IndexOperation::DocumentEdition { index_uid, mut task } => {
progress.update_progress(DocumentEditionProgress::RetrievingConfig);
-let (filter, code) = if let KindWithContent::DocumentEdition {
+let (filter, code) = match &task.kind
+{ KindWithContent::DocumentEdition {
filter_expr,
context: _,
function,
..
-} = &task.kind
-{
+} => {
(filter_expr, function)
-} else {
+} _ => {
unreachable!()
-};
+}};
let candidates = match filter.as_ref().map(Filter::from_json) {
Some(Ok(Some(filter))) => filter
@@ -226,18 +226,18 @@ impl IndexScheduler {
Some(Err(e)) => return Err(Error::from_milli(e, Some(index_uid.clone()))),
};
-let (original_filter, context, function) = if let Some(Details::DocumentEdition {
+let (original_filter, context, function) = match task.details
+{ Some(Details::DocumentEdition {
original_filter,
context,
function,
..
-}) = task.details
-{
+}) => {
(original_filter, context, function)
-} else {
+} _ => {
// In the case of a `documentEdition` the details MUST be set
unreachable!();
-};
+}};
if candidates.is_empty() {
task.status = Status::Succeeded;
@@ -397,16 +397,16 @@ impl IndexScheduler {
};
}
let will_be_removed = to_delete.len() - before;
-if let Some(Details::DocumentDeletionByFilter {
+match &mut task.details
+{ Some(Details::DocumentDeletionByFilter {
original_filter: _,
deleted_documents,
-}) = &mut task.details
-{
+}) => {
*deleted_documents = Some(will_be_removed);
-} else {
+} _ => {
// In the case of a `documentDeleteByFilter` the details MUST be set
unreachable!()
-}
+}}
}
_ => unreachable!(),
}

View File

@@ -307,7 +307,7 @@ pub(crate) fn filter_out_references_to_newer_tasks(task: &mut Task) {
pub(crate) fn check_index_swap_validity(task: &Task) -> Result<()> {
let swaps =
-if let KindWithContent::IndexSwap { swaps } = &task.kind { swaps } else { return Ok(()) };
+match &task.kind { KindWithContent::IndexSwap { swaps } => { swaps } _ => { return Ok(()) }};
let mut all_indexes = HashSet::new();
let mut duplicate_indexes = BTreeSet::new();
for IndexSwap { indexes: (lhs, rhs) } in swaps {
@@ -501,15 +501,15 @@ impl crate::IndexScheduler {
} => {
assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
let (index_uid, documents_ids) =
-if let KindWithContent::DocumentDeletion {
+match kind
+{ KindWithContent::DocumentDeletion {
ref index_uid,
ref documents_ids,
-} = kind
-{
+} => {
(index_uid, documents_ids)
-} else {
+} _ => {
unreachable!()
-};
+}};
assert_eq!(&task_index_uid.unwrap(), index_uid);
match status {
@@ -526,15 +526,15 @@ impl crate::IndexScheduler {
}
Details::DocumentDeletionByFilter { deleted_documents, original_filter: _ } => {
assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
-let (index_uid, _) = if let KindWithContent::DocumentDeletionByFilter {
+let (index_uid, _) = match kind
+{ KindWithContent::DocumentDeletionByFilter {
ref index_uid,
ref filter_expr,
-} = kind
-{
+} => {
(index_uid, filter_expr)
-} else {
+} _ => {
unreachable!()
-};
+}};
assert_eq!(&task_index_uid.unwrap(), index_uid);
match status {

View File

@ -3,7 +3,7 @@ name = "json-depth-checker"
version = "0.0.0" version = "0.0.0"
authors = ["Automatically generated"] authors = ["Automatically generated"]
publish = false publish = false
edition = "2018" edition = "2024"
[package.metadata] [package.metadata]
cargo-fuzz = true cargo-fuzz = true

View File

@@ -77,7 +77,7 @@ snapshot_hash!("hello world", name: "snap_name", @"5f93f983524def3dca464469d2cf9
*/
#[macro_export]
macro_rules! snapshot_hash {
-($value:expr, @$inline:literal) => {
+($value:expr_2021, @$inline:literal) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -99,7 +99,7 @@ macro_rules! snapshot_hash {
}
});
};
-($value:expr, name: $name:expr, @$inline:literal) => {
+($value:expr_2021, name: $name:expr_2021, @$inline:literal) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -151,7 +151,7 @@ snapshot!(format!("{:?}", vec![1, 2]), @"[1, 2]");
*/
#[macro_export]
macro_rules! snapshot {
-($value:expr, name: $name:expr) => {
+($value:expr_2021, name: $name:expr_2021) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -172,7 +172,7 @@ macro_rules! snapshot {
}
});
};
-($value:expr, @$inline:literal) => {
+($value:expr_2021, @$inline:literal) => {
// Note that the name given as argument does not matter since it is only an inline snapshot
// We don't pass None because otherwise `meili-snap` will try to assign it a unique identifier
let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
@@ -183,7 +183,7 @@ macro_rules! snapshot {
}
});
};
-($value:expr) => {
+($value:expr_2021) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -213,13 +213,13 @@ macro_rules! snapshot {
/// refer to the redactions feature in the `insta` guide.
#[macro_export]
macro_rules! json_string {
-($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
+($value:expr_2021, {$($k:expr_2021 => $v:expr_2021),*$(,)?}) => {
{
let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
snap
}
};
-($value:expr) => {{
+($value:expr_2021) => {{
let value = meili_snap::insta::_macro_support::serialize_value(
&$value,
meili_snap::insta::_macro_support::SerializationFormat::Json,

View File

@@ -403,7 +403,7 @@ impl ErrorCode for milli::Error {
match self {
Error::InternalError(_) => Code::Internal,
Error::IoError(e) => e.error_code(),
-Error::UserError(ref error) => {
+Error::UserError(error) => {
match error {
// TODO: wait for spec for new error codes.
UserError::SerdeJson(_)

View File

@@ -33,7 +33,7 @@ impl From<LocalizedAttributesRuleView> for LocalizedAttributesRule {
///
/// this enum implements `Deserr` in order to be used in the API.
macro_rules! make_locale {
-($(($iso_639_1:ident, $iso_639_1_str:expr) => ($iso_639_3:ident, $iso_639_3_str:expr),)+) => {
+($(($iso_639_1:ident, $iso_639_1_str:expr_2021) => ($iso_639_3:ident, $iso_639_3_str:expr_2021),)+) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, Serialize, Deserialize, Ord, PartialOrd, ToSchema)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]

View File

@@ -572,19 +572,19 @@ pub fn apply_settings_to_builder(
} = settings;
match searchable_attributes.deref() {
-Setting::Set(ref names) => builder.set_searchable_fields(names.clone()),
+Setting::Set(names) => builder.set_searchable_fields(names.clone()),
Setting::Reset => builder.reset_searchable_fields(),
Setting::NotSet => (),
}
match displayed_attributes.deref() {
-Setting::Set(ref names) => builder.set_displayed_fields(names.clone()),
+Setting::Set(names) => builder.set_displayed_fields(names.clone()),
Setting::Reset => builder.reset_displayed_fields(),
Setting::NotSet => (),
}
match filterable_attributes {
-Setting::Set(ref facets) => {
+Setting::Set(facets) => {
builder.set_filterable_fields(facets.clone().into_iter().collect())
}
Setting::Reset => builder.reset_filterable_fields(),
@@ -592,13 +592,13 @@ pub fn apply_settings_to_builder(
}
match sortable_attributes {
-Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
+Setting::Set(fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
Setting::Reset => builder.reset_sortable_fields(),
Setting::NotSet => (),
}
match ranking_rules {
-Setting::Set(ref criteria) => {
+Setting::Set(criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
}
Setting::Reset => builder.reset_criteria(),
@@ -606,13 +606,13 @@ pub fn apply_settings_to_builder(
}
match stop_words {
-Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()),
+Setting::Set(stop_words) => builder.set_stop_words(stop_words.clone()),
Setting::Reset => builder.reset_stop_words(),
Setting::NotSet => (),
}
match non_separator_tokens {
-Setting::Set(ref non_separator_tokens) => {
+Setting::Set(non_separator_tokens) => {
builder.set_non_separator_tokens(non_separator_tokens.clone())
}
Setting::Reset => builder.reset_non_separator_tokens(),
@@ -620,7 +620,7 @@ pub fn apply_settings_to_builder(
}
match separator_tokens {
-Setting::Set(ref separator_tokens) => {
+Setting::Set(separator_tokens) => {
builder.set_separator_tokens(separator_tokens.clone())
}
Setting::Reset => builder.reset_separator_tokens(),
@@ -628,38 +628,38 @@ pub fn apply_settings_to_builder(
}
match dictionary {
-Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
+Setting::Set(dictionary) => builder.set_dictionary(dictionary.clone()),
Setting::Reset => builder.reset_dictionary(),
Setting::NotSet => (),
}
match synonyms {
-Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
+Setting::Set(synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
Setting::Reset => builder.reset_synonyms(),
Setting::NotSet => (),
}
match distinct_attribute {
-Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()),
+Setting::Set(attr) => builder.set_distinct_field(attr.clone()),
Setting::Reset => builder.reset_distinct_field(),
Setting::NotSet => (),
}
match proximity_precision {
-Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
+Setting::Set(precision) => builder.set_proximity_precision((*precision).into()),
Setting::Reset => builder.reset_proximity_precision(),
Setting::NotSet => (),
}
match localized_attributes_rules {
-Setting::Set(ref rules) => builder
+Setting::Set(rules) => builder
.set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
Setting::Reset => builder.reset_localized_attributes_rules(),
Setting::NotSet => (),
}
match typo_tolerance {
-Setting::Set(ref value) => {
+Setting::Set(value) => {
match value.enabled {
Setting::Set(val) => builder.set_autorize_typos(val),
Setting::Reset => builder.reset_authorize_typos(),
@@ -736,7 +736,7 @@ pub fn apply_settings_to_builder(
}
match pagination {
-Setting::Set(ref value) => match value.max_total_hits {
+Setting::Set(value) => match value.max_total_hits {
Setting::Set(val) => builder.set_pagination_max_total_hits(val),
Setting::Reset => builder.reset_pagination_max_total_hits(),
Setting::NotSet => (),

View File

@@ -89,11 +89,11 @@ fn is_empty_db(db_path: impl AsRef<Path>) -> bool {
if !db_path.exists() {
true
// if we encounter an error or if the db is a file we consider the db non empty
-} else if let Ok(dir) = db_path.read_dir() {
+} else { match db_path.read_dir() { Ok(dir) => {
dir.count() == 0
-} else {
+} _ => {
true
-}
+}}}
}
/// The handle used to update the logs at runtime. Must be accessible from the `main.rs` and the `route/logs.rs`.
@@ -466,18 +466,18 @@ fn import_dump(
let reader = File::open(dump_path)?;
let mut dump_reader = dump::DumpReader::open(reader)?;
-if let Some(date) = dump_reader.date() {
+match dump_reader.date() { Some(date) => {
tracing::info!(
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
%date,
"Importing a dump of meilisearch"
);
-} else {
+} _ => {
tracing::info!(
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
"Importing a dump of meilisearch",
);
-}
+}}
let instance_uid = dump_reader.instance_uid()?;

View File

@@ -178,11 +178,11 @@ async fn run_http(
.disable_signals()
.keep_alive(KeepAlive::Os);
-if let Some(config) = opt_clone.get_ssl_config()? {
+match opt_clone.get_ssl_config()? { Some(config) => {
http_server.bind_rustls_0_23(opt_clone.http_addr, config)?.run().await?;
-} else {
+} _ => {
http_server.bind(&opt_clone.http_addr)?.run().await?;
-}
+}}
Ok(())
}

View File

@@ -907,7 +907,7 @@ fn load_private_key(
fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {
let mut ret = Vec::new();
-if let Some(ref name) = filename {
+if let Some(name) = filename {
fs::File::open(name)
.map_err(|_| anyhow::anyhow!("cannot open ocsp file"))?
.read_to_end(&mut ret)
@@ -924,7 +924,8 @@ where
T: AsRef<OsStr>,
{
if let Err(VarError::NotPresent) = std::env::var(key) {
-std::env::set_var(key, value);
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var(key, value) };
}
}
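Editor's note: `std::env::set_var` (and `remove_var`) are unsafe functions in the 2024 edition because mutating the process environment is not thread-safe on most platforms; the `// TODO: Audit ...` comment above is the stock note the migration tooling leaves next to the new `unsafe` block. A minimal sketch of what callers are now expected to assert, using a hypothetical helper:

use std::env;

fn force_backtraces() {
    if env::var_os("RUST_BACKTRACE").is_none() {
        // SAFETY: assumed to run during single-threaded startup, before any
        // other thread can read or write the environment.
        unsafe { env::set_var("RUST_BACKTRACE", "1") };
    }
}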

View File

@@ -97,12 +97,12 @@ async fn get_batch(
let filters = index_scheduler.filters();
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;
-if let Some(batch) = batches.first() {
+match batches.first() { Some(batch) => {
let batch_view = BatchView::from_batch(batch);
Ok(HttpResponse::Ok().json(batch_view))
-} else {
+} _ => {
Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
-}
+}}
}
#[derive(Debug, Serialize, ToSchema)]

View File

@@ -619,7 +619,7 @@ fn documents_by_query(
let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);
-let ids = if let Some(ids) = ids {
+let ids = match ids { Some(ids) => {
let mut parsed_ids = Vec::with_capacity(ids.len());
for (index, id) in ids.into_iter().enumerate() {
let id = id.try_into().map_err(|error| {
@@ -629,9 +629,9 @@ fn documents_by_query(
parsed_ids.push(id)
}
Some(parsed_ids)
-} else {
+} _ => {
None
-};
+}};
let index = index_scheduler.index(&index_uid)?;
let (total, documents) = retrieve_documents(

View File

@@ -131,7 +131,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
ret.total_received = 1;
-if let Some(ref sort) = sort {
+if let Some(sort) = sort {
ret.sort_total_number_of_criteria = 1;
ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
ret.sort_sum_of_criteria_terms = sort.len();
@@ -139,7 +139,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
ret.distinct = distinct.is_some();
-if let Some(ref filter) = filter {
+if let Some(filter) = filter {
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
ret.filter_total_number_of_criteria = 1;
@@ -168,11 +168,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
ret.attributes_to_search_on_total_number_of_uses = 1;
}
-if let Some(ref q) = q {
+if let Some(q) = q {
ret.max_terms_number = q.split_whitespace().count();
}
-if let Some(ref vector) = vector {
+if let Some(vector) = vector {
ret.max_vector_size = vector.len();
}
ret.retrieve_vectors |= retrieve_vectors;

View File

@@ -67,7 +67,7 @@ impl<Method: AggregateMethod> SimilarAggregator<Method> {
ret.total_received = 1;
-if let Some(ref filter) = filter {
+if let Some(filter) = filter {
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
ret.filter_total_number_of_criteria = 1;

View File

@@ -341,11 +341,11 @@ pub async fn get_logs(
})
.unwrap();
-if let Some(stream) = stream {
+match stream { Some(stream) => {
Ok(HttpResponse::Ok().streaming(stream))
-} else {
+} _ => {
Err(MeilisearchHttpError::AlreadyUsedLogRoute.into())
-}
+}}
}
/// Stop retrieving logs

View File

@@ -638,12 +638,12 @@ async fn get_task(
let filters = index_scheduler.filters();
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
-if let Some(task) = tasks.first() {
+match tasks.first() { Some(task) => {
let task_view = TaskView::from_task(task);
Ok(HttpResponse::Ok().json(task_view))
-} else {
+} _ => {
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
-}
+}}
}
/// Get a task's documents.
@@ -693,7 +693,7 @@ async fn get_task_documents_file(
let filters = index_scheduler.filters();
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
-if let Some(task) = tasks.first() {
+match tasks.first() { Some(task) => {
match task.content_uuid() {
Some(uuid) => {
let mut tfile = match index_scheduler.queue.update_file(uuid) {
@@ -711,9 +711,9 @@ async fn get_task_documents_file(
}
None => Err(index_scheduler::Error::TaskFileNotFound(task_uid).into()),
}
-} else {
+} _ => {
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
-}
+}}
}
pub enum DeserializeDateOption {

View File

@@ -740,7 +740,7 @@ impl SearchByIndex {
_ => ranking_rules::CanonicalizationKind::Placeholder,
};
-let sort = if let Some(sort) = &query.sort {
+let sort = match &query.sort { Some(sort) => {
let sorts: Vec<_> =
match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
Ok(sorts) => sorts,
@@ -752,9 +752,9 @@ impl SearchByIndex {
}
};
Some(sorts)
-} else {
+} _ => {
None
-};
+}};
let ranking_rules = ranking_rules::RankingRules::new(
criteria.clone(),

View File

@@ -1331,15 +1331,15 @@ impl<'a> HitMaker<'a> {
let displayed_ids =
displayed_ids.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
-let retrieve_vectors = if let RetrieveVectors::Retrieve = format.retrieve_vectors {
+let retrieve_vectors = match format.retrieve_vectors { RetrieveVectors::Retrieve => {
if vectors_is_hidden {
RetrieveVectors::Hide
} else {
RetrieveVectors::Retrieve
}
-} else {
+} _ => {
format.retrieve_vectors
-};
+}};
let fids = |attrs: &BTreeSet<String>| {
let mut ids = BTreeSet::new();

View File

@@ -94,7 +94,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
});
macro_rules! compute_authorized_search {
-($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
+($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -141,7 +141,7 @@ macro_rules! compute_authorized_search {
}
macro_rules! compute_forbidden_search {
-($tenant_tokens:expr, $parent_keys:expr) => {
+($tenant_tokens:expr_2021, $parent_keys:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");

View File

@@ -262,7 +262,7 @@ static BOTH_REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
});
macro_rules! compute_authorized_single_search {
-($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
+($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -333,7 +333,7 @@ macro_rules! compute_authorized_single_search {
}
macro_rules! compute_authorized_multiple_search {
-($tenant_tokens:expr, $filter1:expr, $filter2:expr, $expected_count1:expr, $expected_count2:expr) => {
+($tenant_tokens:expr_2021, $filter1:expr_2021, $filter2:expr_2021, $expected_count1:expr_2021, $expected_count2:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -417,7 +417,7 @@ macro_rules! compute_authorized_multiple_search {
}
macro_rules! compute_forbidden_single_search {
-($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
+($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -493,7 +493,7 @@ macro_rules! compute_forbidden_single_search {
}
macro_rules! compute_forbidden_multiple_search {
-($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
+($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");

View File

@@ -63,7 +63,7 @@ impl Encoder {
buffer
}
-pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair> {
+pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair + use<>> {
match self {
Self::Plain => None,
Self::Gzip => Some(("Content-Encoding", "gzip")),

View File

@@ -25,13 +25,13 @@ pub struct Value(pub serde_json::Value);
impl Value {
#[track_caller]
pub fn uid(&self) -> u64 {
-if let Some(uid) = self["uid"].as_u64() {
+match self["uid"].as_u64() { Some(uid) => {
uid
-} else if let Some(uid) = self["taskUid"].as_u64() {
+} _ => { match self["taskUid"].as_u64() { Some(uid) => {
uid
-} else {
+} _ => {
panic!("Didn't find any task id in: {self}");
-}
+}}}}
}
pub fn has_uid(&self) -> bool {
@@ -150,7 +150,7 @@ macro_rules! json {
/// Performs a search test on both post and get routes
#[macro_export]
macro_rules! test_post_get_search {
-($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
+($server:expr_2021, $query:expr_2021, |$response:ident, $status_code:ident | $block:expr_2021) => {
let post_query: meilisearch::routes::search::SearchQueryPost =
serde_json::from_str(&$query.clone().to_string()).unwrap();
let get_query: meilisearch::routes::search::SearchQuery = post_query.into();

View File

@@ -43,9 +43,11 @@ impl Server<Owned> {
let dir = TempDir::new().unwrap();
if cfg!(windows) {
-std::env::set_var("TMP", TEST_TEMP_DIR.path());
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
} else {
-std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
}
let options = default_settings(dir.path());
@@ -58,9 +60,11 @@ impl Server<Owned> {
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
if cfg!(windows) {
-std::env::set_var("TMP", TEST_TEMP_DIR.path());
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
} else {
-std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
}
options.master_key = Some("MASTER_KEY".to_string());
@@ -191,9 +195,11 @@ impl Server<Shared> {
let dir = TempDir::new().unwrap();
if cfg!(windows) {
-std::env::set_var("TMP", TEST_TEMP_DIR.path());
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
} else {
-std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+// TODO: Audit that the environment access only happens in single-threaded code.
+unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
}
let options = default_settings(dir.path());
@@ -296,9 +302,9 @@ impl<State> Server<State> {
&self,
) -> impl actix_web::dev::Service<
actix_http::Request,
-Response = ServiceResponse<impl MessageBody>,
+Response = ServiceResponse<impl MessageBody + use<State>>,
Error = actix_web::Error,
-> {
+> + use<State> {
self.service.init_web_app().await
}
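Editor's note: unlike the empty `use<>` elsewhere in the commit, the list here names the type parameter: only lifetimes can be left out of a `use<...>` list, while in-scope type parameters such as `State` generally still have to appear in it, and the returned service may depend on `State` anyway. A minimal sketch with made-up names of a case where the parameter genuinely must be captured:

// `use<T>` is required: the repeated iterator owns a `T`, so the hidden type
// mentions the parameter and cannot be declared capture-free.
fn repeat<T: Clone>(value: T, n: usize) -> impl Iterator<Item = T> + use<T> {
    std::iter::repeat(value).take(n)
}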

View File

@@ -116,9 +116,9 @@ impl Service {
&self,
) -> impl actix_web::dev::Service<
actix_http::Request,
-Response = ServiceResponse<impl MessageBody>,
+Response = ServiceResponse<impl MessageBody + use<>>,
Error = actix_web::Error,
-> {
+> + use<> {
let (_route_layer, route_layer_handle) =
tracing_subscriber::reload::Layer::new(None.with_filter(
tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),

View File

@@ -10,10 +10,10 @@ use crate::json;
macro_rules! verify_snapshot {
(
-$orig:expr,
-$snapshot: expr,
+$orig:expr_2021,
+$snapshot: expr_2021,
|$server:ident| =>
-$($e:expr,)+) => {
+$($e:expr_2021,)+) => {
use std::sync::Arc;
let snapshot = Arc::new($snapshot);
let orig = Arc::new($orig);

View File

@@ -228,7 +228,7 @@ async fn list_tasks_status_and_type_filtered() {
}
macro_rules! assert_valid_summarized_task {
-($response:expr, $task_type:literal, $index:literal) => {{
+($response:expr_2021, $task_type:literal, $index:literal) => {{
assert_eq!($response.as_object().unwrap().len(), 5);
assert!($response["taskUid"].as_u64().is_some());
assert_eq!($response["indexUid"], $index);

View File

@@ -577,14 +577,14 @@ fn export_documents(
return Err(meilisearch_types::milli::Error::UserError(
meilisearch_types::milli::UserError::InvalidVectorsMapType {
document_id: {
-if let Ok(Some(Ok(index))) = index
+match index
.external_id_of(&rtxn, std::iter::once(id))
.map(|it| it.into_iter().next())
-{
+{ Ok(Some(Ok(index))) => {
index
-} else {
+} _ => {
format!("internal docid={id}")
-}
+}}
},
value: vectors.clone(),
},

View File

@@ -1,6 +1,6 @@
[package]
name = "milli"
-edition = "2021"
+edition = "2024"
publish = false
version.workspace = true

View File

@@ -8,7 +8,7 @@ use crate::documents::DocumentsBatchBuilder;
use crate::Object;
macro_rules! tri {
-($e:expr) => {
+($e:expr_2021) => {
match $e {
Ok(r) => r,
Err(e) => return Ok(Err(e.into())),

View File

@@ -301,26 +301,26 @@ impl<'a> FacetDistribution<'a> {
let mut distribution = BTreeMap::new();
for (fid, name) in fields_ids_map.iter() {
if self.select_field(name, &filterable_attributes_rules) {
-let min_value = if let Some(min_value) = crate::search::facet::facet_min_value(
+let min_value = match crate::search::facet::facet_min_value(
self.index,
self.rtxn,
fid,
candidates.clone(),
-)? {
+)? { Some(min_value) => {
min_value
-} else {
+} _ => {
continue;
-};
+}};
-let max_value = if let Some(max_value) = crate::search::facet::facet_max_value(
+let max_value = match crate::search::facet::facet_max_value(
self.index,
self.rtxn,
fid,
candidates.clone(),
-)? {
+)? { Some(max_value) => {
max_value
-} else {
+} _ => {
continue;
-};
+}};
distribution.insert(name.to_string(), (min_value, max_value));
}

View File

@@ -37,12 +37,12 @@ where
let mut fd = LexicographicFacetDistribution { rtxn, db, field_id, callback };
let highest_level = get_highest_level(rtxn, db, field_id)?;
-if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
Ok(())
-} else {
+} _ => {
Ok(())
-}
+}}
}
pub fn count_iterate_over_facet_distribution<'t, CB>(

View File

@@ -53,17 +53,16 @@ where
let mut f = FacetRangeSearch { rtxn, db, field_id, left, right, universe, docids };
let highest_level = get_highest_level(rtxn, db, field_id)?;
-if let Some(starting_left_bound) =
-get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
-{
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
+{ Some(starting_left_bound) => {
let rightmost_bound =
Bound::Included(get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap()); // will not fail because get_first_facet_value succeeded
let group_size = usize::MAX;
f.run(highest_level, starting_left_bound, rightmost_bound, group_size)?;
Ok(())
-} else {
+} _ => {
Ok(())
-}
+}}
}
/// Fetch the document ids that have a facet with a value between the two given bounds

View File

@@ -36,7 +36,7 @@ pub fn ascending_facet_sort<'t>(
candidates: RoaringBitmap,
) -> Result<impl Iterator<Item = Result<(RoaringBitmap, &'t [u8])>> + 't> {
let highest_level = get_highest_level(rtxn, db, field_id)?;
-if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
let first_key = FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
let iter = db.range(rtxn, &(first_key..)).unwrap().take(usize::MAX);
@@ -46,9 +46,9 @@ pub fn ascending_facet_sort<'t>(
field_id,
stack: vec![(candidates, iter)],
}))
-} else {
+} _ => {
Ok(itertools::Either::Right(std::iter::empty()))
-}
+}}
}
struct AscendingFacetSort<'t, 'e> {

View File

@@ -19,7 +19,7 @@ pub fn descending_facet_sort<'t>(
candidates: RoaringBitmap,
) -> Result<impl Iterator<Item = Result<(RoaringBitmap, &'t [u8])>> + 't> {
let highest_level = get_highest_level(rtxn, db, field_id)?;
-if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
let first_key = FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
let last_bound = get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap();
let last_key = FacetGroupKey { field_id, level: highest_level, left_bound: last_bound };
@@ -30,9 +30,9 @@ pub fn descending_facet_sort<'t>(
field_id,
stack: vec![(candidates, iter, Bound::Included(last_bound))],
}))
-} else {
+} _ => {
Ok(itertools::Either::Right(std::iter::empty()))
-}
+}}
}
struct DescendingFacetSort<'t> {

View File

@@ -23,7 +23,7 @@ fn facet_extreme_value<'t>(
mut extreme_it: impl Iterator<Item = heed::Result<(RoaringBitmap, &'t [u8])>> + 't,
) -> Result<Option<f64>> {
let extreme_value =
-if let Some(extreme_value) = extreme_it.next() { extreme_value } else { return Ok(None) };
+match extreme_it.next() { Some(extreme_value) => { extreme_value } _ => { return Ok(None) }};
let (_, extreme_value) = extreme_value?;
OrderedF64Codec::bytes_decode(extreme_value)
.map(Some)
@@ -67,14 +67,14 @@ where
level0prefix.push(0);
let mut level0_iter_forward =
db.remap_types::<Bytes, DecodeIgnore>().prefix_iter(txn, level0prefix.as_slice())?;
-if let Some(first) = level0_iter_forward.next() {
+match level0_iter_forward.next() { Some(first) => {
let (first_key, _) = first?;
let first_key = FacetGroupKeyCodec::<BoundCodec>::bytes_decode(first_key)
.map_err(heed::Error::Decoding)?;
Ok(Some(first_key.left_bound))
-} else {
+} _ => {
Ok(None)
-}
+}}
}
/// Get the last facet value in the facet database
@@ -91,14 +91,14 @@ where
level0prefix.push(0);
let mut level0_iter_backward =
db.remap_types::<Bytes, DecodeIgnore>().rev_prefix_iter(txn, level0prefix.as_slice())?;
-if let Some(last) = level0_iter_backward.next() {
+match level0_iter_backward.next() { Some(last) => {
let (last_key, _) = last?;
let last_key = FacetGroupKeyCodec::<BoundCodec>::bytes_decode(last_key)
.map_err(heed::Error::Decoding)?;
Ok(Some(last_key.left_bound))
-} else {
+} _ => {
Ok(None)
-}
+}}
}
/// Get the height of the highest level in the facet database

View File

@ -146,7 +146,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
let mut cur_offset = 0usize; let mut cur_offset = 0usize;
macro_rules! maybe_add_to_results { macro_rules! maybe_add_to_results {
($candidates:expr) => { ($candidates:expr_2021) => {
maybe_add_to_results( maybe_add_to_results(
ctx, ctx,
from, from,

View File

@ -54,15 +54,15 @@ where
/// Insert the given value into the dedup-interner, and return /// Insert the given value into the dedup-interner, and return
/// its index. /// its index.
pub fn insert(&mut self, s: T) -> Interned<T> { pub fn insert(&mut self, s: T) -> Interned<T> {
if let Some(interned) = self.lookup.get(&s) { match self.lookup.get(&s) { Some(interned) => {
*interned *interned
} else { } _ => {
assert!(self.stable_store.len() < u16::MAX as usize); assert!(self.stable_store.len() < u16::MAX as usize);
self.stable_store.push(s.clone()); self.stable_store.push(s.clone());
let interned = Interned::from_raw(self.stable_store.len() as u16 - 1); let interned = Interned::from_raw(self.stable_store.len() as u16 - 1);
self.lookup.insert(s, interned); self.lookup.insert(s, interned);
interned interned
} }}
} }
/// Get a reference to the interned value. /// Get a reference to the interned value.
pub fn get(&self, interned: Interned<T>) -> &T { pub fn get(&self, interned: Interned<T>) -> &T {
@ -117,7 +117,7 @@ impl<T> FixedSizeInterner<T> {
pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> { pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> {
MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData } MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData }
} }
pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> { pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> + use<T> {
(0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16)) (0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16))
} }
pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> { pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> {
@ -167,7 +167,7 @@ impl<T> Interner<T> {
pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> { pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> {
MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData } MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData }
} }
pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> { pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> + use<T> {
(0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16)) (0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16))
} }
pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> { pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> {
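Note: the `+ use<T>` added to `indexes()` is the precise-capturing syntax that edition 2024 leans on. Under the new edition, a return-position `impl Trait` in a method captures all in-scope generics and lifetimes by default, including the `&self` borrow, so the returned iterator would be tied to `self`; `use<T>` states that only the type parameter is captured and no lifetime is. A small sketch of the idea with an invented `Counter` type:

struct Counter {
    upto: u16,
}

impl Counter {
    // Without `use<>`, edition 2024 would let this return type capture the `&self`
    // lifetime by default, so callers could not keep the iterator after `self` is gone.
    fn indexes(&self) -> impl Iterator<Item = u16> + use<> {
        0..self.upto
    }
}

fn main() {
    let it = {
        let counter = Counter { upto: 3 };
        counter.indexes() // the iterator outlives `counter` because nothing is captured
    };
    assert_eq!(it.collect::<Vec<_>>(), vec![0, 1, 2]);
}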

View File

@ -206,11 +206,11 @@ struct DetailedLoggerFinish<'ctx> {
impl<'ctx> DetailedLoggerFinish<'ctx> { impl<'ctx> DetailedLoggerFinish<'ctx> {
fn cur_file(&mut self) -> &mut BufWriter<File> { fn cur_file(&mut self) -> &mut BufWriter<File> {
if let Some(file) = self.file_for_internal_state.as_mut() { match self.file_for_internal_state.as_mut() { Some(file) => {
file file
} else { } _ => {
&mut self.index_file &mut self.index_file
} }}
} }
fn pop_rr_action(&mut self) { fn pop_rr_action(&mut self) {
self.file_for_internal_state = None; self.file_for_internal_state = None;
@ -531,11 +531,11 @@ fill: \"#B6E2D3\"
paths: Vec<Vec<Interned<R::Condition>>>, paths: Vec<Vec<Interned<R::Condition>>>,
) -> Result<()> { ) -> Result<()> {
self.make_new_file_for_internal_state_if_needed()?; self.make_new_file_for_internal_state_if_needed()?;
let file = if let Some(file) = self.file_for_internal_state.as_mut() { let file = match self.file_for_internal_state.as_mut() { Some(file) => {
file file
} else { } _ => {
&mut self.index_file &mut self.index_file
}; }};
writeln!(file, "Path {{")?; writeln!(file, "Path {{")?;
for (path_idx, condition_indexes) in paths.iter().enumerate() { for (path_idx, condition_indexes) in paths.iter().enumerate() {
writeln!(file, "{path_idx} {{")?; writeln!(file, "{path_idx} {{")?;

View File

@ -853,7 +853,7 @@ fn check_sort_criteria(
let sortable_fields = ctx.index.sortable_fields(ctx.txn)?; let sortable_fields = ctx.index.sortable_fields(ctx.txn)?;
for asc_desc in sort_criteria { for asc_desc in sort_criteria {
match asc_desc.member() { match asc_desc.member() {
Member::Field(ref field) if !crate::is_faceted(field, &sortable_fields) => { Member::Field(field) if !crate::is_faceted(field, &sortable_fields) => {
let (valid_fields, hidden_fields) = let (valid_fields, hidden_fields) =
ctx.index.remove_hidden_fields(ctx.txn, sortable_fields)?; ctx.index.remove_hidden_fields(ctx.txn, sortable_fields)?;

View File

@ -266,11 +266,11 @@ pub fn partially_initialized_term_from_word(
} }
fn find_split_words(ctx: &mut SearchContext<'_>, word: &str) -> Result<Option<Interned<Phrase>>> { fn find_split_words(ctx: &mut SearchContext<'_>, word: &str) -> Result<Option<Interned<Phrase>>> {
if let Some((l, r)) = split_best_frequency(ctx, word)? { match split_best_frequency(ctx, word)? { Some((l, r)) => {
Ok(Some(ctx.phrase_interner.insert(Phrase { words: vec![Some(l), Some(r)] }))) Ok(Some(ctx.phrase_interner.insert(Phrase { words: vec![Some(l), Some(r)] })))
} else { } _ => {
Ok(None) Ok(None)
} }}
} }
impl Interned<QueryTerm> { impl Interned<QueryTerm> {

View File

@ -110,7 +110,7 @@ impl ExactTerm {
pub fn interned_words<'ctx>( pub fn interned_words<'ctx>(
&self, &self,
ctx: &'ctx SearchContext<'ctx>, ctx: &'ctx SearchContext<'ctx>,
) -> impl Iterator<Item = Option<Interned<String>>> + 'ctx { ) -> impl Iterator<Item = Option<Interned<String>>> + 'ctx + use<'ctx> {
match *self { match *self {
ExactTerm::Phrase(phrase) => { ExactTerm::Phrase(phrase) => {
let phrase = ctx.phrase_interner.get(phrase); let phrase = ctx.phrase_interner.get(phrase);

View File

@ -193,7 +193,7 @@ pub fn located_query_terms_from_tokens(
pub fn number_of_typos_allowed<'ctx>( pub fn number_of_typos_allowed<'ctx>(
ctx: &SearchContext<'ctx>, ctx: &SearchContext<'ctx>,
) -> Result<impl Fn(&str) -> u8 + 'ctx> { ) -> Result<impl Fn(&str) -> u8 + 'ctx + use<'ctx>> {
let authorize_typos = ctx.index.authorize_typos(ctx.txn)?; let authorize_typos = ctx.index.authorize_typos(ctx.txn)?;
let min_len_one_typo = ctx.index.min_word_len_one_typo(ctx.txn)?; let min_len_one_typo = ctx.index.min_word_len_one_typo(ctx.txn)?;
let min_len_two_typos = ctx.index.min_word_len_two_typos(ctx.txn)?; let min_len_two_typos = ctx.index.min_word_len_two_typos(ctx.txn)?;

View File

@ -77,11 +77,11 @@ pub fn compute_docids(
if universe.is_disjoint(ctx.get_phrase_docids(left_phrase)?) { if universe.is_disjoint(ctx.get_phrase_docids(left_phrase)?) {
continue; continue;
} }
} else if let Some(left_word_docids) = ctx.word_docids(Some(universe), left_word)? { } else { match ctx.word_docids(Some(universe), left_word)? { Some(left_word_docids) => {
if left_word_docids.is_empty() { if left_word_docids.is_empty() {
continue; continue;
} }
} } _ => {}}}
} }
for (right_word, right_phrase) in right_derivs { for (right_word, right_phrase) in right_derivs {

View File

@ -195,15 +195,15 @@ pub fn compute_phrase_docids(
} }
let mut candidates = None; let mut candidates = None;
for word in words.iter().flatten().copied() { for word in words.iter().flatten().copied() {
if let Some(word_docids) = ctx.word_docids(None, Word::Original(word))? { match ctx.word_docids(None, Word::Original(word))? { Some(word_docids) => {
if let Some(candidates) = candidates.as_mut() { if let Some(candidates) = candidates.as_mut() {
*candidates &= word_docids; *candidates &= word_docids;
} else { } else {
candidates = Some(word_docids); candidates = Some(word_docids);
} }
} else { } _ => {
return Ok(RoaringBitmap::new()); return Ok(RoaringBitmap::new());
} }}
} }
let Some(mut candidates) = candidates else { let Some(mut candidates) = candidates else {

View File

@ -196,10 +196,10 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,
universe: &RoaringBitmap, universe: &RoaringBitmap,
) -> Result<Option<RankingRuleOutput<Query>>> { ) -> Result<Option<RankingRuleOutput<Query>>> {
let iter = self.iter.as_mut().unwrap(); let iter = self.iter.as_mut().unwrap();
if let Some(mut bucket) = iter.next_bucket()? { match iter.next_bucket()? { Some(mut bucket) => {
bucket.candidates &= universe; bucket.candidates &= universe;
Ok(Some(bucket)) Ok(Some(bucket))
} else { } _ => {
let query = self.original_query.as_ref().unwrap().clone(); let query = self.original_query.as_ref().unwrap().clone();
Ok(Some(RankingRuleOutput { Ok(Some(RankingRuleOutput {
query, query,
@ -211,7 +211,7 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,
value: serde_json::Value::Null, value: serde_json::Value::Null,
}), }),
})) }))
} }}
} }
#[tracing::instrument(level = "trace", skip_all, target = "search::sort")] #[tracing::instrument(level = "trace", skip_all, target = "search::sort")]

View File

@ -27,7 +27,7 @@ pub fn default_db_snapshot_settings_for_test(name: Option<&str>) -> (insta::Sett
} }
#[macro_export] #[macro_export]
macro_rules! milli_snap { macro_rules! milli_snap {
($value:expr, $name:expr) => { ($value:expr_2021, $name:expr_2021) => {
let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None); let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
settings.bind(|| { settings.bind(|| {
let snap = $value; let snap = $value;
@ -37,7 +37,7 @@ macro_rules! milli_snap {
} }
}); });
}; };
($value:expr) => { ($value:expr_2021) => {
let (settings, test_name) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None); let (settings, test_name) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
settings.bind(|| { settings.bind(|| {
let snap = $value; let snap = $value;
@ -47,7 +47,7 @@ macro_rules! milli_snap {
} }
}); });
}; };
($value:expr, @$inline:literal) => { ($value:expr_2021, @$inline:literal) => {
let (settings, test_name) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None); let (settings, test_name) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
settings.bind(|| { settings.bind(|| {
let snap = $value; let snap = $value;
@ -61,7 +61,7 @@ macro_rules! milli_snap {
} }
}); });
}; };
($value:expr, $name:expr, @$inline:literal) => { ($value:expr_2021, $name:expr_2021, @$inline:literal) => {
let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None); let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
settings.bind(|| { settings.bind(|| {
let snap = $value; let snap = $value;
@ -142,7 +142,7 @@ db_snap!(index, word_docids, "some_identifier", @"");
*/ */
#[macro_export] #[macro_export]
macro_rules! db_snap { macro_rules! db_snap {
($index:ident, $db_name:ident, $name:expr) => { ($index:ident, $db_name:ident, $name:expr_2021) => {
let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(Some( let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(Some(
&format!("{}", $name), &format!("{}", $name),
)); ));
@ -178,7 +178,7 @@ macro_rules! db_snap {
} }
}); });
}; };
($index:ident, $db_name:ident, $name:expr, @$inline:literal) => { ($index:ident, $db_name:ident, $name:expr_2021, @$inline:literal) => {
let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(Some(&format!("{}", $name))); let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(Some(&format!("{}", $name)));
settings.bind(|| { settings.bind(|| {
let snap = $crate::full_snap_of_db!($index, $db_name); let snap = $crate::full_snap_of_db!($index, $db_name);
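Note: the macro arms switch their fragment specifier from `$x:expr` to `$x:expr_2021`. In edition 2024 the `expr` matcher starts accepting newer expression forms (notably `const { ... }` blocks), so the migration rewrites existing macros to `expr_2021` to keep their matching behaviour exactly as it was, while macros that want the wider behaviour keep plain `expr`. A small, invented macro showing why that matters for arm selection:

macro_rules! describe {
    // With plain `expr` under edition 2024, a `const { ... }` argument could match
    // this first arm; `expr_2021` keeps routing it to the dedicated arm below.
    ($e:expr_2021) => {
        println!("plain expression: {}", $e)
    };
    (const $b:block) => {
        println!("const block handled by its own arm")
    };
}

fn main() {
    describe!(1 + 1);
    describe!(const { 2 * 2 });
}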

View File

@ -647,7 +647,7 @@ mod comparison_bench {
// insert one document // insert one document
// //
for _ in 0..nbr_doc { for _ in 0..nbr_doc {
index.insert(&mut txn, 0, &r.gen(), &once(1).collect()); index.insert(&mut txn, 0, &r.r#gen(), &once(1).collect());
} }
let time_spent = timer.elapsed().as_millis(); let time_spent = timer.elapsed().as_millis();
println!(" add {nbr_doc} : {time_spent}ms"); println!(" add {nbr_doc} : {time_spent}ms");
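Note: `r.gen()` becomes `r.r#gen()` because `gen` is a reserved keyword in edition 2024 (held back for future `gen` blocks), so rand's `Rng::gen` method has to be called through a raw identifier. A tiny sketch, assuming the `rand` crate with its 0.8-style API:

use rand::Rng;

fn roll_d6() -> u8 {
    let mut rng = rand::thread_rng();
    // `rng.gen::<u8>()` no longer parses under edition 2024; the raw identifier does.
    // (Newer rand releases also expose `random()` as an alternative spelling.)
    rng.r#gen::<u8>() % 6 + 1
}

fn main() {
    println!("rolled a {}", roll_d6());
}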

View File

@ -143,11 +143,11 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
strings_key_buffer.extend_from_slice(docid_bytes); strings_key_buffer.extend_from_slice(docid_bytes);
// We insert the document id on the Del and the Add side if the field exists. // We insert the document id on the Del and the Add side if the field exists.
let (ref mut del_exists, ref mut add_exists) = let (del_exists, add_exists) =
facet_exists_docids.entry(field_id).or_default(); facet_exists_docids.entry(field_id).or_default();
let (ref mut del_is_null, ref mut add_is_null) = let (del_is_null, add_is_null) =
facet_is_null_docids.entry(field_id).or_default(); facet_is_null_docids.entry(field_id).or_default();
let (ref mut del_is_empty, ref mut add_is_empty) = let (del_is_empty, add_is_empty) =
facet_is_empty_docids.entry(field_id).or_default(); facet_is_empty_docids.entry(field_id).or_default();
if del_value.is_some() { if del_value.is_some() {
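Note: dropping the explicit `ref mut` binders here (and the earlier `Member::Field(ref field)` to `Member::Field(field)` change) lines up with the edition-2024 match-ergonomics reservations: when the scrutinee is already matched through a reference, the bindings are `ref`/`ref mut` by default, and spelling the binding mode out becomes an error under the `rust_2024_incompatible_pat` migration. A minimal sketch with invented types, mirroring the `entry(...).or_default()` shape above:

use std::collections::HashMap;

fn bump(counts: &mut HashMap<&'static str, (u32, u32)>) {
    // Edition-2021 spelling:
    //   let (ref mut hits, ref mut total) = counts.entry("docs").or_default();
    // Edition-2024-compatible spelling; the bindings are still `&mut u32` because the
    // scrutinee is a `&mut (u32, u32)` and the default binding mode is already `ref mut`.
    let (hits, total) = counts.entry("docs").or_default();
    *hits += 1;
    *total += 1;
}

fn main() {
    let mut counts = HashMap::new();
    bump(&mut counts);
    assert_eq!(counts["docs"], (1, 1));
}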

View File

@ -453,14 +453,14 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
} in extractors } in extractors
{ {
let remove_from_user_provided = let remove_from_user_provided =
if let ExtractionAction::DocumentOperation(DocumentOperation { match action
{ ExtractionAction::DocumentOperation(DocumentOperation {
remove_from_user_provided, remove_from_user_provided,
}) = action }) => {
{
remove_from_user_provided remove_from_user_provided
} else { } _ => {
Default::default() Default::default()
}; }};
results.push(ExtractedVectorPoints { results.push(ExtractedVectorPoints {
manual_vectors: writer_into_reader(manual_vectors_writer)?, manual_vectors: writer_into_reader(manual_vectors_writer)?,
@ -789,11 +789,11 @@ fn embed_chunks(
match embedder.embed_index(text_chunks, request_threads) { match embedder.embed_index(text_chunks, request_threads) {
Ok(chunks) => Ok(chunks), Ok(chunks) => Ok(chunks),
Err(error) => { Err(error) => {
if let FaultSource::Bug = error.fault { match error.fault { FaultSource::Bug => {
Err(crate::Error::InternalError(crate::InternalError::VectorEmbeddingError( Err(crate::Error::InternalError(crate::InternalError::VectorEmbeddingError(
error.into(), error.into(),
))) )))
} else { } _ => {
let mut msg = let mut msg =
format!(r"While embedding documents for embedder `{embedder_name}`: {error}"); format!(r"While embedding documents for embedder `{embedder_name}`: {error}");
@ -827,7 +827,7 @@ fn embed_chunks(
} }
Err(crate::Error::UserError(crate::UserError::DocumentEmbeddingError(msg))) Err(crate::Error::UserError(crate::UserError::DocumentEmbeddingError(msg)))
} }}
} }
} }
} }

View File

@ -87,13 +87,13 @@ pub fn writer_into_reader(
/// We use memory mapping inside. So, according to the Rust community, it's unsafe. /// We use memory mapping inside. So, according to the Rust community, it's unsafe.
pub unsafe fn as_cloneable_grenad( pub unsafe fn as_cloneable_grenad(
reader: &grenad::Reader<BufReader<File>>, reader: &grenad::Reader<BufReader<File>>,
) -> Result<grenad::Reader<CursorClonableMmap>> { ) -> Result<grenad::Reader<CursorClonableMmap>> { unsafe {
let file = reader.get_ref().get_ref(); let file = reader.get_ref().get_ref();
let mmap = memmap2::Mmap::map(file)?; let mmap = memmap2::Mmap::map(file)?;
let cursor = io::Cursor::new(ClonableMmap::from(mmap)); let cursor = io::Cursor::new(ClonableMmap::from(mmap));
let reader = grenad::Reader::new(cursor)?; let reader = grenad::Reader::new(cursor)?;
Ok(reader) Ok(reader)
} }}
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct GrenadParameters { pub struct GrenadParameters {
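Note: the body of `as_cloneable_grenad` gains an explicit `unsafe { ... }` block because edition 2024 turns the `unsafe_op_in_unsafe_fn` lint on by default: an `unsafe fn` no longer acts as an implicit unsafe block for its own body, so each unsafe operation inside it must be wrapped. A stripped-down, std-only sketch of the same shape:

/// # Safety
/// `ptr` must be non-null, aligned, and point to a valid, initialized `u32`.
unsafe fn read_first(ptr: *const u32) -> u32 {
    // Explicit block required once unsafe_op_in_unsafe_fn applies.
    unsafe { *ptr }
}

fn main() {
    let value = 42u32;
    // Callers still need their own unsafe block, as before.
    let read = unsafe { read_first(&value) };
    assert_eq!(read, 42);
}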

View File

@ -367,7 +367,7 @@ where
match lmdb_writer_rx.clone().recv_timeout(std::time::Duration::from_millis(500)) { match lmdb_writer_rx.clone().recv_timeout(std::time::Duration::from_millis(500)) {
Err(status) => { Err(status) => {
if let Some(typed_chunks) = chunk_accumulator.pop_longest() { match chunk_accumulator.pop_longest() { Some(typed_chunks) => {
let (docids, is_merged_database) = let (docids, is_merged_database) =
write_typed_chunk_into_index(self.wtxn, self.index, &settings_diff, typed_chunks, &mut modified_docids)?; write_typed_chunk_into_index(self.wtxn, self.index, &settings_diff, typed_chunks, &mut modified_docids)?;
if !docids.is_empty() { if !docids.is_empty() {
@ -387,11 +387,11 @@ where
}); });
} }
// If no more chunks remain in the chunk accumulator and the channel is disconnected, break. // If no more chunks remain in the chunk accumulator and the channel is disconnected, break.
} else if status == crossbeam_channel::RecvTimeoutError::Disconnected { } _ => if status == crossbeam_channel::RecvTimeoutError::Disconnected {
break; break;
} else { } else {
rayon::yield_now(); rayon::yield_now();
} }}
} }
Ok(result) => { Ok(result) => {
let typed_chunk = match result? { let typed_chunk = match result? {

View File

@ -91,7 +91,7 @@ fn create_fields_mapping(
.iter() .iter()
// we sort by id here to ensure a deterministic mapping of the fields, that preserves // we sort by id here to ensure a deterministic mapping of the fields, that preserves
// the original ordering. // the original ordering.
.sorted_by_key(|(&id, _)| id) .sorted_by_key(|&(&id, _)| id)
.map(|(field, name)| match index_field_map.id(name) { .map(|(field, name)| match index_field_map.id(name) {
Some(id) => Ok((*field, id)), Some(id) => Ok((*field, id)),
None => index_field_map None => index_field_map
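Note: the `sorted_by_key` closure gains a leading `&` on its parameter pattern for a reason related to the `ref mut` removals above: when a pattern is matching behind a reference, edition 2024 no longer accepts an inner reference pattern such as `&id` unless the outer `&` is written out, so the whole pattern is made explicit. A tiny sketch with invented data, using std `sort_by_key` in place of itertools' `sorted_by_key`:

fn main() {
    // Shaped like iterating a fields-ids map by reference: items are pairs of references.
    let fields: Vec<(&u16, &str)> = vec![(&2, "title"), (&0, "id"), (&1, "overview")];
    let mut sorted = fields.clone();
    // Edition-2021 spelling: `sorted.sort_by_key(|(&id, _)| id);`
    // Edition-2024-compatible spelling: the explicit outer `&` keeps the inner `&id`
    // reference pattern under a by-value binding mode.
    sorted.sort_by_key(|&(&id, _)| id);
    let ids: Vec<u16> = sorted.iter().map(|&(&id, _)| id).collect();
    assert_eq!(ids, vec![0, 1, 2]);
}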

View File

@ -689,14 +689,14 @@ pub(crate) fn write_typed_chunk_into_index(
.unwrap(); .unwrap();
if embeddings.embedding_count() > usize::from(u8::MAX) { if embeddings.embedding_count() > usize::from(u8::MAX) {
let external_docid = if let Ok(Some(Ok(index))) = index let external_docid = match index
.external_id_of(wtxn, std::iter::once(docid)) .external_id_of(wtxn, std::iter::once(docid))
.map(|it| it.into_iter().next()) .map(|it| it.into_iter().next())
{ { Ok(Some(Ok(index))) => {
index index
} else { } _ => {
format!("internal docid={docid}") format!("internal docid={docid}")
}; }};
return Err(crate::Error::UserError(crate::UserError::TooManyVectors( return Err(crate::Error::UserError(crate::UserError::TooManyVectors(
external_docid, external_docid,
embeddings.embedding_count(), embeddings.embedding_count(),

View File

@ -365,11 +365,11 @@ impl<'doc> DelAddFacetValue<'doc> {
match kind { match kind {
FacetKind::Number => { FacetKind::Number => {
let key = (fid, value); let key = (fid, value);
if let Some(DelAdd::Deletion) = self.f64s.get(&key) { match self.f64s.get(&key) { Some(DelAdd::Deletion) => {
self.f64s.remove(&key); self.f64s.remove(&key);
} else { } _ => {
self.f64s.insert(key, DelAdd::Addition); self.f64s.insert(key, DelAdd::Addition);
} }}
} }
FacetKind::String => { FacetKind::String => {
if let Ok(s) = std::str::from_utf8(&value) { if let Ok(s) = std::str::from_utf8(&value) {
@ -386,11 +386,11 @@ impl<'doc> DelAddFacetValue<'doc> {
match kind { match kind {
FacetKind::Number => { FacetKind::Number => {
let key = (fid, value); let key = (fid, value);
if let Some(DelAdd::Addition) = self.f64s.get(&key) { match self.f64s.get(&key) { Some(DelAdd::Addition) => {
self.f64s.remove(&key); self.f64s.remove(&key);
} else { } _ => {
self.f64s.insert(key, DelAdd::Deletion); self.f64s.insert(key, DelAdd::Deletion);
} }}
} }
FacetKind::String => { FacetKind::String => {
if let Ok(s) = std::str::from_utf8(&value) { if let Ok(s) = std::str::from_utf8(&value) {

View File

@ -95,7 +95,7 @@ pub struct FrozenGeoExtractorData<'extractor> {
impl FrozenGeoExtractorData<'_> { impl FrozenGeoExtractorData<'_> {
pub fn iter_and_clear_removed( pub fn iter_and_clear_removed(
&mut self, &mut self,
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_> { ) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_ + use<'_>> {
Ok(mem::take(&mut self.removed) Ok(mem::take(&mut self.removed)
.iter() .iter()
.copied() .copied()
@ -105,7 +105,7 @@ impl FrozenGeoExtractorData<'_> {
pub fn iter_and_clear_inserted( pub fn iter_and_clear_inserted(
&mut self, &mut self,
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_> { ) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_ + use<'_>> {
Ok(mem::take(&mut self.inserted) Ok(mem::take(&mut self.inserted)
.iter() .iter()
.copied() .copied()

View File

@ -111,9 +111,9 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
let prompt = chunks.prompt(); let prompt = chunks.prompt();
let old_vectors = old_vectors.vectors_for_key(embedder_name)?.unwrap(); let old_vectors = old_vectors.vectors_for_key(embedder_name)?.unwrap();
if let Some(new_vectors) = new_vectors.as_ref().and_then(|new_vectors| { match new_vectors.as_ref().and_then(|new_vectors| {
new_vectors.vectors_for_key(embedder_name).transpose() new_vectors.vectors_for_key(embedder_name).transpose()
}) { }) { Some(new_vectors) => {
let new_vectors = new_vectors?; let new_vectors = new_vectors?;
if old_vectors.regenerate != new_vectors.regenerate { if old_vectors.regenerate != new_vectors.regenerate {
chunks.set_regenerate(update.docid(), new_vectors.regenerate); chunks.set_regenerate(update.docid(), new_vectors.regenerate);
@ -159,7 +159,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
)?; )?;
} }
} }
} else if old_vectors.regenerate { } _ => if old_vectors.regenerate {
let old_rendered = prompt.render_document( let old_rendered = prompt.render_document(
update.external_document_id(), update.external_document_id(),
update.current( update.current(
@ -188,7 +188,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
&unused_vectors_distribution, &unused_vectors_distribution,
)?; )?;
} }
} }}
} }
} }
DocumentChange::Insertion(insertion) => { DocumentChange::Insertion(insertion) => {
@ -202,9 +202,9 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
let embedder_name = chunks.embedder_name(); let embedder_name = chunks.embedder_name();
let prompt = chunks.prompt(); let prompt = chunks.prompt();
// if no inserted vectors, then regenerate: true + no embeddings => autogenerate // if no inserted vectors, then regenerate: true + no embeddings => autogenerate
if let Some(new_vectors) = new_vectors.as_ref().and_then(|new_vectors| { match new_vectors.as_ref().and_then(|new_vectors| {
new_vectors.vectors_for_key(embedder_name).transpose() new_vectors.vectors_for_key(embedder_name).transpose()
}) { }) { Some(new_vectors) => {
let new_vectors = new_vectors?; let new_vectors = new_vectors?;
chunks.set_regenerate(insertion.docid(), new_vectors.regenerate); chunks.set_regenerate(insertion.docid(), new_vectors.regenerate);
if let Some(embeddings) = new_vectors.embeddings { if let Some(embeddings) = new_vectors.embeddings {
@ -233,7 +233,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
&unused_vectors_distribution, &unused_vectors_distribution,
)?; )?;
} }
} else { } _ => {
let rendered = prompt.render_document( let rendered = prompt.render_document(
insertion.external_document_id(), insertion.external_document_id(),
insertion.inserted(), insertion.inserted(),
@ -246,7 +246,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
rendered, rendered,
&unused_vectors_distribution, &unused_vectors_distribution,
)?; )?;
} }}
} }
} }
} }
@ -424,11 +424,11 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
Ok(()) Ok(())
} }
Err(error) => { Err(error) => {
if let FaultSource::Bug = error.fault { match error.fault { FaultSource::Bug => {
Err(crate::Error::InternalError(crate::InternalError::VectorEmbeddingError( Err(crate::Error::InternalError(crate::InternalError::VectorEmbeddingError(
error.into(), error.into(),
))) )))
} else { } _ => {
let mut msg = format!( let mut msg = format!(
r"While embedding documents for embedder `{embedder_name}`: {error}" r"While embedding documents for embedder `{embedder_name}`: {error}"
); );
@ -463,7 +463,7 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
} }
Err(crate::Error::UserError(crate::UserError::DocumentEmbeddingError(msg))) Err(crate::Error::UserError(crate::UserError::DocumentEmbeddingError(msg)))
} }}
} }
}; };
texts.clear(); texts.clear();

View File

@ -19,7 +19,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
// make sure that we have a declared primary key, either fetching it from the index or attempting to guess it. // make sure that we have a declared primary key, either fetching it from the index or attempting to guess it.
// do we have an existing declared primary key? // do we have an existing declared primary key?
let (primary_key, has_changed) = if let Some(primary_key_from_db) = index.primary_key(rtxn)? { let (primary_key, has_changed) = match index.primary_key(rtxn)? { Some(primary_key_from_db) => {
// did we request a primary key in the operation? // did we request a primary key in the operation?
match primary_key_from_op { match primary_key_from_op {
// we did, and it is different from the DB one // we did, and it is different from the DB one
@ -30,7 +30,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
} }
_ => (primary_key_from_db, false), _ => (primary_key_from_db, false),
} }
} else { } _ => {
// no primary key in the DB => let's set one // no primary key in the DB => let's set one
// did we request a primary key in the operation? // did we request a primary key in the operation?
let primary_key = if let Some(primary_key_from_op) = primary_key_from_op { let primary_key = if let Some(primary_key_from_op) = primary_key_from_op {
@ -76,7 +76,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
} }
}; };
(primary_key, true) (primary_key, true)
}; }};
match PrimaryKey::new_or_insert(primary_key, new_fields_ids_map) { match PrimaryKey::new_or_insert(primary_key, new_fields_ids_map) {
Ok(primary_key) => Ok(Ok((primary_key, has_changed))), Ok(primary_key) => Ok(Ok((primary_key, has_changed))),

View File

@ -95,16 +95,16 @@ fn compute_word_fst(index: &Index, wtxn: &mut RwTxn) -> Result<Option<PrefixDelt
let (word_fst_mmap, prefix_data) = word_fst_builder.build(index, &rtxn)?; let (word_fst_mmap, prefix_data) = word_fst_builder.build(index, &rtxn)?;
index.main.remap_types::<Str, Bytes>().put(wtxn, WORDS_FST_KEY, &word_fst_mmap)?; index.main.remap_types::<Str, Bytes>().put(wtxn, WORDS_FST_KEY, &word_fst_mmap)?;
if let Some(PrefixData { prefixes_fst_mmap, prefix_delta }) = prefix_data { match prefix_data { Some(PrefixData { prefixes_fst_mmap, prefix_delta }) => {
index.main.remap_types::<Str, Bytes>().put( index.main.remap_types::<Str, Bytes>().put(
wtxn, wtxn,
WORDS_PREFIXES_FST_KEY, WORDS_PREFIXES_FST_KEY,
&prefixes_fst_mmap, &prefixes_fst_mmap,
)?; )?;
Ok(Some(prefix_delta)) Ok(Some(prefix_delta))
} else { } _ => {
Ok(None) Ok(None)
} }}
} }
#[tracing::instrument(level = "trace", skip_all, target = "indexing::facet_search")] #[tracing::instrument(level = "trace", skip_all, target = "indexing::facet_search")]

View File

@ -233,13 +233,13 @@ impl<'doc> VectorDocumentFromVersions<'doc> {
embedders: &'doc EmbeddingConfigs, embedders: &'doc EmbeddingConfigs,
) -> Result<Option<Self>> { ) -> Result<Option<Self>> {
let document = DocumentFromVersions::new(versions); let document = DocumentFromVersions::new(versions);
if let Some(vectors_field) = document.vectors_field()? { match document.vectors_field()? { Some(vectors_field) => {
let vectors = RawMap::from_raw_value_and_hasher(vectors_field, FxBuildHasher, bump) let vectors = RawMap::from_raw_value_and_hasher(vectors_field, FxBuildHasher, bump)
.map_err(UserError::SerdeJson)?; .map_err(UserError::SerdeJson)?;
Ok(Some(Self { external_document_id, vectors, embedders })) Ok(Some(Self { external_document_id, vectors, embedders }))
} else { } _ => {
Ok(None) Ok(None)
} }}
} }
} }

View File

@ -36,11 +36,11 @@ impl<'a> WordFstBuilder<'a> {
} }
self.word_fst_builder.register(deladd, right, &mut |bytes, deladd, is_modified| { self.word_fst_builder.register(deladd, right, &mut |bytes, deladd, is_modified| {
if let Some(prefix_fst_builder) = &mut self.prefix_fst_builder { match &mut self.prefix_fst_builder { Some(prefix_fst_builder) => {
prefix_fst_builder.insert_word(bytes, deladd, is_modified) prefix_fst_builder.insert_word(bytes, deladd, is_modified)
} else { } _ => {
Ok(()) Ok(())
} }}
})?; })?;
Ok(()) Ok(())
@ -52,11 +52,11 @@ impl<'a> WordFstBuilder<'a> {
rtxn: &heed::RoTxn, rtxn: &heed::RoTxn,
) -> Result<(Mmap, Option<PrefixData>)> { ) -> Result<(Mmap, Option<PrefixData>)> {
let words_fst_mmap = self.word_fst_builder.build(&mut |bytes, deladd, is_modified| { let words_fst_mmap = self.word_fst_builder.build(&mut |bytes, deladd, is_modified| {
if let Some(prefix_fst_builder) = &mut self.prefix_fst_builder { match &mut self.prefix_fst_builder { Some(prefix_fst_builder) => {
prefix_fst_builder.insert_word(bytes, deladd, is_modified) prefix_fst_builder.insert_word(bytes, deladd, is_modified)
} else { } _ => {
Ok(()) Ok(())
} }}
})?; })?;
let prefix_data = self let prefix_data = self

View File

@ -1401,18 +1401,18 @@ impl InnerIndexSettingsDiff {
pub fn reindex_searchable_id(&self, id: FieldId) -> Option<DelAddOperation> { pub fn reindex_searchable_id(&self, id: FieldId) -> Option<DelAddOperation> {
if self.cache_reindex_searchable_without_user_defined || self.cache_exact_attributes { if self.cache_reindex_searchable_without_user_defined || self.cache_exact_attributes {
Some(DelAddOperation::DeletionAndAddition) Some(DelAddOperation::DeletionAndAddition)
} else if let Some(only_additional_fields) = &self.only_additional_fields { } else { match &self.only_additional_fields { Some(only_additional_fields) => {
let additional_field = self.new.fields_ids_map.name(id).unwrap(); let additional_field = self.new.fields_ids_map.name(id).unwrap();
if only_additional_fields.contains(additional_field) { if only_additional_fields.contains(additional_field) {
Some(DelAddOperation::Addition) Some(DelAddOperation::Addition)
} else { } else {
None None
} }
} else if self.cache_user_defined_searchables { } _ => if self.cache_user_defined_searchables {
Some(DelAddOperation::DeletionAndAddition) Some(DelAddOperation::DeletionAndAddition)
} else { } else {
None None
} }}}
} }
/// List the faceted fields from the inner fid map. /// List the faceted fields from the inner fid map.
@ -1848,14 +1848,14 @@ pub fn validate_embedding_settings(
} }
} }
indexing_embedder = if let Setting::Set(mut embedder) = indexing_embedder { indexing_embedder = match indexing_embedder { Setting::Set(mut embedder) => {
embedder.document_template = validate_prompt( embedder.document_template = validate_prompt(
name, name,
embedder.document_template, embedder.document_template,
embedder.document_template_max_bytes, embedder.document_template_max_bytes,
)?; )?;
if let Some(source) = embedder.source.set() { match embedder.source.set() { Some(source) => {
let search_embedder = match embedder.search_embedder.clone() { let search_embedder = match embedder.search_embedder.clone() {
Setting::Set(search_embedder) => Setting::Set(deserialize_sub_embedder( Setting::Set(search_embedder) => Setting::Set(deserialize_sub_embedder(
search_embedder, search_embedder,
@ -1895,16 +1895,16 @@ pub fn validate_embedding_settings(
&embedder.binary_quantized, &embedder.binary_quantized,
&embedder.distribution, &embedder.distribution,
)?; )?;
} else { } _ => {
return Err(UserError::MissingSourceForNested { return Err(UserError::MissingSourceForNested {
embedder_name: NestingContext::Indexing.embedder_name_with_context(name), embedder_name: NestingContext::Indexing.embedder_name_with_context(name),
} }
.into()); .into());
} }}
Setting::Set(embedder) Setting::Set(embedder)
} else { } _ => {
indexing_embedder indexing_embedder
}; }};
} }
} }
Ok(Setting::Set(EmbeddingSettings { Ok(Setting::Set(EmbeddingSettings {

View File

@ -239,15 +239,15 @@ impl Embedder {
let model = BertModel::load(vb, &config).map_err(NewEmbedderError::load_model)?; let model = BertModel::load(vb, &config).map_err(NewEmbedderError::load_model)?;
if let Some(pp) = tokenizer.get_padding_mut() { match tokenizer.get_padding_mut() { Some(pp) => {
pp.strategy = tokenizers::PaddingStrategy::BatchLongest pp.strategy = tokenizers::PaddingStrategy::BatchLongest
} else { } _ => {
let pp = PaddingParams { let pp = PaddingParams {
strategy: tokenizers::PaddingStrategy::BatchLongest, strategy: tokenizers::PaddingStrategy::BatchLongest,
..Default::default() ..Default::default()
}; };
tokenizer.with_padding(Some(pp)); tokenizer.with_padding(Some(pp));
} }}
let mut this = Self { let mut this = Self {
model, model,

View File

@ -348,11 +348,11 @@ impl ArroyWrapper {
searcher.candidates(filter); searcher.candidates(filter);
} }
if let Some(mut ret) = searcher.by_item(rtxn, item)? { match searcher.by_item(rtxn, item)? { Some(mut ret) => {
results.append(&mut ret); results.append(&mut ret);
} else { } _ => {
break; break;
} }}
} }
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance)); results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
Ok(results) Ok(results)
@ -402,19 +402,19 @@ impl ArroyWrapper {
if self.quantized { if self.quantized {
for reader in self.readers(rtxn, self.quantized_db()) { for reader in self.readers(rtxn, self.quantized_db()) {
if let Some(vec) = reader?.item_vector(rtxn, item_id)? { match reader?.item_vector(rtxn, item_id)? { Some(vec) => {
vectors.push(vec); vectors.push(vec);
} else { } _ => {
break; break;
} }}
} }
} else { } else {
for reader in self.readers(rtxn, self.angular_db()) { for reader in self.readers(rtxn, self.angular_db()) {
if let Some(vec) = reader?.item_vector(rtxn, item_id)? { match reader?.item_vector(rtxn, item_id)? { Some(vec) => {
vectors.push(vec); vectors.push(vec);
} else { } _ => {
break; break;
} }}
} }
} }
Ok(vectors) Ok(vectors)

View File

@ -150,11 +150,11 @@ impl Embedder {
headers: options.headers, headers: options.headers,
}; };
let dimensions = if let Some(dimensions) = options.dimensions { let dimensions = match options.dimensions { Some(dimensions) => {
dimensions dimensions
} else { } _ => {
infer_dimensions(&data)? infer_dimensions(&data)?
}; }};
Ok(Self { Ok(Self {
data, data,

View File

@ -8,7 +8,7 @@ use Criterion::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS}; use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};
macro_rules! test_distinct { macro_rules! test_distinct {
($func:ident, $distinct:ident, $exhaustive:ident, $limit:expr, $offset:expr, $criteria:expr, $n_res:expr) => { ($func:ident, $distinct:ident, $exhaustive:ident, $limit:expr, $offset:expr_2021, $criteria:expr_2021, $n_res:expr_2021) => {
#[test] #[test]
fn $func() { fn $func() {
let criteria = $criteria; let criteria = $criteria;

View File

@ -5,7 +5,7 @@ use Criterion::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS}; use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};
macro_rules! test_filter { macro_rules! test_filter {
($func:ident, $filter:expr) => { ($func:ident, $filter:expr_2021) => {
#[test] #[test]
fn $func() { fn $func() {
let criteria = vec![Words, Typo, Proximity, Attribute, Exactness]; let criteria = vec![Words, Typo, Proximity, Attribute, Exactness];

View File

@ -220,11 +220,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
id = Some(document.id.clone()) id = Some(document.id.clone())
} }
} else if let Some(("asc_desc_rank", filter)) = filter.split_once('<') { } else if let Some(("asc_desc_rank", filter)) = filter.split_once('<') {
if document.asc_desc_rank < filter.parse().unwrap() { if document.asc_desc_rank < filter.parse::<u32>().unwrap() {
id = Some(document.id.clone()) id = Some(document.id.clone())
} }
} else if let Some(("asc_desc_rank", filter)) = filter.split_once('>') { } else if let Some(("asc_desc_rank", filter)) = filter.split_once('>') {
if document.asc_desc_rank > filter.parse().unwrap() { if document.asc_desc_rank > filter.parse::<u32>().unwrap() {
id = Some(document.id.clone()) id = Some(document.id.clone())
} }
} else if filter.starts_with("_geoRadius") { } else if filter.starts_with("_geoRadius") {

View File

@ -20,7 +20,7 @@ const DISALLOW_OPTIONAL_WORDS: TermsMatchingStrategy = TermsMatchingStrategy::Al
const ASC_DESC_CANDIDATES_THRESHOLD: usize = 1000; const ASC_DESC_CANDIDATES_THRESHOLD: usize = 1000;
macro_rules! test_criterion { macro_rules! test_criterion {
($func:ident, $optional_word:ident, $criteria:expr, $sort_criteria:expr) => { ($func:ident, $optional_word:ident, $criteria:expr_2021, $sort_criteria:expr_2021) => {
#[test] #[test]
fn $func() { fn $func() {
let criteria = $criteria; let criteria = $criteria;

View File

@ -1,7 +1,7 @@
[package] [package]
name = "tracing-trace" name = "tracing-trace"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html