mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-05-14 08:14:05 +02:00)

Fix some of the edition 2024 warnings

parent 2762d5a32a
commit aa87064a13
@@ -30,7 +30,7 @@ authors = [
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
 readme = "README.md"
-edition = "2021"
+edition = "2024"
 license = "MIT"

 [profile.release]
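Note: the `edition` bumps in the Cargo.toml hunks are what surface every warning fixed below. The code rewrites that follow look mechanical because they appear to be the machine-applied suggestions of the edition-migration lints (the style `cargo fix --edition` emits), chosen to preserve edition-2021 semantics exactly rather than to read naturally.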
@@ -275,19 +275,19 @@ impl From<Task> for TaskView {
         match (result, &mut details) {
             (
                 TaskResult::DocumentAddition { indexed_documents: num, .. },
-                Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }),
+                Some(TaskDetails::DocumentAddition { indexed_documents, .. }),
             ) => {
                 indexed_documents.replace(*num);
             }
             (
                 TaskResult::DocumentDeletion { deleted_documents: docs, .. },
-                Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }),
+                Some(TaskDetails::DocumentDeletion { deleted_documents, .. }),
             ) => {
                 deleted_documents.replace(*docs);
             }
             (
                 TaskResult::ClearAll { deleted_documents: docs },
-                Some(TaskDetails::ClearAll { ref mut deleted_documents }),
+                Some(TaskDetails::ClearAll { deleted_documents }),
             ) => {
                 deleted_documents.replace(*docs);
             }
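The `ref mut` deletions above are the Rust 2024 match-ergonomics change: explicit `ref`/`ref mut` binding modifiers are rejected when the default binding mode is already by-reference, and here the scrutinee contains `&mut details`, so the bindings are mutable references either way. A minimal sketch of the rule (names hypothetical, not the Meilisearch API):

    fn bump(details: &mut Option<u64>, num: u64) {
        match details {
            // `details` is `&mut _`, so the default binding mode is `ref mut`
            // and `inner` is already `&mut u64`; writing `Some(ref mut inner)`
            // here is an error on edition 2024 (and redundant on 2021).
            Some(inner) => *inner = num,
            None => *details = Some(num),
        }
    }

    fn main() {
        let mut d = Some(1);
        bump(&mut d, 7);
        assert_eq!(d, Some(7));
    }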
@@ -170,14 +170,14 @@ impl UpdateFile {
     }

     pub fn push_document(&mut self, document: &Document) -> Result<()> {
-        if let Some(mut writer) = self.writer.as_mut() {
+        match self.writer.as_mut() { Some(mut writer) => {
             serde_json::to_writer(&mut writer, &document)?;
             writer.write_all(b"\n")?;
-        } else {
+        } _ => {
             let file = File::create(&self.path).unwrap();
             self.writer = Some(BufWriter::new(file));
             self.push_document(document)?;
-        }
+        }}
         Ok(())
     }
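Most hunks in this commit have this exact shape: an `if let … else` rewritten into a `match` with a `_` arm. Edition 2024 drops temporaries created in the `if let` scrutinee before the `else` branch runs, instead of after the whole expression, and the `if_let_rescope` migration suggests the `match` form because `match` keeps the 2021 drop order. A sketch of the difference, adapted from the edition guide:

    use std::sync::RwLock;

    fn f(value: &RwLock<Option<bool>>) {
        if let Some(x) = *value.read().unwrap() {
            println!("value is {x}");
        } else {
            // Edition 2021: the read guard from the scrutinee is still alive
            // here, so this write() deadlocks; edition 2024 drops it before
            // the else branch. Rewriting as `match`, as this commit does,
            // freezes the 2021 order on both editions.
            *value.write().unwrap() = Some(true);
        }
    }

    fn main() {
        let lock = RwLock::new(None);
        f(&lock); // compiled on edition 2021, this call would deadlock
        assert_eq!(*lock.read().unwrap(), Some(true));
    }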
@@ -111,7 +111,7 @@ impl FileStore {
     }

     /// List the Uuids of the files in the FileStore
-    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
+    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>> + use<>> {
         Ok(self.path.read_dir()?.filter_map(|entry| {
             let file_name = match entry {
                 Ok(entry) => entry.file_name(),
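`+ use<>` is the precise-capturing syntax stabilized alongside edition 2024: on the new edition, return-position `impl Trait` captures all in-scope lifetimes by default, which would keep `&self` borrowed for as long as the returned iterator lives; an empty `use<>` opts back out. A sketch of the effect (types shortened from the hunk above, not the real FileStore):

    struct FileStore {
        count: usize,
    }

    impl FileStore {
        // Without `+ use<>`, edition 2024 makes the opaque type capture the
        // lifetime of `&self`, so the iterator would keep `self` borrowed.
        fn all_ids(&self) -> impl Iterator<Item = usize> + use<> {
            0..self.count
        }
    }

    fn main() {
        let ids = {
            let store = FileStore { count: 3 };
            store.all_ids() // may outlive the borrow of `store`
        };
        assert_eq!(ids.collect::<Vec<_>>(), vec![0, 1, 2]);
    }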
@@ -158,19 +158,19 @@ impl File {

 impl Write for File {
     fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
-        if let Some(file) = self.file.as_mut() {
+        match self.file.as_mut() { Some(file) => {
             file.write(buf)
-        } else {
+        } _ => {
             Ok(buf.len())
-        }
+        }}
     }

     fn flush(&mut self) -> std::io::Result<()> {
-        if let Some(file) = self.file.as_mut() {
+        match self.file.as_mut() { Some(file) => {
             file.flush()
-        } else {
+        } _ => {
             Ok(())
-        }
+        }}
     }
 }

@@ -3,7 +3,7 @@ name = "filter-parser-fuzz"
 version = "0.0.0"
 authors = ["Automatically generated"]
 publish = false
-edition = "2018"
+edition = "2024"

 [package.metadata]
 cargo-fuzz = true

@@ -198,7 +198,7 @@ impl Display for Error<'_> {
                 f,
                 "Encountered an internal `{:?}` error while parsing your filter. Please fill an issue", kind
             )?,
-            ErrorKind::External(ref error) => writeln!(f, "{}", error)?,
+            ErrorKind::External(error) => writeln!(f, "{}", error)?,
         }
         let base_column = self.context.get_utf8_column();
         let size = self.context.fragment().chars().count();

@@ -3,7 +3,7 @@ name = "flatten-serde-json-fuzz"
 version = "0.0.0"
 authors = ["Automatically generated"]
 publish = false
-edition = "2018"
+edition = "2024"

 [package.metadata]
 cargo-fuzz = true

@@ -272,11 +272,11 @@ impl IndexMapper {
                 if tries >= 100 {
                     panic!("Too many attempts to close index {name} prior to deletion.")
                 }
-                let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
+                let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
                     reopen
-                } else {
+                } _ => {
                     continue;
-                };
+                }};
                 reopen.close(&mut self.index_map.write().unwrap());
                 continue;
             }

@@ -382,11 +382,11 @@ impl IndexMapper {
             Available(index) => break index,
             Closing(reopen) => {
                 // Avoiding deadlocks: no lock taken while doing this operation.
-                let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
+                let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
                     reopen
-                } else {
+                } _ => {
                     continue;
-                };
+                }};
                 let index_path = self.base_path.join(uuid.to_string());
                 // take the lock to reopen the environment.
                 reopen

@@ -355,19 +355,19 @@ impl IndexScheduler {
     }

     fn is_good_heed(tasks_path: &Path, map_size: usize) -> bool {
-        if let Ok(env) = unsafe {
+        match unsafe {
             heed::EnvOpenOptions::new().map_size(clamp_to_page_size(map_size)).open(tasks_path)
-        } {
+        } { Ok(env) => {
             env.prepare_for_closing().wait();
             true
-        } else {
+        } _ => {
             // We're treating all errors equally here, not only allocation errors.
             // This means there's a possiblity for the budget to lower due to errors different from allocation errors.
             // For persistent errors, this is OK as long as the task db is then reopened normally without ignoring the error this time.
             // For transient errors, this could lead to an instance with too low a budget.
             // However transient errors are: 1) less likely than persistent errors 2) likely to cause other issues down the line anyway.
             false
-        }
+        }}
     }

     pub fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {

@@ -10,7 +10,7 @@ use crate::TaskId;

 #[macro_export]
 macro_rules! debug_snapshot {
-    ($value:expr, @$snapshot:literal) => {{
+    ($value:expr_2021, @$snapshot:literal) => {{
         let value = format!("{:?}", $value);
         meili_snap::snapshot!(value, @$snapshot);
     }};
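The `$x:expr` → `$x:expr_2021` edits that recur through the macro-heavy files are the `edition_2024_expr_fragment_specifier` migration: on Rust 2024 the `expr` fragment additionally matches `const { … }` blocks and `_`, so matchers are rewritten to `expr_2021`, which keeps the grammar the macros were written against. Sketch:

    macro_rules! debug_fmt {
        // `expr_2021` matches exactly what `expr` matched before edition 2024.
        ($value:expr_2021) => {
            format!("{:?}", $value)
        };
    }

    fn main() {
        assert_eq!(debug_fmt!(1 + 1), "2");
        // A plain `expr` fragment on edition 2024 would also accept
        // `const { 2 }` and `_`; `expr_2021` rejects them.
    }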
@@ -499,13 +499,13 @@ impl IndexScheduler {
         // create the batch directly. Otherwise, get the index name associated with the task
         // and use the autobatcher to batch the enqueued tasks associated with it

-        let index_name = if let Some(&index_name) = task.indexes().first() {
+        let index_name = match task.indexes().first() { Some(&index_name) => {
             index_name
-        } else {
+        } _ => {
             assert!(matches!(&task.kind, KindWithContent::IndexSwap { swaps } if swaps.is_empty()));
             current_batch.processing(Some(&mut task));
             return Ok(Some((Batch::IndexSwap { task }, current_batch)));
-        };
+        }};

         let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
         let mut primary_key = None;

@@ -47,11 +47,11 @@ impl IndexScheduler {
             Batch::TaskCancelation { mut task } => {
                 // 1. Retrieve the tasks that matched the query at enqueue-time.
                 let matched_tasks =
-                    if let KindWithContent::TaskCancelation { tasks, query: _ } = &task.kind {
+                    match &task.kind { KindWithContent::TaskCancelation { tasks, query: _ } => {
                         tasks
-                    } else {
+                    } _ => {
                         unreachable!()
-                    };
+                    }};

                 let rtxn = self.env.read_txn()?;
                 let mut canceled_tasks = self.cancel_matched_tasks(

@@ -83,11 +83,11 @@ impl IndexScheduler {
                 let mut matched_tasks = RoaringBitmap::new();

                 for task in tasks.iter() {
-                    if let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind {
+                    match &task.kind { KindWithContent::TaskDeletion { tasks, query: _ } => {
                         matched_tasks |= tasks;
-                    } else {
+                    } _ => {
                         unreachable!()
-                    }
+                    }}
                 }

                 let mut wtxn = self.env.write_txn()?;

@@ -279,11 +279,11 @@ impl IndexScheduler {
                 progress.update_progress(SwappingTheIndexes::EnsuringCorrectnessOfTheSwap);

                 let mut wtxn = self.env.write_txn()?;
-                let swaps = if let KindWithContent::IndexSwap { swaps } = &task.kind {
+                let swaps = match &task.kind { KindWithContent::IndexSwap { swaps } => {
                     swaps
-                } else {
+                } _ => {
                     unreachable!()
-                };
+                }};
                 let mut not_found_indexes = BTreeSet::new();
                 for IndexSwap { indexes: (lhs, rhs) } in swaps {
                     for index in [lhs, rhs] {

@@ -532,7 +532,7 @@ impl IndexScheduler {
         // We must remove the batch entirely
         if tasks.is_empty() {
             if let Some(batch) = self.queue.batches.get_batch(wtxn, batch_id)? {
-                if let Some(BatchEnqueuedAt { earliest, oldest }) = batch.enqueued_at {
+                match batch.enqueued_at { Some(BatchEnqueuedAt { earliest, oldest }) => {
                     remove_task_datetime(
                         wtxn,
                         self.queue.batches.enqueued_at,

@@ -545,7 +545,7 @@ impl IndexScheduler {
                         oldest,
                         batch_id,
                     )?;
-                } else {
+                } _ => {
                     // If we don't have the enqueued at in the batch it means the database comes from the v1.12
                     // and we still need to find the date by scrolling the database
                     remove_n_tasks_datetime_earlier_than(

@@ -555,7 +555,7 @@ impl IndexScheduler {
                         batch.stats.total_nb_tasks.clamp(1, 2) as usize,
                         batch_id,
                     )?;
-                }
+                }}
                 remove_task_datetime(
                     wtxn,
                     self.queue.batches.started_at,

@@ -26,11 +26,11 @@ impl IndexScheduler {
         progress.update_progress(DumpCreationProgress::StartTheDumpCreation);
         let started_at = OffsetDateTime::now_utc();
         let (keys, instance_uid) =
-            if let KindWithContent::DumpCreation { keys, instance_uid } = &task.kind {
+            match &task.kind { KindWithContent::DumpCreation { keys, instance_uid } => {
                 (keys, instance_uid)
-            } else {
+            } _ => {
                 unreachable!();
-            };
+            }};
         let dump = dump::DumpWriter::new(*instance_uid)?;

         // 1. dump the keys

@@ -206,14 +206,14 @@ impl IndexScheduler {
                 let user_err =
                     milli::Error::UserError(milli::UserError::InvalidVectorsMapType {
                         document_id: {
-                            if let Ok(Some(Ok(index))) = index
+                            match index
                                 .external_id_of(&rtxn, std::iter::once(id))
                                 .map(|it| it.into_iter().next())
-                            {
+                            { Ok(Some(Ok(index))) => {
                                 index
-                            } else {
+                            } _ => {
                                 format!("internal docid={id}")
-                            }
+                            }}
                         },
                         value: vectors.clone(),
                     });

@@ -206,17 +206,17 @@ impl IndexScheduler {
             IndexOperation::DocumentEdition { index_uid, mut task } => {
                 progress.update_progress(DocumentEditionProgress::RetrievingConfig);

-                let (filter, code) = if let KindWithContent::DocumentEdition {
+                let (filter, code) = match &task.kind
+                { KindWithContent::DocumentEdition {
                     filter_expr,
                     context: _,
                     function,
                     ..
-                } = &task.kind
-                {
+                } => {
                     (filter_expr, function)
-                } else {
+                } _ => {
                     unreachable!()
-                };
+                }};

                 let candidates = match filter.as_ref().map(Filter::from_json) {
                     Some(Ok(Some(filter))) => filter

@@ -226,18 +226,18 @@ impl IndexScheduler {
                     Some(Err(e)) => return Err(Error::from_milli(e, Some(index_uid.clone()))),
                 };

-                let (original_filter, context, function) = if let Some(Details::DocumentEdition {
+                let (original_filter, context, function) = match task.details
+                { Some(Details::DocumentEdition {
                     original_filter,
                     context,
                     function,
                     ..
-                }) = task.details
-                {
+                }) => {
                     (original_filter, context, function)
-                } else {
+                } _ => {
                     // In the case of a `documentEdition` the details MUST be set
                     unreachable!();
-                };
+                }};

                 if candidates.is_empty() {
                     task.status = Status::Succeeded;

@@ -397,16 +397,16 @@ impl IndexScheduler {
                     };
                 }
                 let will_be_removed = to_delete.len() - before;
-                if let Some(Details::DocumentDeletionByFilter {
+                match &mut task.details
+                { Some(Details::DocumentDeletionByFilter {
                     original_filter: _,
                     deleted_documents,
-                }) = &mut task.details
-                {
+                }) => {
                     *deleted_documents = Some(will_be_removed);
-                } else {
+                } _ => {
                     // In the case of a `documentDeleteByFilter` the details MUST be set
                     unreachable!()
-                }
+                }}
             }
             _ => unreachable!(),
         }

@@ -307,7 +307,7 @@ pub(crate) fn filter_out_references_to_newer_tasks(task: &mut Task) {

 pub(crate) fn check_index_swap_validity(task: &Task) -> Result<()> {
     let swaps =
-        if let KindWithContent::IndexSwap { swaps } = &task.kind { swaps } else { return Ok(()) };
+        match &task.kind { KindWithContent::IndexSwap { swaps } => { swaps } _ => { return Ok(()) }};
     let mut all_indexes = HashSet::new();
     let mut duplicate_indexes = BTreeSet::new();
     for IndexSwap { indexes: (lhs, rhs) } in swaps {

@@ -501,15 +501,15 @@ impl crate::IndexScheduler {
                 } => {
                     assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
                     let (index_uid, documents_ids) =
-                        if let KindWithContent::DocumentDeletion {
+                        match kind
+                        { KindWithContent::DocumentDeletion {
                             ref index_uid,
                             ref documents_ids,
-                        } = kind
-                        {
+                        } => {
                             (index_uid, documents_ids)
-                        } else {
+                        } _ => {
                             unreachable!()
-                        };
+                        }};
                     assert_eq!(&task_index_uid.unwrap(), index_uid);

                     match status {

@@ -526,15 +526,15 @@ impl crate::IndexScheduler {
                 }
                 Details::DocumentDeletionByFilter { deleted_documents, original_filter: _ } => {
                     assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
-                    let (index_uid, _) = if let KindWithContent::DocumentDeletionByFilter {
+                    let (index_uid, _) = match kind
+                    { KindWithContent::DocumentDeletionByFilter {
                         ref index_uid,
                         ref filter_expr,
-                    } = kind
-                    {
+                    } => {
                         (index_uid, filter_expr)
-                    } else {
+                    } _ => {
                         unreachable!()
-                    };
+                    }};
                     assert_eq!(&task_index_uid.unwrap(), index_uid);

                     match status {

@@ -3,7 +3,7 @@ name = "json-depth-checker"
 version = "0.0.0"
 authors = ["Automatically generated"]
 publish = false
-edition = "2018"
+edition = "2024"

 [package.metadata]
 cargo-fuzz = true

@@ -77,7 +77,7 @@ snapshot_hash!("hello world", name: "snap_name", @"5f93f983524def3dca464469d2cf9
 */
 #[macro_export]
 macro_rules! snapshot_hash {
-    ($value:expr, @$inline:literal) => {
+    ($value:expr_2021, @$inline:literal) => {
         let test_name = {
             fn f() {}
             fn type_name_of_val<T>(_: T) -> &'static str {

@@ -99,7 +99,7 @@ macro_rules! snapshot_hash {
            }
        });
    };
-    ($value:expr, name: $name:expr, @$inline:literal) => {
+    ($value:expr_2021, name: $name:expr_2021, @$inline:literal) => {
         let test_name = {
             fn f() {}
             fn type_name_of_val<T>(_: T) -> &'static str {

@@ -151,7 +151,7 @@ snapshot!(format!("{:?}", vec![1, 2]), @"[1, 2]");
 */
 #[macro_export]
 macro_rules! snapshot {
-    ($value:expr, name: $name:expr) => {
+    ($value:expr_2021, name: $name:expr_2021) => {
         let test_name = {
             fn f() {}
             fn type_name_of_val<T>(_: T) -> &'static str {

@@ -172,7 +172,7 @@ macro_rules! snapshot {
            }
        });
    };
-    ($value:expr, @$inline:literal) => {
+    ($value:expr_2021, @$inline:literal) => {
         // Note that the name given as argument does not matter since it is only an inline snapshot
         // We don't pass None because otherwise `meili-snap` will try to assign it a unique identifier
         let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));

@@ -183,7 +183,7 @@ macro_rules! snapshot {
            }
        });
    };
-    ($value:expr) => {
+    ($value:expr_2021) => {
         let test_name = {
             fn f() {}
             fn type_name_of_val<T>(_: T) -> &'static str {

@@ -213,13 +213,13 @@ macro_rules! snapshot {
 /// refer to the redactions feature in the `insta` guide.
 #[macro_export]
 macro_rules! json_string {
-    ($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
+    ($value:expr_2021, {$($k:expr_2021 => $v:expr_2021),*$(,)?}) => {
         {
             let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
             snap
         }
     };
-    ($value:expr) => {{
+    ($value:expr_2021) => {{
         let value = meili_snap::insta::_macro_support::serialize_value(
             &$value,
             meili_snap::insta::_macro_support::SerializationFormat::Json,

@@ -403,7 +403,7 @@ impl ErrorCode for milli::Error {
         match self {
             Error::InternalError(_) => Code::Internal,
             Error::IoError(e) => e.error_code(),
-            Error::UserError(ref error) => {
+            Error::UserError(error) => {
                 match error {
                     // TODO: wait for spec for new error codes.
                     UserError::SerdeJson(_)

@@ -33,7 +33,7 @@ impl From<LocalizedAttributesRuleView> for LocalizedAttributesRule {
 ///
 /// this enum implements `Deserr` in order to be used in the API.
 macro_rules! make_locale {
-    ($(($iso_639_1:ident, $iso_639_1_str:expr) => ($iso_639_3:ident, $iso_639_3_str:expr),)+) => {
+    ($(($iso_639_1:ident, $iso_639_1_str:expr_2021) => ($iso_639_3:ident, $iso_639_3_str:expr_2021),)+) => {
         #[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, Serialize, Deserialize, Ord, PartialOrd, ToSchema)]
         #[deserr(rename_all = camelCase)]
         #[serde(rename_all = "camelCase")]

@@ -572,19 +572,19 @@ pub fn apply_settings_to_builder(
     } = settings;

     match searchable_attributes.deref() {
-        Setting::Set(ref names) => builder.set_searchable_fields(names.clone()),
+        Setting::Set(names) => builder.set_searchable_fields(names.clone()),
         Setting::Reset => builder.reset_searchable_fields(),
         Setting::NotSet => (),
     }

     match displayed_attributes.deref() {
-        Setting::Set(ref names) => builder.set_displayed_fields(names.clone()),
+        Setting::Set(names) => builder.set_displayed_fields(names.clone()),
         Setting::Reset => builder.reset_displayed_fields(),
         Setting::NotSet => (),
     }

     match filterable_attributes {
-        Setting::Set(ref facets) => {
+        Setting::Set(facets) => {
             builder.set_filterable_fields(facets.clone().into_iter().collect())
         }
         Setting::Reset => builder.reset_filterable_fields(),

@@ -592,13 +592,13 @@ pub fn apply_settings_to_builder(
     }

     match sortable_attributes {
-        Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
+        Setting::Set(fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
         Setting::Reset => builder.reset_sortable_fields(),
         Setting::NotSet => (),
     }

     match ranking_rules {
-        Setting::Set(ref criteria) => {
+        Setting::Set(criteria) => {
             builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
         }
         Setting::Reset => builder.reset_criteria(),

@@ -606,13 +606,13 @@ pub fn apply_settings_to_builder(
     }

     match stop_words {
-        Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()),
+        Setting::Set(stop_words) => builder.set_stop_words(stop_words.clone()),
         Setting::Reset => builder.reset_stop_words(),
         Setting::NotSet => (),
     }

     match non_separator_tokens {
-        Setting::Set(ref non_separator_tokens) => {
+        Setting::Set(non_separator_tokens) => {
             builder.set_non_separator_tokens(non_separator_tokens.clone())
         }
         Setting::Reset => builder.reset_non_separator_tokens(),

@@ -620,7 +620,7 @@ pub fn apply_settings_to_builder(
     }

     match separator_tokens {
-        Setting::Set(ref separator_tokens) => {
+        Setting::Set(separator_tokens) => {
             builder.set_separator_tokens(separator_tokens.clone())
         }
         Setting::Reset => builder.reset_separator_tokens(),

@@ -628,38 +628,38 @@ pub fn apply_settings_to_builder(
     }

     match dictionary {
-        Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
+        Setting::Set(dictionary) => builder.set_dictionary(dictionary.clone()),
         Setting::Reset => builder.reset_dictionary(),
         Setting::NotSet => (),
     }

     match synonyms {
-        Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
+        Setting::Set(synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
         Setting::Reset => builder.reset_synonyms(),
         Setting::NotSet => (),
     }

     match distinct_attribute {
-        Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()),
+        Setting::Set(attr) => builder.set_distinct_field(attr.clone()),
         Setting::Reset => builder.reset_distinct_field(),
         Setting::NotSet => (),
     }

     match proximity_precision {
-        Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
+        Setting::Set(precision) => builder.set_proximity_precision((*precision).into()),
         Setting::Reset => builder.reset_proximity_precision(),
         Setting::NotSet => (),
     }

     match localized_attributes_rules {
-        Setting::Set(ref rules) => builder
+        Setting::Set(rules) => builder
             .set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
         Setting::Reset => builder.reset_localized_attributes_rules(),
         Setting::NotSet => (),
     }

     match typo_tolerance {
-        Setting::Set(ref value) => {
+        Setting::Set(value) => {
             match value.enabled {
                 Setting::Set(val) => builder.set_autorize_typos(val),
                 Setting::Reset => builder.reset_authorize_typos(),

@@ -736,7 +736,7 @@ pub fn apply_settings_to_builder(
     }

     match pagination {
-        Setting::Set(ref value) => match value.max_total_hits {
+        Setting::Set(value) => match value.max_total_hits {
             Setting::Set(val) => builder.set_pagination_max_total_hits(val),
             Setting::Reset => builder.reset_pagination_max_total_hits(),
             Setting::NotSet => (),

@@ -89,11 +89,11 @@ fn is_empty_db(db_path: impl AsRef<Path>) -> bool {
     if !db_path.exists() {
         true
     // if we encounter an error or if the db is a file we consider the db non empty
-    } else if let Ok(dir) = db_path.read_dir() {
+    } else { match db_path.read_dir() { Ok(dir) => {
         dir.count() == 0
-    } else {
+    } _ => {
         true
-    }
+    }}}
 }

 /// The handle used to update the logs at runtime. Must be accessible from the `main.rs` and the `route/logs.rs`.

@@ -466,18 +466,18 @@ fn import_dump(
     let reader = File::open(dump_path)?;
     let mut dump_reader = dump::DumpReader::open(reader)?;

-    if let Some(date) = dump_reader.date() {
+    match dump_reader.date() { Some(date) => {
         tracing::info!(
             version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
             %date,
             "Importing a dump of meilisearch"
         );
-    } else {
+    } _ => {
         tracing::info!(
             version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
             "Importing a dump of meilisearch",
         );
-    }
+    }}

     let instance_uid = dump_reader.instance_uid()?;

@@ -178,11 +178,11 @@ async fn run_http(
         .disable_signals()
         .keep_alive(KeepAlive::Os);

-    if let Some(config) = opt_clone.get_ssl_config()? {
+    match opt_clone.get_ssl_config()? { Some(config) => {
         http_server.bind_rustls_0_23(opt_clone.http_addr, config)?.run().await?;
-    } else {
+    } _ => {
         http_server.bind(&opt_clone.http_addr)?.run().await?;
-    }
+    }}
     Ok(())
 }

@@ -907,7 +907,7 @@ fn load_private_key(
 fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {
     let mut ret = Vec::new();

-    if let Some(ref name) = filename {
+    if let Some(name) = filename {
         fs::File::open(name)
             .map_err(|_| anyhow::anyhow!("cannot open ocsp file"))?
             .read_to_end(&mut ret)

@@ -924,7 +924,8 @@ where
     T: AsRef<OsStr>,
 {
     if let Err(VarError::NotPresent) = std::env::var(key) {
-        std::env::set_var(key, value);
+        // TODO: Audit that the environment access only happens in single-threaded code.
+        unsafe { std::env::set_var(key, value) };
     }
 }
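`std::env::set_var` (and `remove_var`) are `unsafe fn`s on edition 2024 because mutating the process environment is not thread-safe on POSIX, so the migration wraps each call in an `unsafe` block and leaves the audit TODO in place. A sketch of the obligation the caller takes on (function name hypothetical):

    use std::env::{self, VarError};
    use std::ffi::OsStr;

    fn set_env_if_absent(key: &str, value: impl AsRef<OsStr>) {
        if let Err(VarError::NotPresent) = env::var(key) {
            // SAFETY: assumed to run during single-threaded startup, before
            // any other thread can read or write the environment.
            unsafe { env::set_var(key, value) };
        }
    }

    fn main() {
        set_env_if_absent("MY_APP_MODE", "release");
        assert_eq!(env::var("MY_APP_MODE").as_deref(), Ok("release"));
    }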
@@ -97,12 +97,12 @@ async fn get_batch(
     let filters = index_scheduler.filters();
     let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;

-    if let Some(batch) = batches.first() {
+    match batches.first() { Some(batch) => {
         let batch_view = BatchView::from_batch(batch);
         Ok(HttpResponse::Ok().json(batch_view))
-    } else {
+    } _ => {
         Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
-    }
+    }}
 }

 #[derive(Debug, Serialize, ToSchema)]

@@ -619,7 +619,7 @@ fn documents_by_query(

     let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);

-    let ids = if let Some(ids) = ids {
+    let ids = match ids { Some(ids) => {
         let mut parsed_ids = Vec::with_capacity(ids.len());
         for (index, id) in ids.into_iter().enumerate() {
             let id = id.try_into().map_err(|error| {

@@ -629,9 +629,9 @@ fn documents_by_query(
             parsed_ids.push(id)
         }
         Some(parsed_ids)
-    } else {
+    } _ => {
         None
-    };
+    }};

     let index = index_scheduler.index(&index_uid)?;
     let (total, documents) = retrieve_documents(

@@ -131,7 +131,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {

         ret.total_received = 1;

-        if let Some(ref sort) = sort {
+        if let Some(sort) = sort {
             ret.sort_total_number_of_criteria = 1;
             ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
             ret.sort_sum_of_criteria_terms = sort.len();

@@ -139,7 +139,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {

         ret.distinct = distinct.is_some();

-        if let Some(ref filter) = filter {
+        if let Some(filter) = filter {
             static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
             ret.filter_total_number_of_criteria = 1;

@@ -168,11 +168,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
             ret.attributes_to_search_on_total_number_of_uses = 1;
         }

-        if let Some(ref q) = q {
+        if let Some(q) = q {
             ret.max_terms_number = q.split_whitespace().count();
         }

-        if let Some(ref vector) = vector {
+        if let Some(vector) = vector {
             ret.max_vector_size = vector.len();
         }
         ret.retrieve_vectors |= retrieve_vectors;

@@ -67,7 +67,7 @@ impl<Method: AggregateMethod> SimilarAggregator<Method> {

         ret.total_received = 1;

-        if let Some(ref filter) = filter {
+        if let Some(filter) = filter {
             static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
             ret.filter_total_number_of_criteria = 1;

@@ -341,11 +341,11 @@ pub async fn get_logs(
         })
         .unwrap();

-    if let Some(stream) = stream {
+    match stream { Some(stream) => {
         Ok(HttpResponse::Ok().streaming(stream))
-    } else {
+    } _ => {
         Err(MeilisearchHttpError::AlreadyUsedLogRoute.into())
-    }
+    }}
 }

 /// Stop retrieving logs

@@ -638,12 +638,12 @@ async fn get_task(
     let filters = index_scheduler.filters();
     let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;

-    if let Some(task) = tasks.first() {
+    match tasks.first() { Some(task) => {
         let task_view = TaskView::from_task(task);
         Ok(HttpResponse::Ok().json(task_view))
-    } else {
+    } _ => {
         Err(index_scheduler::Error::TaskNotFound(task_uid).into())
-    }
+    }}
 }

 /// Get a task's documents.

@@ -693,7 +693,7 @@ async fn get_task_documents_file(
     let filters = index_scheduler.filters();
     let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;

-    if let Some(task) = tasks.first() {
+    match tasks.first() { Some(task) => {
         match task.content_uuid() {
             Some(uuid) => {
                 let mut tfile = match index_scheduler.queue.update_file(uuid) {

@@ -711,9 +711,9 @@ async fn get_task_documents_file(
             }
             None => Err(index_scheduler::Error::TaskFileNotFound(task_uid).into()),
         }
-    } else {
+    } _ => {
         Err(index_scheduler::Error::TaskNotFound(task_uid).into())
-    }
+    }}
 }

 pub enum DeserializeDateOption {

@@ -740,7 +740,7 @@ impl SearchByIndex {
             _ => ranking_rules::CanonicalizationKind::Placeholder,
         };

-        let sort = if let Some(sort) = &query.sort {
+        let sort = match &query.sort { Some(sort) => {
             let sorts: Vec<_> =
                 match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
                     Ok(sorts) => sorts,

@@ -752,9 +752,9 @@ impl SearchByIndex {
                 }
             };
             Some(sorts)
-        } else {
+        } _ => {
             None
-        };
+        }};

         let ranking_rules = ranking_rules::RankingRules::new(
             criteria.clone(),

@@ -1331,15 +1331,15 @@ impl<'a> HitMaker<'a> {
         let displayed_ids =
             displayed_ids.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());

-        let retrieve_vectors = if let RetrieveVectors::Retrieve = format.retrieve_vectors {
+        let retrieve_vectors = match format.retrieve_vectors { RetrieveVectors::Retrieve => {
             if vectors_is_hidden {
                 RetrieveVectors::Hide
             } else {
                 RetrieveVectors::Retrieve
             }
-        } else {
+        } _ => {
             format.retrieve_vectors
-        };
+        }};

         let fids = |attrs: &BTreeSet<String>| {
             let mut ids = BTreeSet::new();

@@ -94,7 +94,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
 });

 macro_rules! compute_authorized_search {
-    ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
+    ($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
         let index = server.index("sales");

@@ -141,7 +141,7 @@ macro_rules! compute_authorized_search {
 }

 macro_rules! compute_forbidden_search {
-    ($tenant_tokens:expr, $parent_keys:expr) => {
+    ($tenant_tokens:expr_2021, $parent_keys:expr_2021) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
         let index = server.index("sales");

@@ -262,7 +262,7 @@ static BOTH_REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
 });

 macro_rules! compute_authorized_single_search {
-    ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
+    ($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
         let index = server.index("sales");

@@ -333,7 +333,7 @@ macro_rules! compute_authorized_single_search {
 }

 macro_rules! compute_authorized_multiple_search {
-    ($tenant_tokens:expr, $filter1:expr, $filter2:expr, $expected_count1:expr, $expected_count2:expr) => {
+    ($tenant_tokens:expr_2021, $filter1:expr_2021, $filter2:expr_2021, $expected_count1:expr_2021, $expected_count2:expr_2021) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
         let index = server.index("sales");

@@ -417,7 +417,7 @@ macro_rules! compute_authorized_multiple_search {
 }

 macro_rules! compute_forbidden_single_search {
-    ($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
+    ($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
         let index = server.index("sales");

@@ -493,7 +493,7 @@ macro_rules! compute_forbidden_single_search {
 }

 macro_rules! compute_forbidden_multiple_search {
-    ($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
+    ($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
         let index = server.index("sales");

@@ -63,7 +63,7 @@ impl Encoder {
         buffer
     }

-    pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair> {
+    pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair + use<>> {
         match self {
             Self::Plain => None,
             Self::Gzip => Some(("Content-Encoding", "gzip")),

@@ -25,13 +25,13 @@ pub struct Value(pub serde_json::Value);
 impl Value {
     #[track_caller]
     pub fn uid(&self) -> u64 {
-        if let Some(uid) = self["uid"].as_u64() {
+        match self["uid"].as_u64() { Some(uid) => {
             uid
-        } else if let Some(uid) = self["taskUid"].as_u64() {
+        } _ => { match self["taskUid"].as_u64() { Some(uid) => {
             uid
-        } else {
+        } _ => {
             panic!("Didn't find any task id in: {self}");
-        }
+        }}}}
     }

     pub fn has_uid(&self) -> bool {
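An `else if let` chain has no single-`match` equivalent, so the same migration nests one match inside the `_` arm of another; that is where runs of closers like `}}}}` come from. A sketch of the shape (assumes the serde_json crate, which the helper above uses):

    fn uid(v: &serde_json::Value) -> u64 {
        // Mechanical rewrite of:
        //   if let Some(uid) = v["uid"].as_u64() { uid }
        //   else if let Some(uid) = v["taskUid"].as_u64() { uid }
        //   else { panic!(...) }
        match v["uid"].as_u64() { Some(uid) => {
            uid
        } _ => { match v["taskUid"].as_u64() { Some(uid) => {
            uid
        } _ => {
            panic!("Didn't find any task id in: {v}");
        }}}}
    }

    fn main() {
        let task = serde_json::json!({ "taskUid": 42 });
        assert_eq!(uid(&task), 42);
    }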
@@ -150,7 +150,7 @@ macro_rules! json {
 /// Performs a search test on both post and get routes
 #[macro_export]
 macro_rules! test_post_get_search {
-    ($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
+    ($server:expr_2021, $query:expr_2021, |$response:ident, $status_code:ident | $block:expr_2021) => {
         let post_query: meilisearch::routes::search::SearchQueryPost =
             serde_json::from_str(&$query.clone().to_string()).unwrap();
         let get_query: meilisearch::routes::search::SearchQuery = post_query.into();

@@ -43,9 +43,11 @@ impl Server<Owned> {
         let dir = TempDir::new().unwrap();

         if cfg!(windows) {
-            std::env::set_var("TMP", TEST_TEMP_DIR.path());
+            // TODO: Audit that the environment access only happens in single-threaded code.
+            unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
         } else {
-            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+            // TODO: Audit that the environment access only happens in single-threaded code.
+            unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
         }

         let options = default_settings(dir.path());

@@ -58,9 +60,11 @@ impl Server<Owned> {

     pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
         if cfg!(windows) {
-            std::env::set_var("TMP", TEST_TEMP_DIR.path());
+            // TODO: Audit that the environment access only happens in single-threaded code.
+            unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
         } else {
-            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+            // TODO: Audit that the environment access only happens in single-threaded code.
+            unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
         }

         options.master_key = Some("MASTER_KEY".to_string());

@@ -191,9 +195,11 @@ impl Server<Shared> {
         let dir = TempDir::new().unwrap();

         if cfg!(windows) {
-            std::env::set_var("TMP", TEST_TEMP_DIR.path());
+            // TODO: Audit that the environment access only happens in single-threaded code.
+            unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
         } else {
-            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
+            // TODO: Audit that the environment access only happens in single-threaded code.
+            unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
         }

         let options = default_settings(dir.path());

@@ -296,9 +302,9 @@ impl<State> Server<State> {
         &self,
     ) -> impl actix_web::dev::Service<
         actix_http::Request,
-        Response = ServiceResponse<impl MessageBody>,
+        Response = ServiceResponse<impl MessageBody + use<State>>,
         Error = actix_web::Error,
-    > {
+    > + use<State> {
         self.service.init_web_app().await
     }

@@ -116,9 +116,9 @@ impl Service {
         &self,
     ) -> impl actix_web::dev::Service<
         actix_http::Request,
-        Response = ServiceResponse<impl MessageBody>,
+        Response = ServiceResponse<impl MessageBody + use<>>,
         Error = actix_web::Error,
-    > {
+    > + use<> {
         let (_route_layer, route_layer_handle) =
             tracing_subscriber::reload::Layer::new(None.with_filter(
                 tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),

@@ -10,10 +10,10 @@ use crate::json;

 macro_rules! verify_snapshot {
     (
-        $orig:expr,
-        $snapshot: expr,
+        $orig:expr_2021,
+        $snapshot: expr_2021,
         |$server:ident| =>
-        $($e:expr,)+) => {
+        $($e:expr_2021,)+) => {
         use std::sync::Arc;
         let snapshot = Arc::new($snapshot);
         let orig = Arc::new($orig);

@@ -228,7 +228,7 @@ async fn list_tasks_status_and_type_filtered() {
 }

 macro_rules! assert_valid_summarized_task {
-    ($response:expr, $task_type:literal, $index:literal) => {{
+    ($response:expr_2021, $task_type:literal, $index:literal) => {{
         assert_eq!($response.as_object().unwrap().len(), 5);
         assert!($response["taskUid"].as_u64().is_some());
         assert_eq!($response["indexUid"], $index);

@@ -577,14 +577,14 @@ fn export_documents(
                     return Err(meilisearch_types::milli::Error::UserError(
                         meilisearch_types::milli::UserError::InvalidVectorsMapType {
                             document_id: {
-                                if let Ok(Some(Ok(index))) = index
+                                match index
                                     .external_id_of(&rtxn, std::iter::once(id))
                                     .map(|it| it.into_iter().next())
-                                {
+                                { Ok(Some(Ok(index))) => {
                                     index
-                                } else {
+                                } _ => {
                                     format!("internal docid={id}")
-                                }
+                                }}
                             },
                             value: vectors.clone(),
                         },

@@ -1,6 +1,6 @@
 [package]
 name = "milli"
-edition = "2021"
+edition = "2024"
 publish = false

 version.workspace = true

@@ -8,7 +8,7 @@ use crate::documents::DocumentsBatchBuilder;
 use crate::Object;

 macro_rules! tri {
-    ($e:expr) => {
+    ($e:expr_2021) => {
         match $e {
             Ok(r) => r,
             Err(e) => return Ok(Err(e.into())),

@@ -301,26 +301,26 @@ impl<'a> FacetDistribution<'a> {
         let mut distribution = BTreeMap::new();
         for (fid, name) in fields_ids_map.iter() {
             if self.select_field(name, &filterable_attributes_rules) {
-                let min_value = if let Some(min_value) = crate::search::facet::facet_min_value(
+                let min_value = match crate::search::facet::facet_min_value(
                     self.index,
                     self.rtxn,
                     fid,
                     candidates.clone(),
-                )? {
+                )? { Some(min_value) => {
                     min_value
-                } else {
+                } _ => {
                     continue;
-                };
-                let max_value = if let Some(max_value) = crate::search::facet::facet_max_value(
+                }};
+                let max_value = match crate::search::facet::facet_max_value(
                     self.index,
                     self.rtxn,
                     fid,
                     candidates.clone(),
-                )? {
+                )? { Some(max_value) => {
                     max_value
-                } else {
+                } _ => {
                     continue;
-                };
+                }};

                 distribution.insert(name.to_string(), (min_value, max_value));
             }

@@ -37,12 +37,12 @@ where
     let mut fd = LexicographicFacetDistribution { rtxn, db, field_id, callback };
     let highest_level = get_highest_level(rtxn, db, field_id)?;

-    if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+    match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
         fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
         Ok(())
-    } else {
+    } _ => {
         Ok(())
-    }
+    }}
 }

 pub fn count_iterate_over_facet_distribution<'t, CB>(

@@ -53,17 +53,16 @@ where
     let mut f = FacetRangeSearch { rtxn, db, field_id, left, right, universe, docids };
     let highest_level = get_highest_level(rtxn, db, field_id)?;

-    if let Some(starting_left_bound) =
-        get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
-    {
+    match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
+    { Some(starting_left_bound) => {
         let rightmost_bound =
             Bound::Included(get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap()); // will not fail because get_first_facet_value succeeded
         let group_size = usize::MAX;
         f.run(highest_level, starting_left_bound, rightmost_bound, group_size)?;
         Ok(())
-    } else {
+    } _ => {
         Ok(())
-    }
+    }}
 }

 /// Fetch the document ids that have a facet with a value between the two given bounds

@@ -36,7 +36,7 @@ pub fn ascending_facet_sort<'t>(
     candidates: RoaringBitmap,
 ) -> Result<impl Iterator<Item = Result<(RoaringBitmap, &'t [u8])>> + 't> {
     let highest_level = get_highest_level(rtxn, db, field_id)?;
-    if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+    match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
         let first_key = FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
         let iter = db.range(rtxn, &(first_key..)).unwrap().take(usize::MAX);

@@ -46,9 +46,9 @@ pub fn ascending_facet_sort<'t>(
             field_id,
             stack: vec![(candidates, iter)],
         }))
-    } else {
+    } _ => {
         Ok(itertools::Either::Right(std::iter::empty()))
-    }
+    }}
 }

 struct AscendingFacetSort<'t, 'e> {

@@ -19,7 +19,7 @@ pub fn descending_facet_sort<'t>(
     candidates: RoaringBitmap,
 ) -> Result<impl Iterator<Item = Result<(RoaringBitmap, &'t [u8])>> + 't> {
     let highest_level = get_highest_level(rtxn, db, field_id)?;
-    if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+    match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
         let first_key = FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
         let last_bound = get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap();
         let last_key = FacetGroupKey { field_id, level: highest_level, left_bound: last_bound };

@@ -30,9 +30,9 @@ pub fn descending_facet_sort<'t>(
             field_id,
             stack: vec![(candidates, iter, Bound::Included(last_bound))],
         }))
-    } else {
+    } _ => {
         Ok(itertools::Either::Right(std::iter::empty()))
-    }
+    }}
 }

 struct DescendingFacetSort<'t> {

@@ -23,7 +23,7 @@ fn facet_extreme_value<'t>(
     mut extreme_it: impl Iterator<Item = heed::Result<(RoaringBitmap, &'t [u8])>> + 't,
 ) -> Result<Option<f64>> {
     let extreme_value =
-        if let Some(extreme_value) = extreme_it.next() { extreme_value } else { return Ok(None) };
+        match extreme_it.next() { Some(extreme_value) => { extreme_value } _ => { return Ok(None) }};
     let (_, extreme_value) = extreme_value?;
     OrderedF64Codec::bytes_decode(extreme_value)
         .map(Some)

@@ -67,14 +67,14 @@ where
     level0prefix.push(0);
     let mut level0_iter_forward =
         db.remap_types::<Bytes, DecodeIgnore>().prefix_iter(txn, level0prefix.as_slice())?;
-    if let Some(first) = level0_iter_forward.next() {
+    match level0_iter_forward.next() { Some(first) => {
         let (first_key, _) = first?;
         let first_key = FacetGroupKeyCodec::<BoundCodec>::bytes_decode(first_key)
             .map_err(heed::Error::Decoding)?;
         Ok(Some(first_key.left_bound))
-    } else {
+    } _ => {
         Ok(None)
-    }
+    }}
 }

 /// Get the last facet value in the facet database

@@ -91,14 +91,14 @@ where
     level0prefix.push(0);
     let mut level0_iter_backward =
         db.remap_types::<Bytes, DecodeIgnore>().rev_prefix_iter(txn, level0prefix.as_slice())?;
-    if let Some(last) = level0_iter_backward.next() {
+    match level0_iter_backward.next() { Some(last) => {
         let (last_key, _) = last?;
         let last_key = FacetGroupKeyCodec::<BoundCodec>::bytes_decode(last_key)
             .map_err(heed::Error::Decoding)?;
         Ok(Some(last_key.left_bound))
-    } else {
+    } _ => {
         Ok(None)
-    }
+    }}
 }

 /// Get the height of the highest level in the facet database

@@ -146,7 +146,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
     let mut cur_offset = 0usize;

     macro_rules! maybe_add_to_results {
-        ($candidates:expr) => {
+        ($candidates:expr_2021) => {
             maybe_add_to_results(
                 ctx,
                 from,

@@ -54,15 +54,15 @@ where
     /// Insert the given value into the dedup-interner, and return
     /// its index.
     pub fn insert(&mut self, s: T) -> Interned<T> {
-        if let Some(interned) = self.lookup.get(&s) {
+        match self.lookup.get(&s) { Some(interned) => {
             *interned
-        } else {
+        } _ => {
             assert!(self.stable_store.len() < u16::MAX as usize);
             self.stable_store.push(s.clone());
             let interned = Interned::from_raw(self.stable_store.len() as u16 - 1);
             self.lookup.insert(s, interned);
             interned
-        }
+        }}
     }
     /// Get a reference to the interned value.
     pub fn get(&self, interned: Interned<T>) -> &T {

@@ -117,7 +117,7 @@ impl<T> FixedSizeInterner<T> {
     pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> {
         MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData }
     }
-    pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> {
+    pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> + use<T> {
         (0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16))
     }
     pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> {

@@ -167,7 +167,7 @@ impl<T> Interner<T> {
     pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> {
         MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData }
     }
-    pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> {
+    pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> + use<T> {
         (0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16))
     }
     pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> {

@@ -206,11 +206,11 @@ struct DetailedLoggerFinish<'ctx> {

 impl<'ctx> DetailedLoggerFinish<'ctx> {
     fn cur_file(&mut self) -> &mut BufWriter<File> {
-        if let Some(file) = self.file_for_internal_state.as_mut() {
+        match self.file_for_internal_state.as_mut() { Some(file) => {
             file
-        } else {
+        } _ => {
             &mut self.index_file
-        }
+        }}
     }
     fn pop_rr_action(&mut self) {
         self.file_for_internal_state = None;

@@ -531,11 +531,11 @@ fill: \"#B6E2D3\"
         paths: Vec<Vec<Interned<R::Condition>>>,
     ) -> Result<()> {
         self.make_new_file_for_internal_state_if_needed()?;
-        let file = if let Some(file) = self.file_for_internal_state.as_mut() {
+        let file = match self.file_for_internal_state.as_mut() { Some(file) => {
             file
-        } else {
+        } _ => {
             &mut self.index_file
-        };
+        }};
         writeln!(file, "Path {{")?;
         for (path_idx, condition_indexes) in paths.iter().enumerate() {
             writeln!(file, "{path_idx} {{")?;

@@ -853,7 +853,7 @@ fn check_sort_criteria(
     let sortable_fields = ctx.index.sortable_fields(ctx.txn)?;
     for asc_desc in sort_criteria {
         match asc_desc.member() {
-            Member::Field(ref field) if !crate::is_faceted(field, &sortable_fields) => {
+            Member::Field(field) if !crate::is_faceted(field, &sortable_fields) => {
                 let (valid_fields, hidden_fields) =
                     ctx.index.remove_hidden_fields(ctx.txn, sortable_fields)?;

@@ -266,11 +266,11 @@ pub fn partially_initialized_term_from_word(
 }

 fn find_split_words(ctx: &mut SearchContext<'_>, word: &str) -> Result<Option<Interned<Phrase>>> {
-    if let Some((l, r)) = split_best_frequency(ctx, word)? {
+    match split_best_frequency(ctx, word)? { Some((l, r)) => {
         Ok(Some(ctx.phrase_interner.insert(Phrase { words: vec![Some(l), Some(r)] })))
-    } else {
+    } _ => {
         Ok(None)
-    }
+    }}
 }

 impl Interned<QueryTerm> {

@@ -110,7 +110,7 @@ impl ExactTerm {
     pub fn interned_words<'ctx>(
         &self,
         ctx: &'ctx SearchContext<'ctx>,
-    ) -> impl Iterator<Item = Option<Interned<String>>> + 'ctx {
+    ) -> impl Iterator<Item = Option<Interned<String>>> + 'ctx + use<'ctx> {
         match *self {
             ExactTerm::Phrase(phrase) => {
                 let phrase = ctx.phrase_interner.get(phrase);

@@ -193,7 +193,7 @@ pub fn located_query_terms_from_tokens(

 pub fn number_of_typos_allowed<'ctx>(
     ctx: &SearchContext<'ctx>,
-) -> Result<impl Fn(&str) -> u8 + 'ctx> {
+) -> Result<impl Fn(&str) -> u8 + 'ctx + use<'ctx>> {
     let authorize_typos = ctx.index.authorize_typos(ctx.txn)?;
     let min_len_one_typo = ctx.index.min_word_len_one_typo(ctx.txn)?;
     let min_len_two_typos = ctx.index.min_word_len_two_typos(ctx.txn)?;

@@ -77,11 +77,11 @@ pub fn compute_docids(
             if universe.is_disjoint(ctx.get_phrase_docids(left_phrase)?) {
                 continue;
             }
-        } else if let Some(left_word_docids) = ctx.word_docids(Some(universe), left_word)? {
+        } else { match ctx.word_docids(Some(universe), left_word)? { Some(left_word_docids) => {
             if left_word_docids.is_empty() {
                 continue;
             }
-        }
+        } _ => {}}}
     }

     for (right_word, right_phrase) in right_derivs {

@@ -195,15 +195,15 @@ pub fn compute_phrase_docids(
     }
     let mut candidates = None;
     for word in words.iter().flatten().copied() {
-        if let Some(word_docids) = ctx.word_docids(None, Word::Original(word))? {
+        match ctx.word_docids(None, Word::Original(word))? { Some(word_docids) => {
             if let Some(candidates) = candidates.as_mut() {
                 *candidates &= word_docids;
             } else {
                 candidates = Some(word_docids);
             }
-        } else {
+        } _ => {
             return Ok(RoaringBitmap::new());
-        }
+        }}
     }

     let Some(mut candidates) = candidates else {

@@ -196,10 +196,10 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,
         universe: &RoaringBitmap,
     ) -> Result<Option<RankingRuleOutput<Query>>> {
         let iter = self.iter.as_mut().unwrap();
-        if let Some(mut bucket) = iter.next_bucket()? {
+        match iter.next_bucket()? { Some(mut bucket) => {
             bucket.candidates &= universe;
             Ok(Some(bucket))
-        } else {
+        } _ => {
             let query = self.original_query.as_ref().unwrap().clone();
             Ok(Some(RankingRuleOutput {
                 query,

@@ -211,7 +211,7 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,
                 value: serde_json::Value::Null,
             }),
         }))
-        }
+        }}
     }

 #[tracing::instrument(level = "trace", skip_all, target = "search::sort")]

@@ -27,7 +27,7 @@ pub fn default_db_snapshot_settings_for_test(name: Option<&str>) -> (insta::Sett
 }
 #[macro_export]
 macro_rules! milli_snap {
-    ($value:expr, $name:expr) => {
+    ($value:expr_2021, $name:expr_2021) => {
         let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
         settings.bind(|| {
             let snap = $value;

@@ -37,7 +37,7 @@ macro_rules! milli_snap {
            }
        });
    };
-    ($value:expr) => {
+    ($value:expr_2021) => {
         let (settings, test_name) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
         settings.bind(|| {
             let snap = $value;

@@ -47,7 +47,7 @@ macro_rules! milli_snap {
            }
        });
    };
-    ($value:expr, @$inline:literal) => {
+    ($value:expr_2021, @$inline:literal) => {
         let (settings, test_name) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
         settings.bind(|| {
             let snap = $value;

@@ -61,7 +61,7 @@ macro_rules! milli_snap {
            }
        });
    };
-    ($value:expr, $name:expr, @$inline:literal) => {
+    ($value:expr_2021, $name:expr_2021, @$inline:literal) => {
         let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(None);
         settings.bind(|| {
             let snap = $value;

@@ -142,7 +142,7 @@ db_snap!(index, word_docids, "some_identifier", @"");
 */
 #[macro_export]
 macro_rules! db_snap {
-    ($index:ident, $db_name:ident, $name:expr) => {
+    ($index:ident, $db_name:ident, $name:expr_2021) => {
         let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(Some(
             &format!("{}", $name),
         ));

@@ -178,7 +178,7 @@ macro_rules! db_snap {
            }
        });
    };
-    ($index:ident, $db_name:ident, $name:expr, @$inline:literal) => {
+    ($index:ident, $db_name:ident, $name:expr_2021, @$inline:literal) => {
         let (settings, _) = $crate::snapshot_tests::default_db_snapshot_settings_for_test(Some(&format!("{}", $name)));
         settings.bind(|| {
             let snap = $crate::full_snap_of_db!($index, $db_name);

@@ -647,7 +647,7 @@ mod comparison_bench {
             // insert one document
             //
             for _ in 0..nbr_doc {
-                index.insert(&mut txn, 0, &r.gen(), &once(1).collect());
+                index.insert(&mut txn, 0, &r.r#gen(), &once(1).collect());
             }
             let time_spent = timer.elapsed().as_millis();
             println!("    add {nbr_doc} : {time_spent}ms");
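`gen` is a reserved keyword on edition 2024 (held for `gen` blocks), so a method named `gen` must be called through the raw-identifier syntax; the method itself is unchanged. Sketch (assumes the rand 0.8 API used by the benchmark):

    use rand::Rng;

    fn random_pair(rng: &mut impl Rng) -> (u32, u32) {
        // `rng.gen()` no longer parses on edition 2024; `r#gen` names the
        // same method.
        (rng.r#gen(), rng.r#gen())
    }

    fn main() {
        let mut rng = rand::thread_rng();
        let (a, b) = random_pair(&mut rng);
        println!("{a} {b}");
    }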
@ -143,11 +143,11 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
|
||||
strings_key_buffer.extend_from_slice(docid_bytes);
|
||||
|
||||
// We insert the document id on the Del and the Add side if the field exists.
|
||||
let (ref mut del_exists, ref mut add_exists) =
|
||||
let (del_exists, add_exists) =
|
||||
facet_exists_docids.entry(field_id).or_default();
|
||||
let (ref mut del_is_null, ref mut add_is_null) =
|
||||
let (del_is_null, add_is_null) =
|
||||
facet_is_null_docids.entry(field_id).or_default();
|
||||
let (ref mut del_is_empty, ref mut add_is_empty) =
|
||||
let (del_is_empty, add_is_empty) =
|
||||
facet_is_empty_docids.entry(field_id).or_default();
|
||||
|
||||
if del_value.is_some() {
|
||||
|
@ -453,14 +453,14 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
} in extractors
{
let remove_from_user_provided =
if let ExtractionAction::DocumentOperation(DocumentOperation {
match action
{ ExtractionAction::DocumentOperation(DocumentOperation {
remove_from_user_provided,
}) = action
{
}) => {
remove_from_user_provided
} else {
} _ => {
Default::default()
};
}};

results.push(ExtractedVectorPoints {
manual_vectors: writer_into_reader(manual_vectors_writer)?,
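This `if let ... else` → `match` rewrite (and the many like it below) is the stock fix for the 2024 `if let` temporary-scope change: in the new edition, temporaries created while evaluating the scrutinee are dropped before the `else` branch runs, whereas `match` keeps them alive for all arms and so preserves the 2021 drop order. A hedged sketch of the hazard with a lock guard:

use std::sync::Mutex;

fn first_or_zero(values: &Mutex<Vec<u32>>) -> u32 {
    // Under edition 2021 the MutexGuard from `lock()` lived to the end of the
    // whole `if let`/`else`; edition 2024 drops it before `else`. Rewriting to
    // `match`, as cargo fix does above, keeps the guard alive across both arms.
    match values.lock().unwrap().first().copied() {
        Some(v) => v,
        _ => 0,
    }
}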
@ -789,11 +789,11 @@ fn embed_chunks(
match embedder.embed_index(text_chunks, request_threads) {
Ok(chunks) => Ok(chunks),
Err(error) => {
if let FaultSource::Bug = error.fault {
match error.fault { FaultSource::Bug => {
Err(crate::Error::InternalError(crate::InternalError::VectorEmbeddingError(
error.into(),
)))
} else {
} _ => {
let mut msg =
format!(r"While embedding documents for embedder `{embedder_name}`: {error}");

@ -827,7 +827,7 @@ fn embed_chunks(
}

Err(crate::Error::UserError(crate::UserError::DocumentEmbeddingError(msg)))
}
}}
}
}
}
@ -87,13 +87,13 @@ pub fn writer_into_reader(
/// We use memory mapping inside. So, according to the Rust community, it's unsafe.
pub unsafe fn as_cloneable_grenad(
reader: &grenad::Reader<BufReader<File>>,
) -> Result<grenad::Reader<CursorClonableMmap>> {
) -> Result<grenad::Reader<CursorClonableMmap>> { unsafe {
let file = reader.get_ref().get_ref();
let mmap = memmap2::Mmap::map(file)?;
let cursor = io::Cursor::new(ClonableMmap::from(mmap));
let reader = grenad::Reader::new(cursor)?;
Ok(reader)
}
}}

#[derive(Debug, Clone, Copy)]
pub struct GrenadParameters {
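Wrapping the body of `as_cloneable_grenad` in an explicit `unsafe { ... }` block reflects `unsafe_op_in_unsafe_fn` becoming warn-by-default in Rust 2024: the body of an `unsafe fn` is no longer implicitly treated as an unsafe block. A minimal sketch:

/// Safety: `ptr` must be non-null and point to a valid, initialized byte.
unsafe fn read_first(ptr: *const u8) -> u8 {
    // Edition 2024 warns unless the unsafe operation is marked explicitly.
    unsafe { *ptr }
}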
@ -367,7 +367,7 @@ where

match lmdb_writer_rx.clone().recv_timeout(std::time::Duration::from_millis(500)) {
Err(status) => {
if let Some(typed_chunks) = chunk_accumulator.pop_longest() {
match chunk_accumulator.pop_longest() { Some(typed_chunks) => {
let (docids, is_merged_database) =
write_typed_chunk_into_index(self.wtxn, self.index, &settings_diff, typed_chunks, &mut modified_docids)?;
if !docids.is_empty() {
@ -387,11 +387,11 @@ where
});
}
// If no more chunk remains in the chunk accumulator and the channel is disconected, break.
} else if status == crossbeam_channel::RecvTimeoutError::Disconnected {
} _ => if status == crossbeam_channel::RecvTimeoutError::Disconnected {
break;
} else {
rayon::yield_now();
}
}}
}
Ok(result) => {
let typed_chunk = match result? {
@ -91,7 +91,7 @@ fn create_fields_mapping(
.iter()
// we sort by id here to ensure a deterministic mapping of the fields, that preserves
// the original ordering.
.sorted_by_key(|(&id, _)| id)
.sorted_by_key(|&(&id, _)| id)
.map(|(field, name)| match index_field_map.id(name) {
Some(id) => Ok((*field, id)),
None => index_field_map
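Making the outer `&` explicit in the closure pattern is the other half of the 2024 match-ergonomics change: a `&` pattern may no longer appear while the default binding mode is already by-reference, so the pattern has to spell out the item's full reference structure. A minimal sketch:

fn ids(pairs: &[(&u8, &str)]) -> Vec<u8> {
    // `iter()` yields `&(&u8, &str)`. Edition 2024 only accepts the inner
    // `&id` pattern once the outer `&` is written, resetting the binding
    // mode to by-value.
    pairs.iter().map(|&(&id, _)| id).collect()
}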
@ -689,14 +689,14 @@ pub(crate) fn write_typed_chunk_into_index(
.unwrap();

if embeddings.embedding_count() > usize::from(u8::MAX) {
let external_docid = if let Ok(Some(Ok(index))) = index
let external_docid = match index
.external_id_of(wtxn, std::iter::once(docid))
.map(|it| it.into_iter().next())
{
{ Ok(Some(Ok(index))) => {
index
} else {
} _ => {
format!("internal docid={docid}")
};
}};
return Err(crate::Error::UserError(crate::UserError::TooManyVectors(
external_docid,
embeddings.embedding_count(),
@ -365,11 +365,11 @@ impl<'doc> DelAddFacetValue<'doc> {
match kind {
FacetKind::Number => {
let key = (fid, value);
if let Some(DelAdd::Deletion) = self.f64s.get(&key) {
match self.f64s.get(&key) { Some(DelAdd::Deletion) => {
self.f64s.remove(&key);
} else {
} _ => {
self.f64s.insert(key, DelAdd::Addition);
}
}}
}
FacetKind::String => {
if let Ok(s) = std::str::from_utf8(&value) {
@ -386,11 +386,11 @@ impl<'doc> DelAddFacetValue<'doc> {
match kind {
FacetKind::Number => {
let key = (fid, value);
if let Some(DelAdd::Addition) = self.f64s.get(&key) {
match self.f64s.get(&key) { Some(DelAdd::Addition) => {
self.f64s.remove(&key);
} else {
} _ => {
self.f64s.insert(key, DelAdd::Deletion);
}
}}
}
FacetKind::String => {
if let Ok(s) = std::str::from_utf8(&value) {
@ -95,7 +95,7 @@ pub struct FrozenGeoExtractorData<'extractor> {
impl FrozenGeoExtractorData<'_> {
pub fn iter_and_clear_removed(
&mut self,
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_> {
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_ + use<'_>> {
Ok(mem::take(&mut self.removed)
.iter()
.copied()
@ -105,7 +105,7 @@ impl FrozenGeoExtractorData<'_> {

pub fn iter_and_clear_inserted(
&mut self,
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_> {
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_ + use<'_>> {
Ok(mem::take(&mut self.inserted)
.iter()
.copied()
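The added `use<'_>` bound is the Rust 2024 precise-capturing syntax: in the new edition, return-position `impl Trait` captures all in-scope lifetimes and generics by default, and `use<...>` states the capture set explicitly so the signature keeps compiling with exactly the intended borrows. A hedged sketch with an empty capture set:

// `use<>` pins the capture list to nothing: the returned iterator owns its
// data and may outlive the `data` borrow.
fn numbers(data: &[u32]) -> impl Iterator<Item = u32> + use<> {
    let owned: Vec<u32> = data.to_vec();
    owned.into_iter()
}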
@ -111,9 +111,9 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
let prompt = chunks.prompt();

let old_vectors = old_vectors.vectors_for_key(embedder_name)?.unwrap();
if let Some(new_vectors) = new_vectors.as_ref().and_then(|new_vectors| {
match new_vectors.as_ref().and_then(|new_vectors| {
new_vectors.vectors_for_key(embedder_name).transpose()
}) {
}) { Some(new_vectors) => {
let new_vectors = new_vectors?;
if old_vectors.regenerate != new_vectors.regenerate {
chunks.set_regenerate(update.docid(), new_vectors.regenerate);
@ -159,7 +159,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
)?;
}
}
} else if old_vectors.regenerate {
} _ => if old_vectors.regenerate {
let old_rendered = prompt.render_document(
update.external_document_id(),
update.current(
@ -188,7 +188,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
&unused_vectors_distribution,
)?;
}
}
}}
}
}
DocumentChange::Insertion(insertion) => {
@ -202,9 +202,9 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
let embedder_name = chunks.embedder_name();
let prompt = chunks.prompt();
// if no inserted vectors, then regenerate: true + no embeddings => autogenerate
if let Some(new_vectors) = new_vectors.as_ref().and_then(|new_vectors| {
match new_vectors.as_ref().and_then(|new_vectors| {
new_vectors.vectors_for_key(embedder_name).transpose()
}) {
}) { Some(new_vectors) => {
let new_vectors = new_vectors?;
chunks.set_regenerate(insertion.docid(), new_vectors.regenerate);
if let Some(embeddings) = new_vectors.embeddings {
@ -233,7 +233,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
&unused_vectors_distribution,
)?;
}
} else {
} _ => {
let rendered = prompt.render_document(
insertion.external_document_id(),
insertion.inserted(),
@ -246,7 +246,7 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
rendered,
&unused_vectors_distribution,
)?;
}
}}
}
}
}
@ -424,11 +424,11 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
Ok(())
}
Err(error) => {
if let FaultSource::Bug = error.fault {
match error.fault { FaultSource::Bug => {
Err(crate::Error::InternalError(crate::InternalError::VectorEmbeddingError(
error.into(),
)))
} else {
} _ => {
let mut msg = format!(
r"While embedding documents for embedder `{embedder_name}`: {error}"
);
@ -463,7 +463,7 @@ impl<'a, 'b, 'extractor> Chunks<'a, 'b, 'extractor> {
}

Err(crate::Error::UserError(crate::UserError::DocumentEmbeddingError(msg)))
}
}}
}
};
texts.clear();
@ -19,7 +19,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
// make sure that we have a declared primary key, either fetching it from the index or attempting to guess it.

// do we have an existing declared primary key?
let (primary_key, has_changed) = if let Some(primary_key_from_db) = index.primary_key(rtxn)? {
let (primary_key, has_changed) = match index.primary_key(rtxn)? { Some(primary_key_from_db) => {
// did we request a primary key in the operation?
match primary_key_from_op {
// we did, and it is different from the DB one
@ -30,7 +30,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
}
_ => (primary_key_from_db, false),
}
} else {
} _ => {
// no primary key in the DB => let's set one
// did we request a primary key in the operation?
let primary_key = if let Some(primary_key_from_op) = primary_key_from_op {
@ -76,7 +76,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
}
};
(primary_key, true)
};
}};

match PrimaryKey::new_or_insert(primary_key, new_fields_ids_map) {
Ok(primary_key) => Ok(Ok((primary_key, has_changed))),
@ -95,16 +95,16 @@ fn compute_word_fst(index: &Index, wtxn: &mut RwTxn) -> Result<Option<PrefixDelt

let (word_fst_mmap, prefix_data) = word_fst_builder.build(index, &rtxn)?;
index.main.remap_types::<Str, Bytes>().put(wtxn, WORDS_FST_KEY, &word_fst_mmap)?;
if let Some(PrefixData { prefixes_fst_mmap, prefix_delta }) = prefix_data {
match prefix_data { Some(PrefixData { prefixes_fst_mmap, prefix_delta }) => {
index.main.remap_types::<Str, Bytes>().put(
wtxn,
WORDS_PREFIXES_FST_KEY,
&prefixes_fst_mmap,
)?;
Ok(Some(prefix_delta))
} else {
} _ => {
Ok(None)
}
}}
}

#[tracing::instrument(level = "trace", skip_all, target = "indexing::facet_search")]
@ -233,13 +233,13 @@ impl<'doc> VectorDocumentFromVersions<'doc> {
embedders: &'doc EmbeddingConfigs,
) -> Result<Option<Self>> {
let document = DocumentFromVersions::new(versions);
if let Some(vectors_field) = document.vectors_field()? {
match document.vectors_field()? { Some(vectors_field) => {
let vectors = RawMap::from_raw_value_and_hasher(vectors_field, FxBuildHasher, bump)
.map_err(UserError::SerdeJson)?;
Ok(Some(Self { external_document_id, vectors, embedders }))
} else {
} _ => {
Ok(None)
}
}}
}
}
@ -36,11 +36,11 @@ impl<'a> WordFstBuilder<'a> {
}

self.word_fst_builder.register(deladd, right, &mut |bytes, deladd, is_modified| {
if let Some(prefix_fst_builder) = &mut self.prefix_fst_builder {
match &mut self.prefix_fst_builder { Some(prefix_fst_builder) => {
prefix_fst_builder.insert_word(bytes, deladd, is_modified)
} else {
} _ => {
Ok(())
}
}}
})?;

Ok(())
@ -52,11 +52,11 @@ impl<'a> WordFstBuilder<'a> {
rtxn: &heed::RoTxn,
) -> Result<(Mmap, Option<PrefixData>)> {
let words_fst_mmap = self.word_fst_builder.build(&mut |bytes, deladd, is_modified| {
if let Some(prefix_fst_builder) = &mut self.prefix_fst_builder {
match &mut self.prefix_fst_builder { Some(prefix_fst_builder) => {
prefix_fst_builder.insert_word(bytes, deladd, is_modified)
} else {
} _ => {
Ok(())
}
}}
})?;

let prefix_data = self
@ -1401,18 +1401,18 @@ impl InnerIndexSettingsDiff {
pub fn reindex_searchable_id(&self, id: FieldId) -> Option<DelAddOperation> {
if self.cache_reindex_searchable_without_user_defined || self.cache_exact_attributes {
Some(DelAddOperation::DeletionAndAddition)
} else if let Some(only_additional_fields) = &self.only_additional_fields {
} else { match &self.only_additional_fields { Some(only_additional_fields) => {
let additional_field = self.new.fields_ids_map.name(id).unwrap();
if only_additional_fields.contains(additional_field) {
Some(DelAddOperation::Addition)
} else {
None
}
} else if self.cache_user_defined_searchables {
} _ => if self.cache_user_defined_searchables {
Some(DelAddOperation::DeletionAndAddition)
} else {
None
}
}}}
}

/// List the faceted fields from the inner fid map.
@ -1848,14 +1848,14 @@ pub fn validate_embedding_settings(
}
}

indexing_embedder = if let Setting::Set(mut embedder) = indexing_embedder {
indexing_embedder = match indexing_embedder { Setting::Set(mut embedder) => {
embedder.document_template = validate_prompt(
name,
embedder.document_template,
embedder.document_template_max_bytes,
)?;

if let Some(source) = embedder.source.set() {
match embedder.source.set() { Some(source) => {
let search_embedder = match embedder.search_embedder.clone() {
Setting::Set(search_embedder) => Setting::Set(deserialize_sub_embedder(
search_embedder,
@ -1895,16 +1895,16 @@ pub fn validate_embedding_settings(
&embedder.binary_quantized,
&embedder.distribution,
)?;
} else {
} _ => {
return Err(UserError::MissingSourceForNested {
embedder_name: NestingContext::Indexing.embedder_name_with_context(name),
}
.into());
}
}}
Setting::Set(embedder)
} else {
} _ => {
indexing_embedder
};
}};
}
}
Ok(Setting::Set(EmbeddingSettings {
@ -239,15 +239,15 @@ impl Embedder {

let model = BertModel::load(vb, &config).map_err(NewEmbedderError::load_model)?;

if let Some(pp) = tokenizer.get_padding_mut() {
match tokenizer.get_padding_mut() { Some(pp) => {
pp.strategy = tokenizers::PaddingStrategy::BatchLongest
} else {
} _ => {
let pp = PaddingParams {
strategy: tokenizers::PaddingStrategy::BatchLongest,
..Default::default()
};
tokenizer.with_padding(Some(pp));
}
}}

let mut this = Self {
model,
@ -348,11 +348,11 @@ impl ArroyWrapper {
searcher.candidates(filter);
}

if let Some(mut ret) = searcher.by_item(rtxn, item)? {
match searcher.by_item(rtxn, item)? { Some(mut ret) => {
results.append(&mut ret);
} else {
} _ => {
break;
}
}}
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
Ok(results)
@ -402,19 +402,19 @@ impl ArroyWrapper {

if self.quantized {
for reader in self.readers(rtxn, self.quantized_db()) {
if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
match reader?.item_vector(rtxn, item_id)? { Some(vec) => {
vectors.push(vec);
} else {
} _ => {
break;
}
}}
}
} else {
for reader in self.readers(rtxn, self.angular_db()) {
if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
match reader?.item_vector(rtxn, item_id)? { Some(vec) => {
vectors.push(vec);
} else {
} _ => {
break;
}
}}
}
}
Ok(vectors)
@ -150,11 +150,11 @@ impl Embedder {
headers: options.headers,
};

let dimensions = if let Some(dimensions) = options.dimensions {
let dimensions = match options.dimensions { Some(dimensions) => {
dimensions
} else {
} _ => {
infer_dimensions(&data)?
};
}};

Ok(Self {
data,
@ -8,7 +8,7 @@ use Criterion::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};

macro_rules! test_distinct {
($func:ident, $distinct:ident, $exhaustive:ident, $limit:expr, $offset:expr, $criteria:expr, $n_res:expr) => {
($func:ident, $distinct:ident, $exhaustive:ident, $limit:expr, $offset:expr_2021, $criteria:expr_2021, $n_res:expr_2021) => {
#[test]
fn $func() {
let criteria = $criteria;
@ -5,7 +5,7 @@ use Criterion::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};

macro_rules! test_filter {
($func:ident, $filter:expr) => {
($func:ident, $filter:expr_2021) => {
#[test]
fn $func() {
let criteria = vec![Words, Typo, Proximity, Attribute, Exactness];
@ -220,11 +220,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
id = Some(document.id.clone())
}
} else if let Some(("asc_desc_rank", filter)) = filter.split_once('<') {
if document.asc_desc_rank < filter.parse().unwrap() {
if document.asc_desc_rank < filter.parse::<u32>().unwrap() {
id = Some(document.id.clone())
}
} else if let Some(("asc_desc_rank", filter)) = filter.split_once('>') {
if document.asc_desc_rank > filter.parse().unwrap() {
if document.asc_desc_rank > filter.parse::<u32>().unwrap() {
id = Some(document.id.clone())
}
} else if filter.starts_with("_geoRadius") {
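The turbofish here spells out the parse target instead of leaving it to inference from the surrounding comparison. A small sketch of the same idiom:

fn parse_rank(s: &str) -> u32 {
    // `parse::<u32>()` fixes the target type at the call site, so the
    // comparison around it no longer has to drive type inference.
    s.parse::<u32>().unwrap_or(0)
}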
@ -20,7 +20,7 @@ const DISALLOW_OPTIONAL_WORDS: TermsMatchingStrategy = TermsMatchingStrategy::Al
const ASC_DESC_CANDIDATES_THRESHOLD: usize = 1000;

macro_rules! test_criterion {
($func:ident, $optional_word:ident, $criteria:expr, $sort_criteria:expr) => {
($func:ident, $optional_word:ident, $criteria:expr_2021, $sort_criteria:expr_2021) => {
#[test]
fn $func() {
let criteria = $criteria;
@ -1,7 +1,7 @@
[package]
name = "tracing-trace"
version = "0.1.0"
edition = "2021"
edition = "2024"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html