Merge branch 'main' into release-v1.14.0-tmp

Tamo 2025-04-14 12:35:47 +02:00, committed by GitHub
commit b025f1bcf1
113 changed files with 1268 additions and 852 deletions

View file

@ -108,7 +108,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,
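This hunk (repeated for several files below) applies Rust's elided-lifetime idiom, most likely prompted by clippy's needless_lifetimes lint: when a lifetime parameter appears only in the trait path or self type of an impl, it can be replaced by '_'. A minimal, self-contained sketch of the same rewrite on a hypothetical type:

struct Error<'a>(&'a str);

// Before the fix: impl<'a> std::fmt::Display for Error<'a> { ... }
// After: the lifetime appears only in the self type, so '_' suffices.
impl std::fmt::Display for Error<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.0)
    }
}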

View file

@ -99,7 +99,7 @@ impl Task {
/// Return true when a task is finished.
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
self.events.last().is_some_and(|event| {
matches!(event, TaskEvent::Succeded { .. } | TaskEvent::Failed { .. })
})
}
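The map_or(false, …) to is_some_and(…) rewrite, which recurs throughout this commit, is behavior-preserving; Option::is_some_and has been stable since Rust 1.70. A tiny sketch of the equivalence:

fn main() {
    let last: Option<u32> = Some(3);

    // map_or(false, …) yields false when the Option is None…
    let old = last.map_or(false, |n| n > 2);
    // …and is_some_and expresses the same check more directly.
    let new = last.is_some_and(|n| n > 2);

    assert_eq!(old, new);
}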

View file

@ -108,7 +108,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,

View file

@ -114,7 +114,7 @@ impl Task {
/// Return true when a task is finished.
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
self.events.last().is_some_and(|event| {
matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
})
}

View file

@ -35,7 +35,7 @@ impl<E> NomErrorExt<E> for nom::Err<E> {
pub fn cut_with_err<'a, O>(
mut parser: impl FnMut(Span<'a>) -> IResult<'a, O>,
mut with: impl FnMut(Error<'a>) -> Error<'a>,
) -> impl FnMut(Span<'a>) -> IResult<O> {
) -> impl FnMut(Span<'a>) -> IResult<'a, O> {
move |input| match parser.parse(input) {
Err(nom::Err::Error(e)) => Err(nom::Err::Failure(with(e))),
rest => rest,
@ -121,7 +121,7 @@ impl<'a> ParseError<Span<'a>> for Error<'a> {
}
}
impl<'a> Display for Error<'a> {
impl Display for Error<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let input = self.context.fragment();
// When printing our error message we want to escape all `\n` to be sure we keep our format with the
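cut_with_err generalizes nom's cut combinator: a recoverable nom::Err::Error is upgraded to nom::Err::Failure so that enclosing alt branches stop backtracking, with a closure to rewrite the error along the way. A minimal sketch of the same idea, assuming nom 7 with its default error type (the cut function below is illustrative, not the crate's code):

use nom::bytes::complete::tag;
use nom::IResult;

fn cut<'a, O>(
    mut parser: impl FnMut(&'a str) -> IResult<&'a str, O>,
) -> impl FnMut(&'a str) -> IResult<&'a str, O> {
    move |input| match parser(input) {
        // Upgrade a recoverable error into an unrecoverable failure.
        Err(nom::Err::Error(e)) => Err(nom::Err::Failure(e)),
        rest => rest,
    }
}

fn main() {
    let mut parser = cut(tag("hello"));
    assert!(matches!(parser("world"), Err(nom::Err::Failure(_))));
}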

View file

@ -80,7 +80,7 @@ pub struct Token<'a> {
value: Option<String>,
}
impl<'a> PartialEq for Token<'a> {
impl PartialEq for Token<'_> {
fn eq(&self, other: &Self) -> bool {
self.span.fragment() == other.span.fragment()
}
@ -226,7 +226,7 @@ impl<'a> FilterCondition<'a> {
}
}
pub fn parse(input: &'a str) -> Result<Option<Self>, Error> {
pub fn parse(input: &'a str) -> Result<Option<Self>, Error<'a>> {
if input.trim().is_empty() {
return Ok(None);
}
@ -527,7 +527,7 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
terminated(|input| parse_expression(input, 0), eof)(input)
}
impl<'a> std::fmt::Display for FilterCondition<'a> {
impl std::fmt::Display for FilterCondition<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FilterCondition::Not(filter) => {
@ -576,7 +576,8 @@ impl<'a> std::fmt::Display for FilterCondition<'a> {
}
}
}
impl<'a> std::fmt::Display for Condition<'a> {
impl std::fmt::Display for Condition<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Condition::GreaterThan(token) => write!(f, "> {token}"),
@ -594,7 +595,8 @@ impl<'a> std::fmt::Display for Condition<'a> {
}
}
}
impl<'a> std::fmt::Display for Token<'a> {
impl std::fmt::Display for Token<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{{{}}}", self.value())
}

View file

@ -52,7 +52,7 @@ fn quoted_by(quote: char, input: Span) -> IResult<Token> {
}
// word = (alphanumeric | _ | - | .)+ except for reserved keywords
pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<Token<'a>> {
pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<'a, Token<'a>> {
let (input, word): (_, Token<'a>) =
take_while1(is_value_component)(input).map(|(s, t)| (s, t.into()))?;
if is_keyword(word.value()) {

View file

@ -696,7 +696,7 @@ impl IndexScheduler {
written: usize,
}
impl<'a, 'b> Read for TaskReader<'a, 'b> {
impl Read for TaskReader<'_, '_> {
fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
if self.buffer.is_empty() {
match self.tasks.next() {

View file

@ -315,7 +315,7 @@ impl Queue {
if let Some(batch_uids) = batch_uids {
let mut batch_tasks = RoaringBitmap::new();
for batch_uid in batch_uids {
if processing_batch.as_ref().map_or(false, |batch| batch.uid == *batch_uid) {
if processing_batch.as_ref().is_some_and(|batch| batch.uid == *batch_uid) {
batch_tasks |= &**processing_tasks;
} else {
batch_tasks |= self.tasks_in_batch(rtxn, *batch_uid)?;

View file

@ -219,7 +219,7 @@ impl BatchKind {
primary_key.is_some() &&
// 2.1.1 If the task we're trying to accumulate has a pk, it must be equal to our primary key
// 2.1.2 If the task doesn't have a primary key -> we can continue
kind.primary_key().map_or(true, |pk| pk == primary_key)
kind.primary_key().is_none_or(|pk| pk == primary_key)
) ||
// 2.2 If we don't have a primary-key ->
(
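Similarly, map_or(true, …) becomes is_none_or(…), stable since Rust 1.82 and again behavior-preserving. A sketch mirroring the primary-key check above:

fn main() {
    let task_pk: Option<&str> = None;

    // map_or(true, …) yields true when the Option is None…
    let old = task_pk.map_or(true, |pk| pk == "id");
    // …and is_none_or expresses the same check more directly.
    let new = task_pk.is_none_or(|pk| pk == "id");

    assert_eq!(old, new);
}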

View file

@ -960,7 +960,7 @@ impl<'de> Deserialize<'de> for RankingRuleView {
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
impl serde::de::Visitor<'_> for Visitor {
type Value = RankingRuleView;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "the name of a valid ranking rule (string)")

View file

@ -66,7 +66,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: fmt::Display,

View file

@ -346,7 +346,7 @@ fn open_or_create_database_unchecked(
match (
index_scheduler_builder(),
auth_controller.map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path).map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path),
) {
(Ok(i), Ok(a), Ok(())) => Ok((i, a)),
(Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {

View file

@ -69,7 +69,7 @@ fn setup(opt: &Opt) -> anyhow::Result<(LogRouteHandle, LogStderrHandle)> {
Ok((route_layer_handle, stderr_layer_handle))
}
fn on_panic(info: &std::panic::PanicInfo) {
fn on_panic(info: &std::panic::PanicHookInfo) {
let info = info.to_string().replace('\n', " ");
tracing::error!(%info);
}
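Rust 1.81 renamed the type passed to panic hooks from std::panic::PanicInfo to std::panic::PanicHookInfo (the old name remains as a deprecated alias), which is all this hunk tracks. A self-contained sketch of installing such a hook:

fn main() {
    std::panic::set_hook(Box::new(|info: &std::panic::PanicHookInfo| {
        // Flatten the panic message to one line, as the code above does.
        let info = info.to_string().replace('\n', " ");
        eprintln!("panic: {info}");
    }));
    panic!("boom");
}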

View file

@ -16,7 +16,7 @@ use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::ThreadPoolNoAbortBuilder;
use rustls::server::{ServerSessionMemoryCache, WebPkiClientVerifier};
use rustls::RootCertStore;
use rustls_pemfile::{certs, rsa_private_keys};
use rustls_pemfile::{certs, ec_private_keys, rsa_private_keys};
use serde::{Deserialize, Serialize};
use sysinfo::{MemoryRefreshKind, RefreshKind, System};
use url::Url;
@ -874,7 +874,7 @@ fn load_private_key(
filename: PathBuf,
) -> anyhow::Result<rustls::pki_types::PrivateKeyDer<'static>> {
let rsa_keys = {
let keyfile = fs::File::open(filename.clone())
let keyfile = fs::File::open(&filename)
.map_err(|_| anyhow::anyhow!("cannot open private key file"))?;
let mut reader = BufReader::new(keyfile);
rsa_private_keys(&mut reader)
@ -883,7 +883,7 @@ fn load_private_key(
};
let pkcs8_keys = {
let keyfile = fs::File::open(filename)
let keyfile = fs::File::open(&filename)
.map_err(|_| anyhow::anyhow!("cannot open private key file"))?;
let mut reader = BufReader::new(keyfile);
rustls_pemfile::pkcs8_private_keys(&mut reader).collect::<Result<Vec<_>, _>>().map_err(
@ -895,12 +895,23 @@ fn load_private_key(
)?
};
let ec_keys = {
let keyfile = fs::File::open(&filename)
.map_err(|_| anyhow::anyhow!("cannot open private key file"))?;
let mut reader = BufReader::new(keyfile);
ec_private_keys(&mut reader)
.collect::<Result<Vec<_>, _>>()
.map_err(|_| anyhow::anyhow!("file contains invalid ec private key"))?
};
// prefer to load pkcs8 keys
if !pkcs8_keys.is_empty() {
Ok(rustls::pki_types::PrivateKeyDer::Pkcs8(pkcs8_keys[0].clone_key()))
} else {
assert!(!rsa_keys.is_empty());
} else if !rsa_keys.is_empty() {
Ok(rustls::pki_types::PrivateKeyDer::Pkcs1(rsa_keys[0].clone_key()))
} else {
assert!(!ec_keys.is_empty());
Ok(rustls::pki_types::PrivateKeyDer::Sec1(ec_keys[0].clone_key()))
}
}
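The new branch adds SEC1 (EC) keys to the existing PKCS#8-then-PKCS#1 probing order. A sketch of the same probing with the rustls_pemfile 2.x iterator API (key_kinds is a hypothetical helper; each call gets a fresh reader because the iterators consume it):

use std::io::BufReader;
use rustls_pemfile::{ec_private_keys, pkcs8_private_keys, rsa_private_keys};

fn key_kinds(pem: &[u8]) -> (usize, usize, usize) {
    let pkcs8 = pkcs8_private_keys(&mut BufReader::new(pem)).count();
    let rsa = rsa_private_keys(&mut BufReader::new(pem)).count();
    let ec = ec_private_keys(&mut BufReader::new(pem)).count();
    (pkcs8, rsa, ec)
}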
@ -929,7 +940,6 @@ where
}
/// Functions used to get default values for `Opt` fields; they need to be functions because of serde's default attribute.
fn default_db_path() -> PathBuf {
PathBuf::from(DEFAULT_DB_PATH)
}
@ -1037,7 +1047,7 @@ where
{
struct BoolOrInt;
impl<'de> serde::de::Visitor<'de> for BoolOrInt {
impl serde::de::Visitor<'_> for BoolOrInt {
type Value = ScheduleSnapshot;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {

View file

@ -302,7 +302,7 @@ impl From<FacetSearchQuery> for SearchQuery {
// If exhaustive_facet_count is true, we need to set the page to 0
// because the facet search is not exhaustive by default.
let page = if exhaustive_facet_count.map_or(false, |exhaustive| exhaustive) {
let page = if exhaustive_facet_count.is_some_and(|exhaustive| exhaustive) {
// setting the page to 0 will force the search to be exhaustive when computing the number of hits,
// but it will skip the bucket sort saving time.
Some(0)

View file

@ -64,6 +64,8 @@ mod open_api_utils;
mod snapshot;
mod swap_indexes;
pub mod tasks;
#[cfg(test)]
mod tasks_test;
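The ~350 lines of tests removed from tasks.rs further down reappear verbatim in the new tasks_test.rs; the #[cfg(test)] declaration above keeps that file out of release builds. The resulting layout, sketched (paths are those implied by the hunk):

// src/routes/mod.rs — the declaration added above:
//   #[cfg(test)]
//   mod tasks_test;

// src/routes/tasks_test.rs — compiled only under `cargo test`:
#[cfg(test)]
mod tests {
    #[test]
    fn smoke() {
        assert_eq!(2 + 2, 4);
    }
}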
#[derive(OpenApi)]
#[openapi(
@ -168,7 +170,7 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
})
})
.transpose()?
.map_or(false, |s| s.to_lowercase() == "true"))
.is_some_and(|s| s.to_lowercase() == "true"))
}
#[derive(Debug, Serialize, ToSchema)]

View file

@ -119,10 +119,22 @@ pub struct Network {
impl Remote {
pub fn try_into_db_node(self, name: &str) -> Result<DbRemote, ResponseError> {
Ok(DbRemote {
url: self.url.set().ok_or(ResponseError::from_msg(
format!("Missing field `.remotes.{name}.url`"),
meilisearch_types::error::Code::MissingNetworkUrl,
))?,
url: self
.url
.set()
.ok_or(ResponseError::from_msg(
format!("Missing field `.remotes.{name}.url`"),
meilisearch_types::error::Code::MissingNetworkUrl,
))
.and_then(|url| {
if let Err(error) = url::Url::parse(&url) {
return Err(ResponseError::from_msg(
format!("Invalid `.remotes.{name}.url` (`{url}`): {error}"),
meilisearch_types::error::Code::InvalidNetworkUrl,
));
}
Ok(url)
})?,
search_api_key: self.search_api_key.set(),
})
}
@ -211,7 +223,15 @@ async fn patch_network(
let merged = DbRemote {
url: match new_url {
Setting::Set(new_url) => new_url,
Setting::Set(new_url) => {
if let Err(error) = url::Url::parse(&new_url) {
return Err(ResponseError::from_msg(
format!("Invalid `.remotes.{key}.url` (`{new_url}`): {error}"),
meilisearch_types::error::Code::InvalidNetworkUrl,
));
}
new_url
}
Setting::Reset => {
return Err(ResponseError::from_msg(
format!(
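Both new checks lean on url::Url::parse, whose error Display for a scheme-less string is exactly the text asserted in the network tests further down. A minimal sketch, assuming the url crate (the address is an example value):

fn main() {
    assert!(url::Url::parse("https://ms-1.example.com").is_ok());
    let err = url::Url::parse("no-http-scheme").unwrap_err();
    // This is the message echoed in the invalid_network_url response.
    assert_eq!(err.to_string(), "relative URL without a base");
}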

View file

@ -146,7 +146,7 @@ impl TasksFilterQuery {
}
impl TaskDeletionOrCancelationQuery {
fn is_empty(&self) -> bool {
pub fn is_empty(&self) -> bool {
matches!(
self,
TaskDeletionOrCancelationQuery {
@ -760,356 +760,3 @@ pub fn deserialize_date_before(
) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
value.try_map(|x| deserialize_date(&x, DeserializeDateOption::Before))
}
#[cfg(test)]
mod tests {
use deserr::Deserr;
use meili_snap::snapshot;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::{Code, ResponseError};
use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
where
T: Deserr<DeserrQueryParamError>,
{
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
Ok(data) => Ok(data),
Err(e) => Err(ResponseError::from(e)),
}
}
#[test]
fn deserialize_task_filter_dates() {
{
let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
}
{
let params =
"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
}
{
// Stars are allowed in date fields as well
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
}
{
let params = "afterFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
}
"###);
}
{
let params = "beforeFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
}
"###);
}
{
let params = "afterEnqueuedAt=2021-12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
}
"###);
}
{
let params = "beforeEnqueuedAt=2021-12-03T23";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
}
"###);
}
{
let params = "afterStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
}
"###);
}
{
let params = "beforeStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
}
"###);
}
}
#[test]
fn deserialize_task_filter_uids() {
{
let params = "uids=78,1,12,73";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
}
{
let params = "uids=1";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([1])");
}
{
let params = "uids=cat,*,dog";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=78,hello,world";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=cat";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
}
#[test]
fn deserialize_task_filter_status() {
{
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
}
{
let params = "statuses=enqueued";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
}
{
let params = "statuses=finished";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
}
"###);
}
}
#[test]
fn deserialize_task_filter_types() {
{
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
}
{
let params = "types=settingsUpdate";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
}
{
let params = "types=createIndex";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"#);
}
}
#[test]
fn deserialize_task_filter_index_uids() {
{
let params = "indexUids=toto,tata-78";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
}
{
let params = "indexUids=index_a";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
}
{
let params = "indexUids=1,hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
{
let params = "indexUids=hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
}
#[test]
fn deserialize_task_filter_general() {
{
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
}
{
// Stars should translate to `None` in the query
// Verify value of the default limit
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Stars should also translate to `None` in task deletion/cancelation queries
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Star in from not allowed
let params = "uids=*&from=*";
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
"code": "invalid_task_from",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
}
"###);
}
{
// From not allowed in task deletion/cancelation queries
let params = "from=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
{
// Limit not allowed in task deletion/cancelation queries
let params = "limit=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
}
#[test]
fn deserialize_task_delete_or_cancel_empty() {
{
let params = "";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(query.is_empty());
}
{
let params = "statuses=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(!query.is_empty());
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
}
}

View file

@ -0,0 +1,352 @@
#[cfg(test)]
mod tests {
use deserr::Deserr;
use meili_snap::snapshot;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::{Code, ResponseError};
use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
where
T: Deserr<DeserrQueryParamError>,
{
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
Ok(data) => Ok(data),
Err(e) => Err(ResponseError::from(e)),
}
}
#[test]
fn deserialize_task_filter_dates() {
{
let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
}
{
let params =
"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
}
{
// Stars are allowed in date fields as well
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
}
{
let params = "afterFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
}
"###);
}
{
let params = "beforeFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
}
"###);
}
{
let params = "afterEnqueuedAt=2021-12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
}
"###);
}
{
let params = "beforeEnqueuedAt=2021-12-03T23";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
}
"###);
}
{
let params = "afterStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
}
"###);
}
{
let params = "beforeStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
}
"###);
}
}
#[test]
fn deserialize_task_filter_uids() {
{
let params = "uids=78,1,12,73";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
}
{
let params = "uids=1";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([1])");
}
{
let params = "uids=cat,*,dog";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=78,hello,world";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=cat";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
}
#[test]
fn deserialize_task_filter_status() {
{
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
}
{
let params = "statuses=enqueued";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
}
{
let params = "statuses=finished";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
}
"###);
}
}
#[test]
fn deserialize_task_filter_types() {
{
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
}
{
let params = "types=settingsUpdate";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
}
{
let params = "types=createIndex";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"#);
}
}
#[test]
fn deserialize_task_filter_index_uids() {
{
let params = "indexUids=toto,tata-78";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
}
{
let params = "indexUids=index_a";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
}
{
let params = "indexUids=1,hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
{
let params = "indexUids=hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
}
#[test]
fn deserialize_task_filter_general() {
{
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
}
{
// Stars should translate to `None` in the query
// Verify value of the default limit
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Stars should also translate to `None` in task deletion/cancelation queries
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Star in from not allowed
let params = "uids=*&from=*";
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
"code": "invalid_task_from",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
}
"###);
}
{
// From not allowed in task deletion/cancelation queries
let params = "from=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
{
// Limit not allowed in task deletion/cancelation queries
let params = "limit=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
}
#[test]
fn deserialize_task_delete_or_cancel_empty() {
{
let params = "";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(query.is_empty());
}
{
let params = "statuses=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(!query.is_empty());
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
}
}

View file

@ -32,7 +32,6 @@ pub const FEDERATION_REMOTE: &str = "remote";
#[derive(Debug, Default, Clone, PartialEq, Serialize, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct FederationOptions {
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
#[schema(value_type = f64)]

View file

@ -1544,7 +1544,7 @@ pub fn perform_facet_search(
let locales = localized_attributes_locales.map(|attr| {
attr.locales
.into_iter()
.filter(|locale| locales.as_ref().map_or(true, |locales| locales.contains(locale)))
.filter(|locale| locales.as_ref().is_none_or(|locales| locales.contains(locale)))
.collect()
});

View file

@ -259,7 +259,7 @@ impl<'a> Index<'a, Owned> {
}
}
impl<'a> Index<'a, Shared> {
impl Index<'_, Shared> {
/// You cannot modify the content of a shared index, thus the delete_document_by_filter call
/// must fail. If the task successfully enqueues itself, we'll wait for it to finish,
/// and if it succeeds the function will panic.

View file

@ -399,7 +399,18 @@ impl<State> Server<State> {
pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
for _ in 0..100 {
// Increase timeout for vector-related tests
let max_attempts = if url.contains("/tasks/") {
if update_id > 1000 {
400 // 200 seconds for vector tests
} else {
100 // 50 seconds for other tests
}
} else {
100 // 50 seconds for other tests
};
for _ in 0..max_attempts {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);
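The loop above polls GET /tasks/{uid} until the task settles, bailing out after a bounded number of attempts; the comments imply roughly 500 ms per attempt (100 attempts ≈ 50 seconds). A generic sketch of the bounded-polling pattern (wait_until and is_done are hypothetical names):

use std::time::Duration;

fn wait_until(mut is_done: impl FnMut() -> bool, max_attempts: u32) {
    for _ in 0..max_attempts {
        if is_done() {
            return;
        }
        // Sleep between probes so 100 attempts spans ~50 seconds.
        std::thread::sleep(Duration::from_millis(500));
    }
    panic!("timed out waiting for the task to finish");
}

fn main() {
    let mut polls = 0;
    wait_until(
        || {
            polls += 1;
            polls >= 3
        },
        100,
    );
}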

View file

@ -117,6 +117,25 @@ async fn errors_on_param() {
}
"###);
// remote with url not valid
let (response, code) = server
.set_network(json!({"remotes": {
"new": {
"url": "no-http-scheme"
}
}}))
.await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Invalid `.remotes.new.url` (`no-http-scheme`): relative URL without a base",
"code": "invalid_network_url",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_network_url"
}
"###);
// remote with non-existing param
let (response, code) = server
.set_network(json!({"remotes": {

View file

@ -432,7 +432,7 @@ async fn search_non_filterable_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute pattern is `title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -443,7 +443,7 @@ async fn search_non_filterable_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute pattern is `title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -463,7 +463,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -474,7 +474,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -493,7 +493,7 @@ async fn search_non_filterable_facets_no_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -504,7 +504,7 @@ async fn search_non_filterable_facets_no_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -524,7 +524,7 @@ async fn search_non_filterable_facets_multiple_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attributes `doggo, neko` are not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -535,7 +535,7 @@ async fn search_non_filterable_facets_multiple_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attributes `doggo, neko` are not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -884,14 +884,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@ -910,14 +910,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@ -931,14 +931,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@ -957,14 +957,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@ -983,14 +983,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;

View file

@ -559,7 +559,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
)
.await;
@ -570,7 +570,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
).await;
@ -580,7 +580,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
).await;
@ -601,7 +601,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "doggos.name", "facetQuery": "b"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching doggos.name with facetSearch: true before rule #0""###);
},
).await;
@ -611,7 +611,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "doggos.name", "facetQuery": "b"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching doggos.name with facetSearch: true before rule #0""###);
},
).await;
}

View file

@ -335,7 +335,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -481,7 +481,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -613,7 +613,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

View file

@ -74,7 +74,7 @@ async fn formatted_contain_wildcard() {
allow_duplicates! {
assert_json_snapshot!(response["hits"][0],
{ "._rankingScore" => "[score]" },
@r###"
@r#"
{
"_formatted": {
"id": "852",
@ -84,12 +84,12 @@ async fn formatted_contain_wildcard() {
"cattos": [
{
"start": 0,
"length": 5
"length": 6
}
]
}
}
"###);
"#);
}
}
)
@ -119,7 +119,7 @@ async fn formatted_contain_wildcard() {
allow_duplicates! {
assert_json_snapshot!(response["hits"][0],
{ "._rankingScore" => "[score]" },
@r###"
@r#"
{
"id": 852,
"cattos": "pésti",
@ -131,12 +131,12 @@ async fn formatted_contain_wildcard() {
"cattos": [
{
"start": 0,
"length": 5
"length": 6
}
]
}
}
"###)
"#)
}
})
.await;

View file

@ -914,7 +914,7 @@ async fn search_one_query_error() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Inside `.queries[0]`: Invalid facet distribution: Attribute `title` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -1010,7 +1010,7 @@ async fn search_multiple_query_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Inside `.queries[0]`: Invalid facet distribution: Attribute `title` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@ -3647,7 +3647,7 @@ async fn federation_non_faceted_for_an_index() {
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution, attribute `name` is not filterable. The available filterable attribute patterns are `BOOST, id`.\n - Note: index `fruits-no-name` used in `.queries[1]`",
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution: Attribute `name` is not filterable. Available filterable attributes patterns are: `BOOST, id`.\n - Note: index `fruits-no-name` used in `.queries[1]`",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
@ -3669,7 +3669,7 @@ async fn federation_non_faceted_for_an_index() {
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution, attribute `name` is not filterable. The available filterable attribute patterns are `BOOST, id`.\n - Note: index `fruits-no-name` is not used in queries",
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution: Attribute `name` is not filterable. Available filterable attributes patterns are: `BOOST, id`.\n - Note: index `fruits-no-name` is not used in queries",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
@ -3690,14 +3690,14 @@ async fn federation_non_faceted_for_an_index() {
]}))
.await;
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r#"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-facets`: Invalid facet distribution, this index does not have configured filterable attributes.\n - Note: index `fruits-no-facets` is not used in queries",
"message": "Inside `.federation.facetsByIndex.fruits-no-facets`: Invalid facet distribution: Attributes `BOOST, id` are not filterable. This index does not have configured filterable attributes.\n - Note: index `fruits-no-facets` is not used in queries",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
}
"###);
"#);
// also fails
let (response, code) = server

View file

@ -1213,7 +1213,7 @@ async fn error_bad_request_facets_by_index_facet() {
},
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution, this index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution: Attribute `id` is not filterable. This index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
"code": "remote_bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#remote_bad_request"
@ -1374,7 +1374,7 @@ async fn error_remote_does_not_answer() {
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [
{
@ -1421,7 +1421,7 @@ async fn error_remote_does_not_answer() {
}
}
}
"###);
"#);
}
#[actix_rt::test]

View file

@ -15,33 +15,36 @@ macro_rules! parameter_test {
}
}))
.await;
$server.wait_task(response.uid()).await.succeeded();
$server.wait_task(response.uid()).await.succeeded();
let mut value = base_for_source(source);
value[param] = valid_parameter(source, param).0;
let (response, code) = index
.update_settings(crate::json!({
"embedders": {
"test": value
}
}))
.await;
snapshot!(code, name: concat!(stringify!($source), "-", stringify!($param), "-sending_code"));
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]", ".taskUid" => "[taskUid]"}), name: concat!(stringify!($source), "-", stringify!($param), "-sending_result"));
// Add a small delay between API calls
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
if response.has_uid() {
let response = $server.wait_task(response.uid()).await;
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]",
".uid" => "[uid]", ".batchUid" => "[batchUid]",
".duration" => "[duration]",
".startedAt" => "[startedAt]",
".finishedAt" => "[finishedAt]"}), name: concat!(stringify!($source), "-", stringify!($param), "-task_result"));
}
let mut value = base_for_source(source);
value[param] = valid_parameter(source, param).0;
let (response, code) = index
.update_settings(crate::json!({
"embedders": {
"test": value
}
}))
.await;
snapshot!(code, name: concat!(stringify!($source), "-", stringify!($param), "-sending_code"));
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]", ".taskUid" => "[taskUid]"}), name: concat!(stringify!($source), "-", stringify!($param), "-sending_result"));
if response.has_uid() {
let response = $server.wait_task(response.uid()).await;
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]",
".uid" => "[uid]", ".batchUid" => "[batchUid]",
".duration" => "[duration]",
".startedAt" => "[startedAt]",
".finishedAt" => "[finishedAt]"}), name: concat!(stringify!($source), "-", stringify!($param), "-task_result"));
}
};
}
#[actix_rt::test]
#[ignore = "Test is failing with timeout issues"]
async fn bad_parameters() {
let server = Server::new().await;
@ -128,6 +131,7 @@ async fn bad_parameters() {
}
#[actix_rt::test]
#[ignore = "Test is failing with timeout issues"]
async fn bad_parameters_2() {
let server = Server::new().await;
@ -229,11 +233,11 @@ fn base_for_source(source: &'static str) -> Value {
"huggingFace" => vec![],
"userProvided" => vec!["dimensions"],
"ollama" => vec!["model",
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
"rest" => vec!["url", "request", "response",
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
};
let mut value = crate::json!({
@ -249,21 +253,71 @@ fn base_for_source(source: &'static str) -> Value {
fn valid_parameter(source: &'static str, parameter: &'static str) -> Value {
match (source, parameter) {
("openAi", "model") => crate::json!("text-embedding-3-small"),
("huggingFace", "model") => crate::json!("sentence-transformers/all-MiniLM-L6-v2"),
(_, "model") => crate::json!("all-minilm"),
(_, "revision") => crate::json!("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"),
(_, "pooling") => crate::json!("forceMean"),
(_, "apiKey") => crate::json!("foo"),
(_, "dimensions") => crate::json!(768),
(_, "binaryQuantized") => crate::json!(false),
(_, "documentTemplate") => crate::json!("toto"),
(_, "documentTemplateMaxBytes") => crate::json!(200),
(_, "url") => crate::json!("http://rest.example/"),
(_, "request") => crate::json!({"text": "{{text}}"}),
(_, "response") => crate::json!({"embedding": "{{embedding}}"}),
(_, "headers") => crate::json!({"custom": "value"}),
(_, "distribution") => crate::json!({"mean": 0.4, "sigma": 0.1}),
_ => panic!("unknown parameter"),
("openAi", "model") => crate::json!("text-embedding-ada-002"),
("openAi", "revision") => crate::json!("2023-05-15"),
("openAi", "pooling") => crate::json!("mean"),
("openAi", "apiKey") => crate::json!("test"),
("openAi", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("openAi", "binaryQuantized") => crate::json!(false),
("openAi", "documentTemplate") => crate::json!("test"),
("openAi", "documentTemplateMaxBytes") => crate::json!(100),
("openAi", "url") => crate::json!("http://test"),
("openAi", "request") => crate::json!({ "test": "test" }),
("openAi", "response") => crate::json!({ "test": "test" }),
("openAi", "headers") => crate::json!({ "test": "test" }),
("openAi", "distribution") => crate::json!("normal"),
("huggingFace", "model") => crate::json!("test"),
("huggingFace", "revision") => crate::json!("test"),
("huggingFace", "pooling") => crate::json!("mean"),
("huggingFace", "apiKey") => crate::json!("test"),
("huggingFace", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("huggingFace", "binaryQuantized") => crate::json!(false),
("huggingFace", "documentTemplate") => crate::json!("test"),
("huggingFace", "documentTemplateMaxBytes") => crate::json!(100),
("huggingFace", "url") => crate::json!("http://test"),
("huggingFace", "request") => crate::json!({ "test": "test" }),
("huggingFace", "response") => crate::json!({ "test": "test" }),
("huggingFace", "headers") => crate::json!({ "test": "test" }),
("huggingFace", "distribution") => crate::json!("normal"),
("userProvided", "model") => crate::json!("test"),
("userProvided", "revision") => crate::json!("test"),
("userProvided", "pooling") => crate::json!("mean"),
("userProvided", "apiKey") => crate::json!("test"),
("userProvided", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("userProvided", "binaryQuantized") => crate::json!(false),
("userProvided", "documentTemplate") => crate::json!("test"),
("userProvided", "documentTemplateMaxBytes") => crate::json!(100),
("userProvided", "url") => crate::json!("http://test"),
("userProvided", "request") => crate::json!({ "test": "test" }),
("userProvided", "response") => crate::json!({ "test": "test" }),
("userProvided", "headers") => crate::json!({ "test": "test" }),
("userProvided", "distribution") => crate::json!("normal"),
("ollama", "model") => crate::json!("test"),
("ollama", "revision") => crate::json!("test"),
("ollama", "pooling") => crate::json!("mean"),
("ollama", "apiKey") => crate::json!("test"),
("ollama", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("ollama", "binaryQuantized") => crate::json!(false),
("ollama", "documentTemplate") => crate::json!("test"),
("ollama", "documentTemplateMaxBytes") => crate::json!(100),
("ollama", "url") => crate::json!("http://test"),
("ollama", "request") => crate::json!({ "test": "test" }),
("ollama", "response") => crate::json!({ "test": "test" }),
("ollama", "headers") => crate::json!({ "test": "test" }),
("ollama", "distribution") => crate::json!("normal"),
("rest", "model") => crate::json!("test"),
("rest", "revision") => crate::json!("test"),
("rest", "pooling") => crate::json!("mean"),
("rest", "apiKey") => crate::json!("test"),
("rest", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("rest", "binaryQuantized") => crate::json!(false),
("rest", "documentTemplate") => crate::json!("test"),
("rest", "documentTemplateMaxBytes") => crate::json!(100),
("rest", "url") => crate::json!("http://test"),
("rest", "request") => crate::json!({ "test": "test" }),
("rest", "response") => crate::json!({ "test": "test" }),
("rest", "headers") => crate::json!({ "test": "test" }),
("rest", "distribution") => crate::json!("normal"),
_ => panic!("Invalid parameter {} for source {}", parameter, source),
}
}

View file

@ -100,7 +100,7 @@ async fn add_remove_user_provided() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@ -134,7 +134,7 @@ async fn add_remove_user_provided() {
"limit": 20,
"total": 2
}
"###);
"#);
let (value, code) = index.delete_document(0).await;
snapshot!(code, @"202 Accepted");
@ -143,7 +143,7 @@ async fn add_remove_user_provided() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@ -161,6 +161,97 @@ async fn add_remove_user_provided() {
"limit": 20,
"total": 1
}
"#);
}
#[actix_rt::test]
async fn user_provide_mismatched_embedding_dimension() {
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [0, 0] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let task = index.wait_task(value.uid()).await;
snapshot!(task, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "doggo",
"status": "failed",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 0
},
"error": {
"message": "Index `doggo`: Invalid vector dimensions: expected: `3`, found: `2`.",
"code": "invalid_vector_dimensions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_vector_dimensions"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
// FIXME: /!\ Case where the total number of provided values is a multiple of `dimensions` would still pass
let new_document = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [[0, 0], [1, 1], [2, 2]] }},
]);
let (response, code) = index.add_documents(new_document, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(response.uid()).await.succeeded();
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [
{
"id": 0,
"name": "kefir",
"_vectors": {
"manual": {
"embeddings": [
[
0.0,
0.0,
1.0
],
[
1.0,
2.0,
2.0
]
],
"regenerate": false
}
}
}
],
"offset": 0,
"limit": 20,
"total": 1
}
"###);
}
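
The FIXME above flags that dimension validation effectively checks divisibility, so six values declared as three 2-dimensional vectors are silently re-chunked into two 3-dimensional embeddings, which is exactly what the snapshot shows: `[[0, 0], [1, 1], [2, 2]]` comes back as `[[0.0, 0.0, 1.0], [1.0, 2.0, 2.0]]`. A sketch of the re-chunking effect (illustrative, not the indexer's actual code path):

fn main() {
    // Flattened user input, intended as three 2-dimensional vectors.
    let flat: Vec<f32> = vec![0.0, 0.0, 1.0, 1.0, 2.0, 2.0];
    let dimensions = 3; // embedder setting from the test above

    // 6 values % 3 dims == 0, so a divisibility check passes...
    assert_eq!(flat.len() % dimensions, 0);

    // ...and the values read back as two 3-dimensional embeddings.
    let embeddings: Vec<&[f32]> = flat.chunks(dimensions).collect();
    assert_eq!(embeddings, vec![&[0.0, 0.0, 1.0][..], &[1.0, 2.0, 2.0][..]]);
}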
@ -759,7 +850,7 @@ async fn add_remove_one_vector_4588() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@ -777,5 +868,5 @@ async fn add_remove_one_vector_4588() {
"limit": 20,
"total": 1
}
"###);
"#);
}

View file

@ -271,7 +271,7 @@ fn fetch_matching_values_in_object(
}
fn starts_with(selector: &str, key: &str) -> bool {
selector.strip_prefix(key).map_or(false, |tail| {
selector.strip_prefix(key).is_some_and(|tail| {
tail.chars().next().map(|c| c == PRIMARY_KEY_SPLIT_SYMBOL).unwrap_or(true)
})
}
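
A recurring pattern in this merge, starting with the `starts_with` hunk above: `Option::map_or(false, f)` becomes `Option::is_some_and(f)`, and `map_or(true, f)` becomes `is_none_or(f)` (stabilized in Rust 1.70 and 1.82 respectively, likely driven by a clippy lint such as `unnecessary_map_or`). A minimal sketch of the equivalence, with illustrative values:

fn main() {
    let id: Option<u32> = Some(7);

    // Old style: the `false`/`true` default is easy to misread.
    let old_some = id.map_or(false, |n| n > 3);
    let old_none = id.map_or(true, |n| n > 3);

    // New style used throughout this merge: the default for `None`
    // is implied by the method name instead of a bare boolean.
    let new_some = id.is_some_and(|n| n > 3);
    let new_none = id.is_none_or(|n| n > 3);

    assert_eq!(old_some, new_some);
    assert_eq!(old_none, new_none);
}

The same rewrite appears in the filter, facet, matcher, and indexing hunks below; none of them change behavior.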

View file

@ -27,7 +27,7 @@ impl<'a, W> DocumentVisitor<'a, W> {
}
}
impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
impl<'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'_, W> {
/// This Visitor value is nothing, since it writes the value to a file.
type Value = Result<(), Error>;
@ -61,7 +61,7 @@ impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
}
}
impl<'a, 'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'a, W>
impl<'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'_, W>
where
W: Write,
{

View file

@ -1,4 +1,5 @@
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::convert::Infallible;
use std::fmt::Write;
use std::{io, str};
@ -120,13 +121,37 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
and can not be more than 511 bytes.", .document_id.to_string()
)]
InvalidDocumentId { document_id: Value },
#[error("Invalid facet distribution, {}", format_invalid_filter_distribution(.invalid_facets_name, .valid_patterns))]
#[error("Invalid facet distribution: {}",
if .invalid_facets_name.len() == 1 {
let field = .invalid_facets_name.iter().next().unwrap();
match .matching_rule_indices.get(field) {
Some(rule_index) => format!("Attribute `{}` matched rule #{} in filterableAttributes, but this rule does not enable filtering.\nHint: enable filtering in rule #{} by modifying the features.filter object\nHint: prepend another rule matching `{}` with appropriate filter features before rule #{}",
field, rule_index, rule_index, field, rule_index),
None => match .valid_patterns.is_empty() {
true => format!("Attribute `{}` is not filterable. This index does not have configured filterable attributes.", field),
false => format!("Attribute `{}` is not filterable. Available filterable attributes patterns are: `{}`.",
field,
.valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")),
}
}
} else {
format!("Attributes `{}` are not filterable. {}",
.invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
match .valid_patterns.is_empty() {
true => "This index does not have configured filterable attributes.".to_string(),
false => format!("Available filterable attributes patterns are: `{}`.",
.valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")),
}
)
}
)]
InvalidFacetsDistribution {
invalid_facets_name: BTreeSet<String>,
valid_patterns: BTreeSet<String>,
matching_rule_indices: HashMap<String, usize>,
},
#[error(transparent)]
InvalidGeoField(#[from] GeoError),
InvalidGeoField(#[from] Box<GeoError>),
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
InvalidVectorDimensions { expected: usize, found: usize },
#[error("Invalid vector dimensions in document with id `{document_id}` in `._vectors.{embedder_name}`.\n - note: embedding #{embedding_index} has dimensions {found}\n - note: embedder `{embedder_name}` requires {expected}")]
@ -145,7 +170,12 @@ and can not be more than 511 bytes.", .document_id.to_string()
InvalidFilter(String),
#[error("Invalid type for filter subexpression: expected: {}, found: {}.", .0.join(", "), .1)]
InvalidFilterExpression(&'static [&'static str], Value),
#[error("Filter operator `{operator}` is not allowed for the attribute `{field}`.\n - Note: allowed operators: {}.\n - Note: field `{field}` {} in `filterableAttributes`", allowed_operators.join(", "), format!("matched rule #{rule_index}"))]
#[error("Filter operator `{operator}` is not allowed for the attribute `{field}`.\n - Note: allowed operators: {}.\n - Note: field `{field}` matched rule #{rule_index} in `filterableAttributes`\n - Hint: enable {} in rule #{rule_index} by modifying the features.filter object\n - Hint: prepend another rule matching `{field}` with appropriate filter features before rule #{rule_index}",
allowed_operators.join(", "),
if operator == "=" || operator == "!=" || operator == "IN" {"equality"}
else if operator == "<" || operator == ">" || operator == "<=" || operator == ">=" || operator == "TO" {"comparison"}
else {"the appropriate filter operators"}
)]
FilterOperatorNotAllowed {
field: String,
allowed_operators: Vec<String>,
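
The hint in the `FilterOperatorNotAllowed` message above classifies the offending operator into a feature family: `equality` for `=`, `!=`, `IN`, and `comparison` for `<`, `>`, `<=`, `>=`, `TO`; anything else falls back to a generic wording. A self-contained sketch of that classification, pulled out of the `thiserror` attribute for readability (`feature_for` is an illustrative name, not an API in this codebase):

/// Illustrative helper mirroring the inline `if`/`else if` chain in the
/// `FilterOperatorNotAllowed` error message above.
fn feature_for(operator: &str) -> &'static str {
    match operator {
        "=" | "!=" | "IN" => "equality",
        "<" | ">" | "<=" | ">=" | "TO" => "comparison",
        _ => "the appropriate filter operators",
    }
}

fn main() {
    assert_eq!(feature_for(">"), "comparison");
    assert_eq!(feature_for("IN"), "equality");
    assert_eq!(feature_for("EXISTS"), "the appropriate filter operators");
}

This is what produces the `Hint: enable comparison in rule #0` versus `Hint: enable equality in rule #0` variants seen in the test snapshots earlier in this diff.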
@ -165,33 +195,51 @@ and can not be more than 511 bytes.", .document_id.to_string()
InvalidSortableAttribute { field: String, valid_fields: BTreeSet<String>, hidden_fields: bool },
#[error("Attribute `{}` is not filterable and thus, cannot be used as distinct attribute. {}",
.field,
match .valid_patterns.is_empty() {
true => "This index does not have configured filterable attributes.".to_string(),
false => format!("Available filterable attributes patterns are: `{}{}`.",
match (.valid_patterns.is_empty(), .matching_rule_index) {
// No rules match and no filterable attributes
(true, None) => "This index does not have configured filterable attributes.".to_string(),
// No rules match but there are some filterable attributes
(false, None) => format!("Available filterable attributes patterns are: `{}{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
.hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
),
// A rule matched but filtering isn't enabled
(_, Some(rule_index)) => format!("Note: this attribute matches rule #{} in filterableAttributes, but this rule does not enable filtering.\nHint: enable filtering in rule #{} by adding appropriate filter features.\nHint: prepend another rule matching {} with filter features before rule #{}",
rule_index, rule_index, .field, rule_index
),
}
)]
InvalidDistinctAttribute {
field: String,
valid_patterns: BTreeSet<String>,
hidden_fields: bool,
matching_rule_index: Option<usize>,
},
#[error("Attribute `{}` is not facet-searchable. {}",
.field,
match .valid_patterns.is_empty() {
true => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),
false => format!("Available facet-searchable attributes patterns are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
match (.valid_patterns.is_empty(), .matching_rule_index) {
// No rules match and no facet searchable attributes
(true, None) => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),
// No rules match but there are some facet searchable attributes
(false, None) => format!("Available facet-searchable attributes patterns are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
.hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
),
// A rule matched but facet search isn't enabled
(_, Some(rule_index)) => format!("Note: this attribute matches rule #{} in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #{} by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching {} with facetSearch: true before rule #{}",
rule_index, rule_index, .field, rule_index
),
}
)]
InvalidFacetSearchFacetName {
field: String,
valid_patterns: BTreeSet<String>,
hidden_fields: bool,
matching_rule_index: Option<usize>,
},
#[error("Attribute `{}` is not searchable. Available searchable attributes are: `{}{}`.",
.field,
@ -396,45 +444,53 @@ pub enum GeoError {
BadLongitude { document_id: Value, value: Value },
}
#[allow(dead_code)]
fn format_invalid_filter_distribution(
invalid_facets_name: &BTreeSet<String>,
valid_patterns: &BTreeSet<String>,
) -> String {
if valid_patterns.is_empty() {
return "this index does not have configured filterable attributes.".into();
}
let mut result = String::new();
match invalid_facets_name.len() {
0 => (),
1 => write!(
result,
"attribute `{}` is not filterable.",
invalid_facets_name.first().unwrap()
)
.unwrap(),
_ => write!(
result,
"attributes `{}` are not filterable.",
invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
};
if invalid_facets_name.is_empty() {
if valid_patterns.is_empty() {
return "this index does not have configured filterable attributes.".into();
}
} else {
match invalid_facets_name.len() {
1 => write!(
result,
"Attribute `{}` is not filterable.",
invalid_facets_name.first().unwrap()
)
.unwrap(),
_ => write!(
result,
"Attributes `{}` are not filterable.",
invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
};
}
match valid_patterns.len() {
1 => write!(
result,
" The available filterable attribute pattern is `{}`.",
valid_patterns.first().unwrap()
)
.unwrap(),
_ => write!(
result,
" The available filterable attribute patterns are `{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
if valid_patterns.is_empty() {
if !invalid_facets_name.is_empty() {
write!(result, " This index does not have configured filterable attributes.").unwrap();
}
} else {
match valid_patterns.len() {
1 => write!(
result,
" Available filterable attributes patterns are: `{}`.",
valid_patterns.first().unwrap()
)
.unwrap(),
_ => write!(
result,
" Available filterable attributes patterns are: `{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
}
}
result
@ -446,7 +502,7 @@ fn format_invalid_filter_distribution(
/// ```ignore
/// impl From<FieldIdMapMissingEntry> for Error {
/// fn from(error: FieldIdMapMissingEntry) -> Error {
/// Error::from(InternalError::from(error))
/// Error::from(<InternalError>::from(error))
/// }
/// }
/// ```
@ -471,7 +527,7 @@ error_from_sub_error! {
str::Utf8Error => InternalError,
ThreadPoolBuildError => InternalError,
SerializationError => InternalError,
GeoError => UserError,
Box<GeoError> => UserError,
CriterionError => UserError,
}
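
`GeoError` is now carried as `Box<GeoError>` both in the `InvalidGeoField` variant and in this `error_from_sub_error!` mapping. Since an enum is as large as its largest variant, boxing a fat payload keeps every `Result<T, Error>` cheap to move; this is the usual response to lints like clippy's `result_large_err`. A minimal sketch of the effect, with illustrative types:

use std::mem::size_of;

#[derive(Debug)]
enum FatError {
    Big([u8; 256]), // inflates every Result<_, FatError>
    Other,
}

#[derive(Debug)]
enum SlimError {
    Big(Box<[u8; 256]>), // payload moved to the heap
    Other,
}

fn main() {
    assert!(size_of::<SlimError>() < size_of::<FatError>());
    // Construction sites pay a `Box::new(..)`, as the geo hunks below do.
    let _ = SlimError::Big(Box::new([0u8; 256]));
    let _ = FatError::Other;
}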

View file

@ -25,7 +25,7 @@ impl ExternalDocumentsIds {
/// Returns `true` if hard and soft external documents lists are empty.
pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
self.0.is_empty(rtxn).map_err(Into::into)
self.0.is_empty(rtxn)
}
pub fn get<A: AsRef<str>>(

View file

@ -119,7 +119,7 @@ impl<'indexing> GlobalFieldsIdsMap<'indexing> {
}
}
impl<'indexing> MutFieldIdMapper for GlobalFieldsIdsMap<'indexing> {
impl MutFieldIdMapper for GlobalFieldsIdsMap<'_> {
fn insert(&mut self, name: &str) -> Option<FieldId> {
self.id_or_insert(name)
}
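
This is the first of many hunks in the merge that rewrite `impl<'a> Trait for Type<'a>` as `impl Trait for Type<'_>`: when a lifetime parameter is never referenced again inside the impl, clippy's `needless_lifetimes` (which recent toolchains extend to impl blocks) prefers the anonymous `'_`. A tiny sketch:

struct Wrapper<'a>(&'a str);

// Before: `impl<'a> std::fmt::Display for Wrapper<'a> { .. }` names a
// lifetime that nothing else refers to.
// After: `'_` says "some lifetime, never mentioned again".
impl std::fmt::Display for Wrapper<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    println!("{}", Wrapper("kefir"));
}

Lifetimes that do appear in method signatures are kept, which is why hunks below write `impl<'t> Matcher<'t, '_, '_, '_>` rather than eliding `'t` as well.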

View file

@ -3039,10 +3039,15 @@ pub(crate) mod tests {
documents!({ "id" : 6, RESERVED_GEO_FIELD_NAME: {"lat": "unparseable", "lng": "unparseable"}}),
)
.unwrap_err();
assert!(matches!(
err1,
Error::UserError(UserError::InvalidGeoField(GeoError::BadLatitudeAndLongitude { .. }))
));
match err1 {
Error::UserError(UserError::InvalidGeoField(err)) => match *err {
GeoError::BadLatitudeAndLongitude { .. } => (),
otherwise => {
panic!("err1 is not a BadLatitudeAndLongitude error but rather a {otherwise:?}")
}
},
_ => panic!("err1 is not a BadLatitudeAndLongitude error but rather a {err1:?}"),
}
db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted
}
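
The `matches!` assertion above becomes a nested `match` for two reasons: the now-boxed payload needs an explicit deref (`*err`) before its variants can be matched, and the panic arms print the unexpected error instead of failing with a bare boolean. A compact sketch of matching through a `Box`, with an illustrative error type:

#[derive(Debug)]
enum GeoErr {
    BadLatitudeAndLongitude,
    MissingLatitude,
}

fn main() {
    let err: Box<GeoErr> = Box::new(GeoErr::BadLatitudeAndLongitude);
    // `match *err` moves the payload out of the Box so plain enum
    // patterns apply; the catch-all keeps the diagnostic informative.
    match *err {
        GeoErr::BadLatitudeAndLongitude => (),
        otherwise => panic!("expected BadLatitudeAndLongitude, got {otherwise:?}"),
    }
    let _ = GeoErr::MissingLatitude;
}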

View file

@ -204,7 +204,7 @@ pub fn relative_from_absolute_position(absolute: Position) -> (FieldId, Relative
// Compute the absolute word position with the field id of the attribute and relative position in the attribute.
pub fn absolute_from_relative_position(field_id: FieldId, relative: RelativePosition) -> Position {
(field_id as u32) << 16 | (relative as u32)
((field_id as u32) << 16) | (relative as u32)
}
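
The added parentheses do not change evaluation: in Rust, `<<` binds tighter than `|`, so both spellings pack the field id into the high 16 bits; the parentheses only make that explicit, in the spirit of clippy's `precedence` lint. A round-trip sketch of the 16/16 packing, using local aliases that match the `u16`/`u32` casts in the surrounding code:

type FieldId = u16;
type RelativePosition = u16;
type Position = u32;

fn absolute_from_relative(field_id: FieldId, relative: RelativePosition) -> Position {
    // High 16 bits: field id; low 16 bits: position within the field.
    ((field_id as u32) << 16) | (relative as u32)
}

fn relative_from_absolute(absolute: Position) -> (FieldId, RelativePosition) {
    ((absolute >> 16) as FieldId, (absolute & 0xFFFF) as RelativePosition)
}

fn main() {
    let abs = absolute_from_relative(3, 42);
    assert_eq!(abs, (3 << 16) | 42);
    assert_eq!(relative_from_absolute(abs), (3, 42));
}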
// TODO: this is wrong, but will do for now
/// Compute the "bucketed" absolute position from the field id and relative position in the field.
@ -372,7 +372,7 @@ pub fn is_faceted(field: &str, faceted_fields: impl IntoIterator<Item = impl AsR
/// assert!(!is_faceted_by("animaux.chien", "animaux.chie"));
/// ```
pub fn is_faceted_by(field: &str, facet: &str) -> bool {
field.starts_with(facet) && field[facet.len()..].chars().next().map_or(true, |c| c == '.')
field.starts_with(facet) && field[facet.len()..].chars().next().is_none_or(|c| c == '.')
}
pub fn normalize_facet(original: &str) -> String {

View file

@ -15,7 +15,7 @@ impl<'a, D: ObjectView, F: ArrayView> Context<'a, D, F> {
}
}
impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
impl<D: ObjectView, F: ArrayView> ObjectView for Context<'_, D, F> {
fn as_value(&self) -> &dyn ValueView {
self
}
@ -52,7 +52,7 @@ impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
}
}
impl<'a, D: ObjectView, F: ArrayView> ValueView for Context<'a, D, F> {
impl<D: ObjectView, F: ArrayView> ValueView for Context<'_, D, F> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}

View file

@ -67,7 +67,7 @@ impl<'a> Document<'a> {
}
}
impl<'a> ObjectView for Document<'a> {
impl ObjectView for Document<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@ -98,7 +98,7 @@ impl<'a> ObjectView for Document<'a> {
}
}
impl<'a> ValueView for Document<'a> {
impl ValueView for Document<'_> {
fn as_debug(&self) -> &dyn Debug {
self
}
@ -283,7 +283,7 @@ impl<'doc> ParseableArray<'doc> {
}
}
impl<'doc> ArrayView for ParseableArray<'doc> {
impl ArrayView for ParseableArray<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@ -311,7 +311,7 @@ impl<'doc> ArrayView for ParseableArray<'doc> {
}
}
impl<'doc> ValueView for ParseableArray<'doc> {
impl ValueView for ParseableArray<'_> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@ -353,7 +353,7 @@ impl<'doc> ValueView for ParseableArray<'doc> {
}
}
impl<'doc> ObjectView for ParseableMap<'doc> {
impl ObjectView for ParseableMap<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@ -392,7 +392,7 @@ impl<'doc> ObjectView for ParseableMap<'doc> {
}
}
impl<'doc> ValueView for ParseableMap<'doc> {
impl ValueView for ParseableMap<'_> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@ -441,7 +441,7 @@ impl<'doc> ValueView for ParseableMap<'doc> {
}
}
impl<'doc> ValueView for ParseableValue<'doc> {
impl ValueView for ParseableValue<'_> {
fn as_debug(&self) -> &dyn Debug {
self
}
@ -622,7 +622,7 @@ struct ArraySource<'s, 'doc> {
s: &'s RawVec<'doc>,
}
impl<'s, 'doc> fmt::Display for ArraySource<'s, 'doc> {
impl fmt::Display for ArraySource<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[")?;
for item in self.s {
@ -638,7 +638,7 @@ struct ArrayRender<'s, 'doc> {
s: &'s RawVec<'doc>,
}
impl<'s, 'doc> fmt::Display for ArrayRender<'s, 'doc> {
impl fmt::Display for ArrayRender<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for item in self.s {
let v = ParseableValue::new(item, self.s.bump());

View file

@ -17,7 +17,7 @@ pub struct FieldValue<'a, D: ObjectView> {
metadata: Metadata,
}
impl<'a, D: ObjectView> ValueView for FieldValue<'a, D> {
impl<D: ObjectView> ValueView for FieldValue<'_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@ -78,7 +78,7 @@ impl<'a, D: ObjectView> FieldValue<'a, D> {
}
}
impl<'a, D: ObjectView> ObjectView for FieldValue<'a, D> {
impl<D: ObjectView> ObjectView for FieldValue<'_, D> {
fn as_value(&self) -> &dyn ValueView {
self
}
@ -148,7 +148,7 @@ impl<'a, 'map, D: ObjectView> BorrowedFields<'a, 'map, D> {
}
}
impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
impl<D: ObjectView> ArrayView for OwnedFields<'_, D> {
fn as_value(&self) -> &dyn ValueView {
self.0.as_value()
}
@ -170,7 +170,7 @@ impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
}
}
impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
impl<D: ObjectView> ArrayView for BorrowedFields<'_, '_, D> {
fn as_value(&self) -> &dyn ValueView {
self
}
@ -212,7 +212,7 @@ impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
}
}
impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
impl<D: ObjectView> ValueView for BorrowedFields<'_, '_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@ -254,7 +254,7 @@ impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
}
}
impl<'a, D: ObjectView> ValueView for OwnedFields<'a, D> {
impl<D: ObjectView> ValueView for OwnedFields<'_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@ -292,7 +292,7 @@ struct ArraySource<'a, 'map, D: ObjectView> {
s: &'a BorrowedFields<'a, 'map, D>,
}
impl<'a, 'map, D: ObjectView> fmt::Display for ArraySource<'a, 'map, D> {
impl<D: ObjectView> fmt::Display for ArraySource<'_, '_, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[")?;
for item in self.s.values() {
@ -307,7 +307,7 @@ struct ArrayRender<'a, 'map, D: ObjectView> {
s: &'a BorrowedFields<'a, 'map, D>,
}
impl<'a, 'map, D: ObjectView> fmt::Display for ArrayRender<'a, 'map, D> {
impl<D: ObjectView> fmt::Display for ArrayRender<'_, '_, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for item in self.s.values() {
write!(f, "{}", item.render())?;

View file

@ -358,7 +358,7 @@ impl<'a> FacetDistribution<'a> {
) -> bool {
// If the field is not filterable, we don't want to compute the facet distribution.
if !matching_features(name, filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable())
.is_some_and(|(_, features)| features.is_filterable())
{
return false;
}
@ -378,13 +378,21 @@ impl<'a> FacetDistribution<'a> {
filterable_attributes_rules: &[FilterableAttributesRule],
) -> Result<()> {
let mut invalid_facets = BTreeSet::new();
let mut matching_rule_indices = HashMap::new();
if let Some(facets) = &self.facets {
for field in facets.keys() {
let is_valid_filterable_field =
matching_features(field, filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable());
if !is_valid_filterable_field {
let matched_rule = matching_features(field, filterable_attributes_rules);
let is_filterable = matched_rule.is_some_and(|(_, f)| f.is_filterable());
if !is_filterable {
invalid_facets.insert(field.to_string());
// If the field matched a rule but that rule doesn't enable filtering,
// store the rule index for better error messages
if let Some((rule_index, _)) = matched_rule {
matching_rule_indices.insert(field.to_string(), rule_index);
}
}
}
}
@ -400,6 +408,7 @@ impl<'a> FacetDistribution<'a> {
return Err(Error::UserError(UserError::InvalidFacetsDistribution {
invalid_facets_name: invalid_facets,
valid_patterns,
matching_rule_indices,
}));
}
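
The loop above records a rule index only for fields that matched some `filterableAttributes` rule whose features do not enable filtering; fields matching no rule stay out of the map. That presence test is what selects between the two wordings of `InvalidFacetsDistribution` defined earlier in this diff. A hedged sketch of the selection, with simplified types and illustrative names:

use std::collections::{BTreeSet, HashMap};

// Simplified stand-in for the single-field arm of the error message.
fn message(invalid: &BTreeSet<String>, rule_indices: &HashMap<String, usize>) -> String {
    let field = invalid.iter().next().unwrap();
    match rule_indices.get(field) {
        // The field matched a rule, but that rule does not enable filtering.
        Some(i) => format!(
            "Attribute `{field}` matched rule #{i} in filterableAttributes, but this rule does not enable filtering."
        ),
        // The field matched no rule at all.
        None => format!("Attribute `{field}` is not filterable."),
    }
}

fn main() {
    let invalid: BTreeSet<String> = ["doggos.age".to_string()].into();
    let mut rule_indices = HashMap::new();
    assert!(message(&invalid, &rule_indices).ends_with("is not filterable."));
    rule_indices.insert("doggos.age".to_string(), 0);
    assert!(message(&invalid, &rule_indices).contains("rule #0"));
}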

View file

@ -79,7 +79,7 @@ struct FacetRangeSearch<'t, 'b, 'bitmap> {
docids: &'bitmap mut RoaringBitmap,
}
impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
impl<'t> FacetRangeSearch<'t, '_, '_> {
fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
let left_key =
FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };

View file

@ -62,7 +62,7 @@ struct AscendingFacetSort<'t, 'e> {
)>,
}
impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
impl<'t> Iterator for AscendingFacetSort<'t, '_> {
type Item = Result<(RoaringBitmap, &'t [u8])>;
fn next(&mut self) -> Option<Self::Item> {

View file

@ -66,15 +66,15 @@ enum FilterError<'a> {
ParseGeoError(BadGeoError),
TooDeep,
}
impl<'a> std::error::Error for FilterError<'a> {}
impl std::error::Error for FilterError<'_> {}
impl<'a> From<BadGeoError> for FilterError<'a> {
impl From<BadGeoError> for FilterError<'_> {
fn from(geo_error: BadGeoError) -> Self {
FilterError::ParseGeoError(geo_error)
}
}
impl<'a> Display for FilterError<'a> {
impl Display for FilterError<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::AttributeNotFilterable { attribute, filterable_patterns } => {
@ -237,7 +237,7 @@ impl<'a> Filter<'a> {
for fid in self.condition.fids(MAX_FILTER_DEPTH) {
let attribute = fid.value();
if matching_features(attribute, &filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable())
.is_some_and(|(_, features)| features.is_filterable())
{
continue;
}
@ -461,7 +461,7 @@ impl<'a> Filter<'a> {
filterable_attribute_rules: &[FilterableAttributesRule],
universe: Option<&RoaringBitmap>,
) -> Result<RoaringBitmap> {
if universe.map_or(false, |u| u.is_empty()) {
if universe.is_some_and(|u| u.is_empty()) {
return Ok(RoaringBitmap::new());
}

View file

@ -75,9 +75,11 @@ impl<'a> SearchForFacetValues<'a> {
let rtxn = self.search_query.rtxn;
let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
if !matching_features(&self.facet, &filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_facet_searchable())
{
let matched_rule = matching_features(&self.facet, &filterable_attributes_rules);
let is_facet_searchable =
matched_rule.is_some_and(|(_, features)| features.is_facet_searchable());
if !is_facet_searchable {
let matching_field_names =
filtered_matching_patterns(&filterable_attributes_rules, &|features| {
features.is_facet_searchable()
@ -85,10 +87,14 @@ impl<'a> SearchForFacetValues<'a> {
let (valid_patterns, hidden_fields) =
index.remove_hidden_fields(rtxn, matching_field_names)?;
// Get the matching rule index if any rule matched the attribute
let matching_rule_index = matched_rule.map(|(rule_index, _)| rule_index);
return Err(UserError::InvalidFacetSearchFacetName {
field: self.facet.clone(),
valid_patterns,
hidden_fields,
matching_rule_index,
}
.into());
};
@ -129,7 +135,7 @@ impl<'a> SearchForFacetValues<'a> {
if authorize_typos && field_authorizes_typos {
let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
if exact_words_fst.map_or(false, |fst| fst.contains(query)) {
if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
if fst.contains(query) {
self.fetch_original_facets_using_normalized(
fid,

View file

@ -151,7 +151,7 @@ impl ScoreWithRatioResult {
}
}
impl<'a> Search<'a> {
impl Search<'_> {
#[tracing::instrument(level = "trace", skip_all, target = "search::hybrid")]
pub fn execute_hybrid(&self, semantic_ratio: f32) -> Result<(SearchResult, Option<u32>)> {
// TODO: find classier way to achieve that than to reset vector and query params

View file

@ -190,9 +190,10 @@ impl<'a> Search<'a> {
if let Some(distinct) = &self.distinct {
let filterable_fields = ctx.index.filterable_attributes_rules(ctx.txn)?;
// check if the distinct field is in the filterable fields
if !matching_features(distinct, &filterable_fields)
.map_or(false, |(_, features)| features.is_filterable())
{
let matched_rule = matching_features(distinct, &filterable_fields);
let is_filterable = matched_rule.is_some_and(|(_, features)| features.is_filterable());
if !is_filterable {
// if not, remove the hidden fields from the filterable fields to generate the error message
let matching_patterns =
filtered_matching_patterns(&filterable_fields, &|features| {
@ -200,11 +201,16 @@ impl<'a> Search<'a> {
});
let (valid_patterns, hidden_fields) =
ctx.index.remove_hidden_fields(ctx.txn, matching_patterns)?;
// Get the matching rule index if any rule matched the attribute
let matching_rule_index = matched_rule.map(|(rule_index, _)| rule_index);
// and return the error
return Err(Error::UserError(UserError::InvalidDistinctAttribute {
field: distinct.clone(),
valid_patterns,
hidden_fields,
matching_rule_index,
}));
}
}

View file

@ -537,7 +537,7 @@ impl<'ctx> SearchContext<'ctx> {
fid: u16,
) -> Result<Option<RoaringBitmap>> {
// if the requested fid isn't in the restricted list, return None.
if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
return Ok(None);
}
@ -558,7 +558,7 @@ impl<'ctx> SearchContext<'ctx> {
fid: u16,
) -> Result<Option<RoaringBitmap>> {
// if the requested fid isn't in the restricted list, return None.
if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
return Ok(None);
}

View file

@ -72,7 +72,7 @@ pub fn find_best_match_interval(matches: &[Match], crop_size: usize) -> [&Match;
let interval_score = get_interval_score(&matches[interval_first..=interval_last]);
let is_interval_score_better = &best_interval
.as_ref()
.map_or(true, |MatchIntervalWithScore { score, .. }| interval_score > *score);
.is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);
if *is_interval_score_better {
best_interval = Some(MatchIntervalWithScore {

View file

@ -8,6 +8,7 @@ use std::cmp::{max, min};
use charabia::{Language, SeparatorKind, Token, Tokenizer};
use either::Either;
use itertools::Itertools;
pub use matching_words::MatchingWords;
use matching_words::{MatchType, PartialMatch};
use r#match::{Match, MatchPosition};
@ -122,7 +123,7 @@ pub struct Matcher<'t, 'tokenizer, 'b, 'lang> {
matches: Option<(Vec<Token<'t>>, Vec<Match>)>,
}
impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
impl<'t> Matcher<'t, '_, '_, '_> {
/// Iterates over tokens and saves any of them that match the query.
fn compute_matches(&mut self) -> &mut Self {
/// some words are counted as matches only if they are close together and in the right order,
@ -229,8 +230,7 @@ impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
.iter()
.map(|m| MatchBounds {
start: tokens[m.get_first_token_pos()].byte_start,
// TODO: Why is this in chars, while start is in bytes?
length: m.char_count,
length: self.calc_byte_length(tokens, m),
indices: if array_indices.is_empty() {
None
} else {
@ -241,6 +241,18 @@ impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
}
}
fn calc_byte_length(&self, tokens: &[Token<'t>], m: &Match) -> usize {
(m.get_first_token_pos()..=m.get_last_token_pos())
.flat_map(|i| match &tokens[i].char_map {
Some(char_map) => {
char_map.iter().map(|(original, _)| *original as usize).collect_vec()
}
None => tokens[i].lemma().chars().map(|c| c.len_utf8()).collect_vec(),
})
.take(m.char_count)
.sum()
}
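
`calc_byte_length` converts a match's `char_count` into a byte length, walking the matched tokens and consulting `char_map` where a normalizer changed the text, else counting `len_utf8` per character. This is what flips the `_formatted` snapshots earlier in this diff from `"length": 5` to `"length": 6` for `pésti`: five characters but six bytes, since `é` occupies two bytes in UTF-8. A minimal sketch of the no-`char_map` path:

// Sketch: byte length of the first `char_count` characters of a lemma,
// mirroring the `None` arm of `calc_byte_length` above.
fn byte_len_of_chars(lemma: &str, char_count: usize) -> usize {
    lemma.chars().take(char_count).map(|c| c.len_utf8()).sum()
}

fn main() {
    // 5 characters, 6 bytes: `é` encodes as two bytes in UTF-8.
    assert_eq!("pésti".chars().count(), 5);
    assert_eq!(byte_len_of_chars("pésti", 5), 6);
}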
/// Returns the bounds in byte index of the crop window.
fn crop_bounds(&self, tokens: &[Token<'_>], matches: &[Match], crop_size: usize) -> [usize; 2] {
let (

View file

@ -327,7 +327,7 @@ impl QueryGraph {
let mut peekable = term_with_frequency.into_iter().peekable();
while let Some((idx, frequency)) = peekable.next() {
term_weight.insert(idx, weight);
if peekable.peek().map_or(false, |(_, f)| frequency != *f) {
if peekable.peek().is_some_and(|(_, f)| frequency != *f) {
weight += 1;
}
}

View file

@ -398,7 +398,7 @@ fn split_best_frequency(
let right = ctx.word_interner.insert(right.to_owned());
if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(None, left, right, 1)? {
if best.map_or(true, |(old, _, _)| frequency > old) {
if best.is_none_or(|(old, _, _)| frequency > old) {
best = Some((frequency, left, right));
}
}

View file

@ -203,7 +203,7 @@ pub fn number_of_typos_allowed<'ctx>(
Ok(Box::new(move |word: &str| {
if !authorize_typos
|| word.len() < min_len_one_typo as usize
|| exact_words.as_ref().map_or(false, |fst| fst.contains(word))
|| exact_words.as_ref().is_some_and(|fst| fst.contains(word))
{
0
} else if word.len() < min_len_two_typos as usize {

View file

@ -17,7 +17,7 @@ use crate::Result;
pub struct PhraseDocIdsCache {
pub cache: FxHashMap<Interned<Phrase>, RoaringBitmap>,
}
impl<'ctx> SearchContext<'ctx> {
impl SearchContext<'_> {
/// Get the document ids associated with the given phrase
pub fn get_phrase_docids(&mut self, phrase: Interned<Phrase>) -> Result<&RoaringBitmap> {
if self.phrase_docids.cache.contains_key(&phrase) {

View file

@ -263,7 +263,7 @@ impl SmallBitmapInternal {
pub fn contains(&self, x: u16) -> bool {
let (set, x) = self.get_set_index(x);
set & 0b1 << x != 0
set & (0b1 << x) != 0
}
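
Same clarification as in `absolute_from_relative_position` earlier: `!=` binds looser than both `&` and `<<`, so the expression already parsed as `(set & (0b1 << x)) != 0`; the parentheses are for the reader. A one-line check:

fn main() {
    let (set, x): (u64, u16) = (0b1010, 3);
    // Both spellings test bit `x`; only readability differs.
    assert_eq!(set & 0b1 << x != 0, set & (0b1 << x) != 0);
    assert!(set & (0b1 << x) != 0); // bit 3 of 0b1010 is set
}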
pub fn insert(&mut self, x: u16) {
@ -381,7 +381,7 @@ pub enum SmallBitmapInternalIter<'b> {
Tiny(u64),
Small { cur: u64, next: &'b [u64], base: u16 },
}
impl<'b> Iterator for SmallBitmapInternalIter<'b> {
impl Iterator for SmallBitmapInternalIter<'_> {
type Item = u16;
fn next(&mut self) -> Option<Self::Item> {

View file

@ -101,8 +101,7 @@ impl FacetsUpdateIncremental {
let key = FacetGroupKeyCodec::<BytesRefCodec>::bytes_decode(key)
.map_err(heed::Error::Encoding)?;
if facet_level_may_be_updated
&& current_field_id.map_or(false, |fid| fid != key.field_id)
if facet_level_may_be_updated && current_field_id.is_some_and(|fid| fid != key.field_id)
{
// Only add or remove a level after making all the field modifications.
self.inner.add_or_delete_level(wtxn, current_field_id.unwrap())?;
@ -530,8 +529,8 @@ impl FacetsUpdateIncrementalInner {
add_docids: Option<&RoaringBitmap>,
del_docids: Option<&RoaringBitmap>,
) -> Result<bool> {
if add_docids.map_or(true, RoaringBitmap::is_empty)
&& del_docids.map_or(true, RoaringBitmap::is_empty)
if add_docids.is_none_or(RoaringBitmap::is_empty)
&& del_docids.is_none_or(RoaringBitmap::is_empty)
{
return Ok(false);
}
@ -670,7 +669,7 @@ impl FacetsUpdateIncrementalInner {
}
}
impl<'a> FacetGroupKey<&'a [u8]> {
impl FacetGroupKey<&[u8]> {
pub fn into_owned(self) -> FacetGroupKey<Vec<u8>> {
FacetGroupKey {
field_id: self.field_id,

View file

@ -115,7 +115,7 @@ pub fn enrich_documents_batch<R: Read + Seek>(
if let Some(geo_value) = geo_field_id.and_then(|fid| document.get(fid)) {
if let Err(user_error) = validate_geo_from_json(&document_id, geo_value)? {
return Ok(Err(UserError::from(user_error)));
return Ok(Err(UserError::from(Box::new(user_error))));
}
}

View file

@ -160,11 +160,11 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
let del_geo_support = settings_diff
.old
.geo_fields_ids
.map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
.is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
let add_geo_support = settings_diff
.new
.geo_fields_ids
.map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
.is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
let del_filterable_values =
del_value.map(|value| extract_facet_values(&value, del_geo_support));
let add_filterable_values =

View file

@ -80,22 +80,28 @@ fn extract_lat_lng(
let (lat, lng) = match (lat, lng) {
(Some(lat), Some(lng)) => (lat, lng),
(Some(_), None) => {
return Err(GeoError::MissingLatitude { document_id: document_id() }.into())
return Err(
Box::new(GeoError::MissingLatitude { document_id: document_id() }).into()
)
}
(None, Some(_)) => {
return Err(GeoError::MissingLongitude { document_id: document_id() }.into())
return Err(
Box::new(GeoError::MissingLongitude { document_id: document_id() }).into()
)
}
(None, None) => return Ok(None),
};
let lat = extract_finite_float_from_value(
serde_json::from_slice(lat).map_err(InternalError::SerdeJson)?,
)
.map_err(|lat| GeoError::BadLatitude { document_id: document_id(), value: lat })?;
.map_err(|lat| GeoError::BadLatitude { document_id: document_id(), value: lat })
.map_err(Box::new)?;
let lng = extract_finite_float_from_value(
serde_json::from_slice(lng).map_err(InternalError::SerdeJson)?,
)
.map_err(|lng| GeoError::BadLongitude { document_id: document_id(), value: lng })?;
.map_err(|lng| GeoError::BadLongitude { document_id: document_id(), value: lng })
.map_err(Box::new)?;
Ok(Some([lat, lng]))
}
None => Ok(None),

View file

@ -69,7 +69,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
let document_id = u32::from_be_bytes(document_id_bytes);
// when we move on to a new document, we fill the sorter
if current_document_id.map_or(false, |id| id != document_id) {
if current_document_id.is_some_and(|id| id != document_id) {
// FIXME: span inside of a hot loop might degrade performance and create big reports
let span = tracing::trace_span!(target: "indexing::details", "document_into_sorter");
let _entered = span.enter();
@ -96,7 +96,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
if let Some(deletion) = KvReaderDelAdd::from_slice(value).get(DelAdd::Deletion) {
for (position, word) in KvReaderU16::from_slice(deletion).iter() {
// drain the proximity window until the head word is considered close to the word we are inserting.
while del_word_positions.front().map_or(false, |(_w, p)| {
while del_word_positions.front().is_some_and(|(_w, p)| {
index_proximity(*p as u32, position as u32) >= MAX_DISTANCE
}) {
word_positions_into_word_pair_proximity(
@ -129,7 +129,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
if let Some(addition) = KvReaderDelAdd::from_slice(value).get(DelAdd::Addition) {
for (position, word) in KvReaderU16::from_slice(addition).iter() {
// drain the proximity window until the head word is considered close to the word we are inserting.
while add_word_positions.front().map_or(false, |(_w, p)| {
while add_word_positions.front().is_some_and(|(_w, p)| {
index_proximity(*p as u32, position as u32) >= MAX_DISTANCE
}) {
word_positions_into_word_pair_proximity(

View file

@ -46,7 +46,7 @@ pub fn extract_word_position_docids<R: io::Read + io::Seek>(
.ok_or(SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
let document_id = DocumentId::from_be_bytes(document_id_bytes);
if current_document_id.map_or(false, |id| document_id != id) {
if current_document_id.is_some_and(|id| document_id != id) {
words_position_into_sorter(
current_document_id.unwrap(),
&mut key_buffer,

View file

@ -281,7 +281,7 @@ fn send_original_documents_data(
};
if !(remove_vectors.is_empty()
&& manual_vectors.is_empty()
&& embeddings.as_ref().map_or(true, |e| e.is_empty()))
&& embeddings.as_ref().is_none_or(|e| e.is_empty()))
{
let _ = lmdb_writer_sx.send(Ok(TypedChunk::VectorPoints {
remove_vectors,

View file

@ -514,12 +514,9 @@ where
InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
)?;
let embedder_config = settings_diff.embedding_config_updates.get(&embedder_name);
let was_quantized = settings_diff
.old
.embedding_configs
.get(&embedder_name)
.map_or(false, |conf| conf.2);
let is_quantizing = embedder_config.map_or(false, |action| action.is_being_quantized);
let was_quantized =
settings_diff.old.embedding_configs.get(&embedder_name).is_some_and(|conf| conf.2);
let is_quantizing = embedder_config.is_some_and(|action| action.is_being_quantized);
pool.install(|| {
let mut writer = ArroyWrapper::new(vector_arroy, embedder_index, was_quantized);

View file

@ -197,7 +197,7 @@ impl<'a, 'i> Transform<'a, 'i> {
// drop_and_reuse is called instead of .clear() to communicate to the compiler that field_buffer
// does not keep references from the cursor between loop iterations
let mut field_buffer_cache = drop_and_reuse(field_buffer);
if self.indexer_settings.log_every_n.map_or(false, |len| documents_count % len == 0) {
if self.indexer_settings.log_every_n.is_some_and(|len| documents_count % len == 0) {
progress_callback(UpdateIndexingStep::RemapDocumentAddition {
documents_seen: documents_count,
});
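
Note: the condition above is the usual modulo throttle for progress callbacks: fire only every `log_every_n` documents. A sketch of the idiom with a hypothetical callback (assuming, as the real setting presumably guarantees, that `n` is never zero):

    fn process_all(docs: &[&str], log_every_n: Option<usize>, mut progress: impl FnMut(usize)) {
        for (documents_count, _doc) in docs.iter().enumerate() {
            // `n == 0` would panic on the modulo; assumed non-zero here.
            if log_every_n.is_some_and(|n| documents_count % n == 0) {
                progress(documents_count);
            }
        }
    }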

View file

@@ -55,7 +55,7 @@ impl ChunkAccumulator {
match self
.inner
.iter()
.position(|right| right.first().map_or(false, |right| chunk.mergeable_with(right)))
.position(|right| right.first().is_some_and(|right| chunk.mergeable_with(right)))
{
Some(position) => {
let v = self.inner.get_mut(position).unwrap();
@@ -664,11 +664,8 @@ pub(crate) fn write_typed_chunk_into_index(
let embedder_index = index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
)?;
let binary_quantized = settings_diff
.old
.embedding_configs
.get(&embedder_name)
.map_or(false, |conf| conf.2);
let binary_quantized =
settings_diff.old.embedding_configs.get(&embedder_name).is_some_and(|conf| conf.2);
// FIXME: allow customizing distance
let writer = ArroyWrapper::new(index.vector_arroy, embedder_index, binary_quantized);

View file

@@ -56,13 +56,13 @@ where
content: &'t KvReaderFieldId,
}
impl<'t, Mapper: FieldIdMapper> Clone for DocumentFromDb<'t, Mapper> {
impl<Mapper: FieldIdMapper> Clone for DocumentFromDb<'_, Mapper> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl<'t, Mapper: FieldIdMapper> Copy for DocumentFromDb<'t, Mapper> {}
impl<Mapper: FieldIdMapper> Copy for DocumentFromDb<'_, Mapper> {}
impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
fn iter_top_level_fields(&self) -> impl Iterator<Item = Result<(&'t str, &'t RawValue)>> {
@@ -154,7 +154,7 @@ impl<'a, 'doc> DocumentFromVersions<'a, 'doc> {
}
}
impl<'a, 'doc> Document<'doc> for DocumentFromVersions<'a, 'doc> {
impl<'doc> Document<'doc> for DocumentFromVersions<'_, 'doc> {
fn iter_top_level_fields(&self) -> impl Iterator<Item = Result<(&'doc str, &'doc RawValue)>> {
self.versions.iter_top_level_fields().map(Ok)
}
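
Note: the lifetime rewrites in this file (and in many hunks below) drop impl-level lifetime parameters that the impl body never names, writing `'_` in the type path instead; recent clippy releases flag the verbose form via `needless_lifetimes`. A minimal sketch of the before/after shape, with a stand-in type:

    struct DocumentView<'t> {
        raw: &'t str,
    }

    // Before: `impl<'t> Clone for DocumentView<'t> { ... }` declares a
    // lifetime only to repeat it. After: `'_` asks the compiler to fill it in.
    impl Clone for DocumentView<'_> {
        fn clone(&self) -> Self {
            Self { raw: self.raw }
        }
    }

    impl Copy for DocumentView<'_> {}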

View file

@@ -121,7 +121,7 @@ impl<'extractor> BalancedCaches<'extractor> {
}
pub fn insert_del_u32(&mut self, key: &[u8], n: u32) -> Result<()> {
if self.max_memory.map_or(false, |mm| self.alloc.allocated_bytes() >= mm) {
if self.max_memory.is_some_and(|mm| self.alloc.allocated_bytes() >= mm) {
self.start_spilling()?;
}
@@ -138,7 +138,7 @@ impl<'extractor> BalancedCaches<'extractor> {
}
pub fn insert_add_u32(&mut self, key: &[u8], n: u32) -> Result<()> {
if self.max_memory.map_or(false, |mm| self.alloc.allocated_bytes() >= mm) {
if self.max_memory.is_some_and(|mm| self.alloc.allocated_bytes() >= mm) {
self.start_spilling()?;
}
@@ -623,7 +623,7 @@ pub struct FrozenDelAddBbbul<'bump, B> {
pub add: Option<FrozenBbbul<'bump, B>>,
}
impl<'bump, B> FrozenDelAddBbbul<'bump, B> {
impl<B> FrozenDelAddBbbul<'_, B> {
fn is_empty(&self) -> bool {
self.del.is_none() && self.add.is_none()
}

View file

@@ -31,7 +31,7 @@ pub struct DocumentExtractorData {
pub field_distribution_delta: HashMap<String, i64>,
}
impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
impl<'extractor> Extractor<'extractor> for DocumentsExtractor<'_, '_> {
type Data = FullySend<RefCell<DocumentExtractorData>>;
fn init_data(&self, _extractor_alloc: &'extractor Bump) -> Result<Self::Data> {

View file

@@ -37,7 +37,7 @@ pub struct FacetedExtractorData<'a, 'b> {
is_geo_enabled: bool,
}
impl<'a, 'b, 'extractor> Extractor<'extractor> for FacetedExtractorData<'a, 'b> {
impl<'extractor> Extractor<'extractor> for FacetedExtractorData<'_, '_> {
type Data = RefCell<BalancedCaches<'extractor>>;
fn init_data(&self, extractor_alloc: &'extractor Bump) -> Result<Self::Data> {

View file

@@ -92,7 +92,7 @@ pub struct FrozenGeoExtractorData<'extractor> {
pub spilled_inserted: Option<BufReader<File>>,
}
impl<'extractor> FrozenGeoExtractorData<'extractor> {
impl FrozenGeoExtractorData<'_> {
pub fn iter_and_clear_removed(
&mut self,
) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_> {
@@ -160,7 +160,7 @@ impl<'extractor> Extractor<'extractor> for GeoExtractor {
for change in changes {
if data_ref.spilled_removed.is_none()
&& max_memory.map_or(false, |mm| context.extractor_alloc.allocated_bytes() >= mm)
&& max_memory.is_some_and(|mm| context.extractor_alloc.allocated_bytes() >= mm)
{
// We must spill as we allocated too much memory
data_ref.spilled_removed = tempfile::tempfile().map(BufWriter::new).map(Some)?;
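
Note: the branch above implements spill-to-disk: once the extractor's allocator crosses the `max_memory` budget, further data goes to a temp file instead of RAM. A sketch of the idiom with simplified types (using the `tempfile` crate, as the diff does):

    use std::fs::File;
    use std::io::{self, BufWriter, Write};

    struct Sink {
        in_memory: Vec<u8>,
        spilled: Option<BufWriter<File>>,
        max_memory: Option<usize>,
    }

    impl Sink {
        fn push(&mut self, bytes: &[u8]) -> io::Result<()> {
            // Switch to a temp file the first time the budget is exceeded.
            if self.spilled.is_none()
                && self.max_memory.is_some_and(|mm| self.in_memory.len() >= mm)
            {
                self.spilled = tempfile::tempfile().map(BufWriter::new).map(Some)?;
            }
            if let Some(file) = &mut self.spilled {
                file.write_all(bytes)
            } else {
                self.in_memory.extend_from_slice(bytes);
                Ok(())
            }
        }
    }
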
@@ -258,9 +258,11 @@ pub fn extract_geo_coordinates(
Value::Null => return Ok(None),
Value::Object(map) => map,
value => {
return Err(
GeoError::NotAnObject { document_id: Value::from(external_id), value }.into()
)
return Err(Box::new(GeoError::NotAnObject {
document_id: Value::from(external_id),
value,
})
.into())
}
};
@@ -269,23 +271,29 @@ pub fn extract_geo_coordinates(
if geo.is_empty() {
[lat, lng]
} else {
return Err(GeoError::UnexpectedExtraFields {
return Err(Box::new(GeoError::UnexpectedExtraFields {
document_id: Value::from(external_id),
value: Value::from(geo),
}
})
.into());
}
}
(Some(_), None) => {
return Err(GeoError::MissingLongitude { document_id: Value::from(external_id) }.into())
return Err(Box::new(GeoError::MissingLongitude {
document_id: Value::from(external_id),
})
.into())
}
(None, Some(_)) => {
return Err(GeoError::MissingLatitude { document_id: Value::from(external_id) }.into())
return Err(Box::new(GeoError::MissingLatitude {
document_id: Value::from(external_id),
})
.into())
}
(None, None) => {
return Err(GeoError::MissingLatitudeAndLongitude {
return Err(Box::new(GeoError::MissingLatitudeAndLongitude {
document_id: Value::from(external_id),
}
})
.into())
}
};
@@ -293,16 +301,18 @@ pub fn extract_geo_coordinates(
match (extract_finite_float_from_value(lat), extract_finite_float_from_value(lng)) {
(Ok(lat), Ok(lng)) => Ok(Some([lat, lng])),
(Ok(_), Err(value)) => {
Err(GeoError::BadLongitude { document_id: Value::from(external_id), value }.into())
Err(Box::new(GeoError::BadLongitude { document_id: Value::from(external_id), value })
.into())
}
(Err(value), Ok(_)) => {
Err(GeoError::BadLatitude { document_id: Value::from(external_id), value }.into())
Err(Box::new(GeoError::BadLatitude { document_id: Value::from(external_id), value })
.into())
}
(Err(lat), Err(lng)) => Err(GeoError::BadLatitudeAndLongitude {
(Err(lat), Err(lng)) => Err(Box::new(GeoError::BadLatitudeAndLongitude {
document_id: Value::from(external_id),
lat,
lng,
}
})
.into()),
}
}

View file

@@ -31,7 +31,7 @@ pub struct WordDocidsBalancedCaches<'extractor> {
current_docid: Option<DocumentId>,
}
unsafe impl<'extractor> MostlySend for WordDocidsBalancedCaches<'extractor> {}
unsafe impl MostlySend for WordDocidsBalancedCaches<'_> {}
impl<'extractor> WordDocidsBalancedCaches<'extractor> {
pub fn new_in(buckets: usize, max_memory: Option<usize>, alloc: &'extractor Bump) -> Self {
@@ -78,7 +78,7 @@ impl<'extractor> WordDocidsBalancedCaches<'extractor> {
buffer.extend_from_slice(&position.to_be_bytes());
self.word_position_docids.insert_add_u32(&buffer, docid)?;
if self.current_docid.map_or(false, |id| docid != id) {
if self.current_docid.is_some_and(|id| docid != id) {
self.flush_fid_word_count(&mut buffer)?;
}
@@ -123,7 +123,7 @@ impl<'extractor> WordDocidsBalancedCaches<'extractor> {
buffer.extend_from_slice(&position.to_be_bytes());
self.word_position_docids.insert_del_u32(&buffer, docid)?;
if self.current_docid.map_or(false, |id| docid != id) {
if self.current_docid.is_some_and(|id| docid != id) {
self.flush_fid_word_count(&mut buffer)?;
}
@@ -212,7 +212,7 @@ pub struct WordDocidsExtractorData<'a> {
searchable_attributes: Option<Vec<&'a str>>,
}
impl<'a, 'extractor> Extractor<'extractor> for WordDocidsExtractorData<'a> {
impl<'extractor> Extractor<'extractor> for WordDocidsExtractorData<'_> {
type Data = RefCell<Option<WordDocidsBalancedCaches<'extractor>>>;
fn init_data(&self, extractor_alloc: &'extractor Bump) -> Result<Self::Data> {

View file

@@ -25,7 +25,7 @@ pub struct WordPairProximityDocidsExtractorData<'a> {
buckets: usize,
}
impl<'a, 'extractor> Extractor<'extractor> for WordPairProximityDocidsExtractorData<'a> {
impl<'extractor> Extractor<'extractor> for WordPairProximityDocidsExtractorData<'_> {
type Data = RefCell<BalancedCaches<'extractor>>;
fn init_data(&self, extractor_alloc: &'extractor Bump) -> Result<Self::Data> {
@@ -270,7 +270,7 @@ fn process_document_tokens<'doc>(
// drain the proximity window until the head word is considered close to the word we are inserting.
while word_positions
.front()
.map_or(false, |(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
.is_some_and(|(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
{
word_positions_into_word_pair_proximity(word_positions, word_pair_proximity);
}
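
Note: the drain loop above maintains a sliding proximity window over a `VecDeque`: words are popped from the front until the head is close enough to the incoming position. A sketch with hypothetical `index_proximity` and `MAX_DISTANCE` (the real definitions live in milli and may differ):

    use std::collections::VecDeque;

    const MAX_DISTANCE: u32 = 8; // assumed value, for illustration

    // Hypothetical stand-in for milli's proximity measure.
    fn index_proximity(lhs: u32, rhs: u32) -> u32 {
        rhs.saturating_sub(lhs) + 1
    }

    fn drain_window(word_positions: &mut VecDeque<(String, u16)>, pos: u16) {
        while word_positions
            .front()
            .is_some_and(|(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
        {
            // The real code pairs the popped head with the remaining window
            // to emit word-pair proximity entries; here we just drop it.
            let _head = word_positions.pop_front();
        }
    }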

View file

@@ -22,7 +22,7 @@ pub struct DocumentTokenizer<'a> {
pub max_positions_per_attributes: u32,
}
impl<'a> DocumentTokenizer<'a> {
impl DocumentTokenizer<'_> {
pub fn tokenize_document<'doc>(
&self,
document: impl Document<'doc>,

View file

@@ -43,7 +43,7 @@ pub struct EmbeddingExtractorData<'extractor>(
unsafe impl MostlySend for EmbeddingExtractorData<'_> {}
impl<'a, 'b, 'extractor> Extractor<'extractor> for EmbeddingExtractor<'a, 'b> {
impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
type Data = RefCell<EmbeddingExtractorData<'extractor>>;
fn init_data<'doc>(&'doc self, extractor_alloc: &'extractor Bump) -> crate::Result<Self::Data> {

View file

@@ -29,8 +29,8 @@ impl<'p, 'indexer, Mapper: MutFieldIdMapper> FieldAndDocidExtractor<'p, 'indexer
}
}
impl<'de, 'p, 'indexer: 'de, Mapper: MutFieldIdMapper> Visitor<'de>
for FieldAndDocidExtractor<'p, 'indexer, Mapper>
impl<'de, 'indexer: 'de, Mapper: MutFieldIdMapper> Visitor<'de>
for FieldAndDocidExtractor<'_, 'indexer, Mapper>
{
type Value =
Result<Result<DeOrBumpStr<'de, 'indexer>, DocumentIdExtractionError>, crate::UserError>;
@@ -98,7 +98,7 @@ struct NestedPrimaryKeyVisitor<'a, 'bump> {
bump: &'bump Bump,
}
impl<'de, 'a, 'bump: 'de> Visitor<'de> for NestedPrimaryKeyVisitor<'a, 'bump> {
impl<'de, 'bump: 'de> Visitor<'de> for NestedPrimaryKeyVisitor<'_, 'bump> {
type Value = std::result::Result<Option<DeOrBumpStr<'de, 'bump>>, DocumentIdExtractionError>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
@@ -237,7 +237,7 @@ impl<'de, 'a, Mapper: MutFieldIdMapper> Visitor<'de> for MutFieldIdMapVisitor<'a
pub struct FieldIdMapVisitor<'a, Mapper: FieldIdMapper>(pub &'a Mapper);
impl<'de, 'a, Mapper: FieldIdMapper> Visitor<'de> for FieldIdMapVisitor<'a, Mapper> {
impl<'de, Mapper: FieldIdMapper> Visitor<'de> for FieldIdMapVisitor<'_, Mapper> {
type Value = Option<FieldId>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {

View file

@@ -149,16 +149,11 @@ pub struct IndexingContext<
pub grenad_parameters: &'indexer GrenadParameters,
}
impl<
'fid, // invariant lifetime of fields ids map
'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
'index, // covariant lifetime of the index
MSP,
> Copy
impl<MSP> Copy
for IndexingContext<
'fid, // invariant lifetime of fields ids map
'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
'index, // covariant lifetime of the index
'_, // invariant lifetime of fields ids map
'_, // covariant lifetime of objects that are borrowed during the entire indexing operation
'_, // covariant lifetime of the index
MSP,
>
where
@@ -166,16 +161,11 @@ where
{
}
impl<
'fid, // invariant lifetime of fields ids map
'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
'index, // covariant lifetime of the index
MSP,
> Clone
impl<MSP> Clone
for IndexingContext<
'fid, // invariant lifetime of fields ids map
'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
'index, // covariant lifetime of the index
'_, // invariant lifetime of fields ids map
'_, // covariant lifetime of objects that are borrowed during the entire indexing operation
'_, // covariant lifetime of the index
MSP,
>
where

View file

@@ -110,7 +110,7 @@ mod test {
>,
}
unsafe impl<'extractor> MostlySend for DeletionWithData<'extractor> {}
unsafe impl MostlySend for DeletionWithData<'_> {}
struct TrackDeletion<'extractor>(PhantomData<&'extractor ()>);

View file

@@ -210,14 +210,8 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(
primary_key.as_ref().unwrap()
};
let external_id = match retrieved_primary_key.extract_fields_and_docid(
doc,
new_fields_ids_map,
indexer,
) {
Ok(edi) => edi,
Err(e) => return Err(e),
};
let external_id =
retrieved_primary_key.extract_fields_and_docid(doc, new_fields_ids_map, indexer)?;
let external_id = external_id.to_de();
let current_offset = iter.byte_offset();
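
Note: the deleted `match` whose arms only forward `Ok` and return `Err` is exactly what the `?` operator desugars to (plus a `From` conversion on the error path), so the one-liner is behavior-preserving. A minimal sketch of the equivalence:

    fn parse_id(s: &str) -> Result<u32, std::num::ParseIntError> {
        // Before: forward the error by hand.
        let id = match s.trim().parse::<u32>() {
            Ok(v) => v,
            Err(e) => return Err(e),
        };
        // After: `?` performs the same early return.
        let same = s.trim().parse::<u32>()?;
        assert_eq!(id, same);
        Ok(id)
    }
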
@@ -580,12 +574,12 @@ impl<'pl> PayloadOperations<'pl> {
}
}
Some(InnerDocOp::Deletion) => {
return if self.is_new {
if self.is_new {
Ok(None)
} else {
let deletion = Deletion::create(self.docid, external_doc);
Ok(Some(DocumentChange::Deletion(deletion)))
};
}
}
None => unreachable!("We must not have an empty set of operations on a document"),
}

View file

@@ -14,7 +14,7 @@ use crate::index::IndexEmbeddingConfig;
use crate::progress::Progress;
use crate::update::settings::InnerIndexSettings;
use crate::vector::{ArroyWrapper, Embedder, EmbeddingConfigs, Embeddings};
use crate::{Error, Index, InternalError, Result};
use crate::{Error, Index, InternalError, Result, UserError};
pub fn write_to_db(
mut writer_receiver: WriterBbqueueReceiver<'_>,
@@ -219,7 +219,12 @@ pub fn write_from_bbqueue(
arroy_writers.get(&embedder_id).expect("requested a missing embedder");
let mut embeddings = Embeddings::new(*dimensions);
let all_embeddings = asvs.read_all_embeddings_into_vec(frame, aligned_embedding);
embeddings.append(all_embeddings.to_vec()).unwrap();
if embeddings.append(all_embeddings.to_vec()).is_err() {
return Err(Error::UserError(UserError::InvalidVectorDimensions {
expected: *dimensions,
found: all_embeddings.len(),
}));
}
writer.del_items(wtxn, *dimensions, docid)?;
writer.add_items(wtxn, docid, &embeddings)?;
}
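
Note: the change above turns a dimension mismatch from an `unwrap()` panic into a reportable `UserError::InvalidVectorDimensions`. A sketch of the shape with stand-in types (the real `Embeddings::append` lives in milli and may differ):

    struct Embeddings {
        dimensions: usize, // assumed non-zero
        data: Vec<f32>,
    }

    #[derive(Debug)]
    enum UserError {
        InvalidVectorDimensions { expected: usize, found: usize },
    }

    impl Embeddings {
        // Reject payloads whose length is not a whole number of vectors,
        // surfacing a typed error instead of killing the indexing thread.
        fn append(&mut self, all: Vec<f32>) -> Result<(), UserError> {
            if all.len() % self.dimensions != 0 {
                return Err(UserError::InvalidVectorDimensions {
                    expected: self.dimensions,
                    found: all.len(),
                });
            }
            self.data.extend(all);
            Ok(())
        }
    }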

View file

@@ -149,7 +149,7 @@ impl<'a, 'rtxn> FrozenPrefixBitmaps<'a, 'rtxn> {
}
}
unsafe impl<'a, 'rtxn> Sync for FrozenPrefixBitmaps<'a, 'rtxn> {}
unsafe impl Sync for FrozenPrefixBitmaps<'_, '_> {}
struct WordPrefixIntegerDocids {
database: Database<Bytes, CboRoaringBitmapCodec>,
@@ -302,7 +302,7 @@ impl<'a, 'rtxn> FrozenPrefixIntegerBitmaps<'a, 'rtxn> {
}
}
unsafe impl<'a, 'rtxn> Sync for FrozenPrefixIntegerBitmaps<'a, 'rtxn> {}
unsafe impl Sync for FrozenPrefixIntegerBitmaps<'_, '_> {}
#[tracing::instrument(level = "trace", skip_all, target = "indexing::prefix")]
fn delete_prefixes(

View file

@@ -560,7 +560,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
// Does the new FST differ from the previous one?
if current
.map_or(true, |current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
.is_none_or(|current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
{
// we want to re-create our FST.
self.index.put_stop_words(self.wtxn, &fst)?;
@@ -580,7 +580,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let current = self.index.non_separator_tokens(self.wtxn)?;
// Does the new list differ from the previous one?
if current.map_or(true, |current| &current != non_separator_tokens) {
if current.is_none_or(|current| &current != non_separator_tokens) {
self.index.put_non_separator_tokens(self.wtxn, non_separator_tokens)?;
true
} else {
@@ -605,7 +605,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let current = self.index.separator_tokens(self.wtxn)?;
// Does the new list differ from the previous one?
if current.map_or(true, |current| &current != separator_tokens) {
if current.is_none_or(|current| &current != separator_tokens) {
self.index.put_separator_tokens(self.wtxn, separator_tokens)?;
true
} else {
@@ -630,7 +630,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let current = self.index.dictionary(self.wtxn)?;
// Does the new list differ from the previous one?
if current.map_or(true, |current| &current != dictionary) {
if current.is_none_or(|current| &current != dictionary) {
self.index.put_dictionary(self.wtxn, dictionary)?;
true
} else {
@@ -1353,7 +1353,7 @@ impl InnerIndexSettingsDiff {
new_settings.embedding_configs.inner_as_ref()
{
let was_quantized =
old_settings.embedding_configs.get(embedder_name).map_or(false, |conf| conf.2);
old_settings.embedding_configs.get(embedder_name).is_some_and(|conf| conf.2);
// skip embedders that don't use document templates
if !config.uses_document_template() {
continue;

View file

@@ -311,7 +311,7 @@ fn last_named_object<'a>(
last_named_object
}
impl<'a> std::fmt::Display for LastNamedObject<'a> {
impl std::fmt::Display for LastNamedObject<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
LastNamedObject::Object { name } => write!(f, "`{name}`"),

View file

@@ -59,7 +59,7 @@ impl ArroyWrapper {
&'a self,
rtxn: &'a RoTxn<'a>,
db: arroy::Database<D>,
) -> impl Iterator<Item = Result<arroy::Reader<D>, arroy::Error>> + 'a {
) -> impl Iterator<Item = Result<arroy::Reader<'a, D>, arroy::Error>> + 'a {
arroy_db_range_for_embedder(self.embedder_index).map_while(move |index| {
match arroy::Reader::open(rtxn, index, db) {
Ok(reader) => match reader.is_empty(rtxn) {
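
Note: the return type now names the borrow explicitly, `arroy::Reader<'a, D>` instead of `arroy::Reader<D>`; newer rustc pushes toward spelling out such lifetimes in type paths (e.g. the `elided_lifetimes_in_paths` lint). A minimal sketch of the same shape with stand-in types:

    struct Reader<'t> {
        txn: &'t str, // stand-in for a read transaction
    }

    struct Wrapper {
        indexes: Vec<u16>,
    }

    impl Wrapper {
        // Naming the lifetime makes it visible that every yielded `Reader`
        // borrows from `rtxn` for as long as the iterator lives.
        fn readers<'a>(&'a self, rtxn: &'a str) -> impl Iterator<Item = Reader<'a>> + 'a {
            self.indexes.iter().map(move |_| Reader { txn: rtxn })
        }
    }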

View file

@@ -242,11 +242,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
id = contains_key_rec(opt1, "opt2").then(|| document.id.clone());
}
} else if matches!(filter, "opt1 IS NULL" | "NOT opt1 IS NOT NULL") {
id = document.opt1.as_ref().map_or(false, |v| v.is_null()).then(|| document.id.clone());
id = document.opt1.as_ref().is_some_and(|v| v.is_null()).then(|| document.id.clone());
} else if matches!(filter, "NOT opt1 IS NULL" | "opt1 IS NOT NULL") {
id = document.opt1.as_ref().map_or(true, |v| !v.is_null()).then(|| document.id.clone());
id = document.opt1.as_ref().is_none_or(|v| !v.is_null()).then(|| document.id.clone());
} else if matches!(filter, "opt1.opt2 IS NULL") {
if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) {
if document.opt1opt2.as_ref().is_some_and(|v| v.is_null()) {
id = Some(document.id.clone());
} else if let Some(opt1) = &document.opt1 {
if !opt1.is_null() {
@@ -254,15 +254,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
}
}
} else if matches!(filter, "opt1 IS EMPTY" | "NOT opt1 IS NOT EMPTY") {
id = document.opt1.as_ref().map_or(false, is_empty_value).then(|| document.id.clone());
id = document.opt1.as_ref().is_some_and(is_empty_value).then(|| document.id.clone());
} else if matches!(filter, "NOT opt1 IS EMPTY" | "opt1 IS NOT EMPTY") {
id = document
.opt1
.as_ref()
.map_or(true, |v| !is_empty_value(v))
.then(|| document.id.clone());
id = document.opt1.as_ref().is_none_or(|v| !is_empty_value(v)).then(|| document.id.clone());
} else if matches!(filter, "opt1.opt2 IS EMPTY") {
if document.opt1opt2.as_ref().map_or(false, is_empty_value) {
if document.opt1opt2.as_ref().is_some_and(is_empty_value) {
id = Some(document.id.clone());
}
} else if matches!(

View file

@@ -66,7 +66,7 @@ use tracing_error::ExtractSpanTrace as _;
use tracing_subscriber::layer::SubscriberExt as _;
use tracing_trace::processor;
fn on_panic(info: &std::panic::PanicInfo) {
fn on_panic(info: &std::panic::PanicHookInfo) {
let info = info.to_string();
let trace = SpanTrace::capture();
tracing::error!(%info, %trace);
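
Note: `std::panic::PanicInfo` was renamed to `std::panic::PanicHookInfo` for panic hooks in Rust 1.81 (the old name is deprecated in that position), so this signature change tracks the standard library. A minimal sketch of installing such a hook:

    fn main() {
        std::panic::set_hook(Box::new(|info: &std::panic::PanicHookInfo| {
            // `PanicHookInfo` implements `Display`, like `PanicInfo` before it.
            eprintln!("panic: {info}");
        }));
        let _ = std::panic::catch_unwind(|| panic!("boom"));
    }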

View file

@@ -282,7 +282,7 @@ struct SpanMarker<'a> {
memory_delta: Option<MemoryStats>,
}
impl<'a> ProfilerMarker for SpanMarker<'a> {
impl ProfilerMarker for SpanMarker<'_> {
const MARKER_TYPE_NAME: &'static str = "span";
fn schema() -> MarkerSchema {
@@ -369,7 +369,7 @@ impl<'a> ProfilerMarker for EventMarker<'a> {
memory_delta: Option<MemoryStats>,
}
impl<'a> ProfilerMarker for EventMarker<'a> {
impl ProfilerMarker for EventMarker<'_> {
const MARKER_TYPE_NAME: &'static str = "tracing-event";
fn schema() -> MarkerSchema {