Mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-04-18 07:57:59 +02:00
Make Cargo and Clippy happy
This commit is contained in:
parent 249da5846c
commit 4d90e3d2ec
Cargo.lock (generated): 6 changed lines
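Most hunks below apply one Clippy suggestion (the `unnecessary_map_or` lint): `Option::map_or(false, f)` becomes `is_some_and(f)`, and `Option::map_or(true, f)` becomes `is_none_or(f)`. A minimal standalone sketch of the rewrite, with a hypothetical `opt` value that is not part of this diff:

    fn main() {
        let opt: Option<u32> = Some(3);

        // Before: the constant default hides which way the None case goes.
        assert!(opt.map_or(false, |n| n > 2));
        assert!(opt.map_or(true, |n| n > 2));

        // After: the None behavior is spelled out in the method name.
        // is_some_and is stable since Rust 1.70, is_none_or since Rust 1.82.
        assert!(opt.is_some_and(|n| n > 2));
        assert!(opt.is_none_or(|n| n > 2));
    }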
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
 
 [[package]]
 name = "actix-codec"
@@ -758,9 +758,9 @@ dependencies = [
 
 [[package]]
 name = "bytemuck_derive"
-version = "1.6.0"
+version = "1.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4da9a32f3fed317401fa3c862968128267c3106685286e15d5aaa3d7389c2f60"
+checksum = "7ecc273b49b3205b83d648f0690daa588925572cc5063745bfe547fe7ec8e1a1"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -108,7 +108,7 @@ where
     /// not supported on untagged enums.
     struct StarOrVisitor<T>(PhantomData<T>);
 
-    impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
+    impl<T, FE> Visitor<'_> for StarOrVisitor<T>
     where
         T: FromStr<Err = FE>,
         FE: Display,
@@ -99,7 +99,7 @@ impl Task {
     /// Return true when a task is finished.
     /// A task is finished when its last state is either `Succeeded` or `Failed`.
     pub fn is_finished(&self) -> bool {
-        self.events.last().map_or(false, |event| {
+        self.events.last().is_some_and(|event| {
             matches!(event, TaskEvent::Succeded { .. } | TaskEvent::Failed { .. })
         })
     }
@@ -108,7 +108,7 @@ where
     /// not supported on untagged enums.
     struct StarOrVisitor<T>(PhantomData<T>);
 
-    impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
+    impl<T, FE> Visitor<'_> for StarOrVisitor<T>
     where
         T: FromStr<Err = FE>,
         FE: Display,
@@ -114,7 +114,7 @@ impl Task {
     /// Return true when a task is finished.
     /// A task is finished when its last state is either `Succeeded` or `Failed`.
     pub fn is_finished(&self) -> bool {
-        self.events.last().map_or(false, |event| {
+        self.events.last().is_some_and(|event| {
             matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
         })
     }
@@ -35,7 +35,7 @@ impl<E> NomErrorExt<E> for nom::Err<E> {
 pub fn cut_with_err<'a, O>(
     mut parser: impl FnMut(Span<'a>) -> IResult<'a, O>,
     mut with: impl FnMut(Error<'a>) -> Error<'a>,
-) -> impl FnMut(Span<'a>) -> IResult<O> {
+) -> impl FnMut(Span<'a>) -> IResult<'a, O> {
     move |input| match parser.parse(input) {
         Err(nom::Err::Error(e)) => Err(nom::Err::Failure(with(e))),
         rest => rest,
@@ -121,7 +121,7 @@ impl<'a> ParseError<Span<'a>> for Error<'a> {
     }
 }
 
-impl<'a> Display for Error<'a> {
+impl Display for Error<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let input = self.context.fragment();
         // When printing our error message we want to escape all `\n` to be sure we keep our format with the
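Many of the hunks that follow are the same mechanical fix for Clippy's `needless_lifetimes` lint: a lifetime named on the impl but not used beyond the self type is elided with `'_`. A sketch of the before/after shape, using an illustrative `Error` type rather than the parser's real one:

    use std::fmt;

    struct Error<'a> {
        context: &'a str,
    }

    // Before (the shape of the removed lines):
    // impl<'a> fmt::Display for Error<'a> { ... }

    // After: the anonymous lifetime '_ stands in for "some lifetime we never name".
    impl fmt::Display for Error<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "error near `{}`", self.context)
        }
    }

    fn main() {
        println!("{}", Error { context: "doggo" });
    }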
@@ -80,7 +80,7 @@ pub struct Token<'a> {
     value: Option<String>,
 }
 
-impl<'a> PartialEq for Token<'a> {
+impl PartialEq for Token<'_> {
     fn eq(&self, other: &Self) -> bool {
         self.span.fragment() == other.span.fragment()
     }
@@ -226,7 +226,7 @@ impl<'a> FilterCondition<'a> {
         }
     }
 
-    pub fn parse(input: &'a str) -> Result<Option<Self>, Error> {
+    pub fn parse(input: &'a str) -> Result<Option<Self>, Error<'a>> {
         if input.trim().is_empty() {
             return Ok(None);
         }
@@ -527,7 +527,7 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
     terminated(|input| parse_expression(input, 0), eof)(input)
 }
 
-impl<'a> std::fmt::Display for FilterCondition<'a> {
+impl std::fmt::Display for FilterCondition<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             FilterCondition::Not(filter) => {
@@ -576,7 +576,8 @@ impl<'a> std::fmt::Display for FilterCondition<'a> {
         }
     }
 }
-impl<'a> std::fmt::Display for Condition<'a> {
+
+impl std::fmt::Display for Condition<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Condition::GreaterThan(token) => write!(f, "> {token}"),
@@ -594,7 +595,8 @@ impl<'a> std::fmt::Display for Condition<'a> {
         }
     }
 }
-impl<'a> std::fmt::Display for Token<'a> {
+
+impl std::fmt::Display for Token<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "{{{}}}", self.value())
     }
@@ -52,7 +52,7 @@ fn quoted_by(quote: char, input: Span) -> IResult<Token> {
 }
 
 // word = (alphanumeric | _ | - | .)+ except for reserved keywords
-pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<Token<'a>> {
+pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<'a, Token<'a>> {
     let (input, word): (_, Token<'a>) =
         take_while1(is_value_component)(input).map(|(s, t)| (s, t.into()))?;
     if is_keyword(word.value()) {
@@ -696,7 +696,7 @@ impl IndexScheduler {
         written: usize,
     }
 
-    impl<'a, 'b> Read for TaskReader<'a, 'b> {
+    impl Read for TaskReader<'_, '_> {
         fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
             if self.buffer.is_empty() {
                 match self.tasks.next() {
@@ -315,7 +315,7 @@ impl Queue {
         if let Some(batch_uids) = batch_uids {
             let mut batch_tasks = RoaringBitmap::new();
             for batch_uid in batch_uids {
-                if processing_batch.as_ref().map_or(false, |batch| batch.uid == *batch_uid) {
+                if processing_batch.as_ref().is_some_and(|batch| batch.uid == *batch_uid) {
                     batch_tasks |= &**processing_tasks;
                 } else {
                     batch_tasks |= self.tasks_in_batch(rtxn, *batch_uid)?;
@@ -219,7 +219,7 @@ impl BatchKind {
                 primary_key.is_some() &&
                 // 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
                 // 2.1.2 If the task don't have a primary-key -> we can continue
-                kind.primary_key().map_or(true, |pk| pk == primary_key)
+                kind.primary_key().is_none_or(|pk| pk == primary_key)
             ) ||
             // 2.2 If we don't have a primary-key ->
             (
@@ -960,7 +960,7 @@ impl<'de> Deserialize<'de> for RankingRuleView {
         D: serde::Deserializer<'de>,
     {
         struct Visitor;
-        impl<'de> serde::de::Visitor<'de> for Visitor {
+        impl serde::de::Visitor<'_> for Visitor {
             type Value = RankingRuleView;
             fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                 write!(formatter, "the name of a valid ranking rule (string)")
@@ -66,7 +66,7 @@ where
     /// not supported on untagged enums.
     struct StarOrVisitor<T>(PhantomData<T>);
 
-    impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
+    impl<T, FE> Visitor<'_> for StarOrVisitor<T>
     where
         T: FromStr<Err = FE>,
         FE: fmt::Display,
@@ -346,7 +346,7 @@ fn open_or_create_database_unchecked(
     match (
         index_scheduler_builder(),
         auth_controller.map_err(anyhow::Error::from),
-        create_current_version_file(&opt.db_path).map_err(anyhow::Error::from),
+        create_current_version_file(&opt.db_path),
     ) {
         (Ok(i), Ok(a), Ok(())) => Ok((i, a)),
         (Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {
@@ -69,7 +69,7 @@ fn setup(opt: &Opt) -> anyhow::Result<(LogRouteHandle, LogStderrHandle)> {
     Ok((route_layer_handle, stderr_layer_handle))
 }
 
-fn on_panic(info: &std::panic::PanicInfo) {
+fn on_panic(info: &std::panic::PanicHookInfo) {
     let info = info.to_string().replace('\n', " ");
     tracing::error!(%info);
 }
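The `on_panic` hunk above tracks a rename in the standard library: the argument type of panic hooks, `std::panic::PanicInfo`, became `PanicHookInfo` in Rust 1.82, with the old name kept as a deprecated alias. A hedged sketch of registering such a hook, using `eprintln!` in place of the crate's `tracing` macros:

    fn on_panic(info: &std::panic::PanicHookInfo) {
        // Flatten the multi-line panic message so it fits a single log line.
        let info = info.to_string().replace('\n', " ");
        eprintln!("panic: {info}");
    }

    fn main() {
        std::panic::set_hook(Box::new(|info| on_panic(info)));
        panic!("oh no");
    }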
@@ -929,7 +929,6 @@ where
 }
 
 /// Functions used to get default value for `Opt` fields, needs to be function because of serde's default attribute.
-
 fn default_db_path() -> PathBuf {
     PathBuf::from(DEFAULT_DB_PATH)
 }
@@ -1037,7 +1036,7 @@ where
 {
     struct BoolOrInt;
 
-    impl<'de> serde::de::Visitor<'de> for BoolOrInt {
+    impl serde::de::Visitor<'_> for BoolOrInt {
         type Value = ScheduleSnapshot;
 
         fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@@ -302,7 +302,7 @@ impl From<FacetSearchQuery> for SearchQuery {
 
         // If exhaustive_facet_count is true, we need to set the page to 0
        // because the facet search is not exhaustive by default.
-        let page = if exhaustive_facet_count.map_or(false, |exhaustive| exhaustive) {
+        let page = if exhaustive_facet_count.is_some_and(|exhaustive| exhaustive) {
             // setting the page to 0 will force the search to be exhaustive when computing the number of hits,
             // but it will skip the bucket sort saving time.
             Some(0)
@@ -169,8 +169,7 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
             )
         })
     })
-    .transpose()?
-    .map_or(false, |s| s.to_lowercase() == "true"))
+    .transpose()?.is_some_and(|s| s.to_lowercase() == "true"))
 }
 
 #[derive(Debug, Serialize, ToSchema)]
@@ -32,7 +32,6 @@ pub const FEDERATION_REMOTE: &str = "remote";
 #[derive(Debug, Default, Clone, PartialEq, Serialize, deserr::Deserr, ToSchema)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
-
 pub struct FederationOptions {
     #[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
     #[schema(value_type = f64)]
@@ -1544,7 +1544,7 @@ pub fn perform_facet_search(
     let locales = localized_attributes_locales.map(|attr| {
         attr.locales
             .into_iter()
-            .filter(|locale| locales.as_ref().map_or(true, |locales| locales.contains(locale)))
+            .filter(|locale| locales.as_ref().is_none_or(|locales| locales.contains(locale)))
             .collect()
     });
 
@@ -259,7 +259,7 @@ impl<'a> Index<'a, Owned> {
     }
 }
 
-impl<'a> Index<'a, Shared> {
+impl Index<'_, Shared> {
     /// You cannot modify the content of a shared index, thus the delete_document_by_filter call
     /// must fail. If the task successfully enqueue itself, we'll wait for the task to finishes,
     /// and if it succeed the function will panic.
@@ -271,7 +271,7 @@ fn fetch_matching_values_in_object(
 }
 
 fn starts_with(selector: &str, key: &str) -> bool {
-    selector.strip_prefix(key).map_or(false, |tail| {
+    selector.strip_prefix(key).is_some_and(|tail| {
         tail.chars().next().map(|c| c == PRIMARY_KEY_SPLIT_SYMBOL).unwrap_or(true)
     })
 }
@@ -27,7 +27,7 @@ impl<'a, W> DocumentVisitor<'a, W> {
     }
 }
 
-impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
+impl<'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'_, W> {
     /// This Visitor value is nothing, since it write the value to a file.
     type Value = Result<(), Error>;
 
@@ -61,7 +61,7 @@ impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
     }
 }
 
-impl<'a, 'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'a, W>
+impl<'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'_, W>
 where
     W: Write,
 {
@@ -25,7 +25,7 @@ impl ExternalDocumentsIds {
 
     /// Returns `true` if hard and soft external documents lists are empty.
     pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
-        self.0.is_empty(rtxn).map_err(Into::into)
+        self.0.is_empty(rtxn)
     }
 
     pub fn get<A: AsRef<str>>(
@@ -119,7 +119,7 @@ impl<'indexing> GlobalFieldsIdsMap<'indexing> {
     }
 }
 
-impl<'indexing> MutFieldIdMapper for GlobalFieldsIdsMap<'indexing> {
+impl MutFieldIdMapper for GlobalFieldsIdsMap<'_> {
     fn insert(&mut self, name: &str) -> Option<FieldId> {
         self.id_or_insert(name)
     }
@@ -204,7 +204,7 @@ pub fn relative_from_absolute_position(absolute: Position) -> (FieldId, Relative
 
 // Compute the absolute word position with the field id of the attribute and relative position in the attribute.
 pub fn absolute_from_relative_position(field_id: FieldId, relative: RelativePosition) -> Position {
-    (field_id as u32) << 16 | (relative as u32)
+    ((field_id as u32) << 16) | (relative as u32)
 }
 // TODO: this is wrong, but will do for now
 /// Compute the "bucketed" absolute position from the field id and relative position in the field.
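The `absolute_from_relative_position` hunk above answers Clippy's `precedence` lint: `<<` already binds tighter than `|`, so the added parentheses change nothing at runtime and only make the bit packing explicit. A quick self-contained check of the scheme, with type aliases assumed to match milli's (u16 ids, u32 positions):

    type FieldId = u16;
    type RelativePosition = u16;
    type Position = u32;

    // Field id in the high 16 bits, position within the field in the low 16 bits.
    fn absolute_from_relative_position(field_id: FieldId, relative: RelativePosition) -> Position {
        ((field_id as u32) << 16) | (relative as u32)
    }

    fn main() {
        assert_eq!(absolute_from_relative_position(1, 2), 0x0001_0002);
    }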
@@ -372,7 +372,7 @@ pub fn is_faceted(field: &str, faceted_fields: impl IntoIterator<Item = impl AsR
 /// assert!(!is_faceted_by("animaux.chien", "animaux.chie"));
 /// ```
 pub fn is_faceted_by(field: &str, facet: &str) -> bool {
-    field.starts_with(facet) && field[facet.len()..].chars().next().map_or(true, |c| c == '.')
+    field.starts_with(facet) && field[facet.len()..].chars().next().is_none_or(|c| c == '.')
 }
 
 pub fn normalize_facet(original: &str) -> String {
@@ -15,7 +15,7 @@ impl<'a, D: ObjectView, F: ArrayView> Context<'a, D, F> {
     }
 }
 
-impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
+impl<D: ObjectView, F: ArrayView> ObjectView for Context<'_, D, F> {
     fn as_value(&self) -> &dyn ValueView {
         self
     }
@@ -52,7 +52,7 @@ impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
     }
 }
 
-impl<'a, D: ObjectView, F: ArrayView> ValueView for Context<'a, D, F> {
+impl<D: ObjectView, F: ArrayView> ValueView for Context<'_, D, F> {
     fn as_debug(&self) -> &dyn std::fmt::Debug {
         self
     }
@@ -67,7 +67,7 @@ impl<'a> Document<'a> {
     }
 }
 
-impl<'a> ObjectView for Document<'a> {
+impl ObjectView for Document<'_> {
     fn as_value(&self) -> &dyn ValueView {
         self
     }
@@ -98,7 +98,7 @@ impl<'a> ObjectView for Document<'a> {
     }
 }
 
-impl<'a> ValueView for Document<'a> {
+impl ValueView for Document<'_> {
     fn as_debug(&self) -> &dyn Debug {
         self
     }
@@ -283,7 +283,7 @@ impl<'doc> ParseableArray<'doc> {
     }
 }
 
-impl<'doc> ArrayView for ParseableArray<'doc> {
+impl ArrayView for ParseableArray<'_> {
     fn as_value(&self) -> &dyn ValueView {
         self
     }
@@ -311,7 +311,7 @@ impl<'doc> ArrayView for ParseableArray<'doc> {
     }
 }
 
-impl<'doc> ValueView for ParseableArray<'doc> {
+impl ValueView for ParseableArray<'_> {
     fn as_debug(&self) -> &dyn std::fmt::Debug {
         self
     }
@@ -353,7 +353,7 @@ impl<'doc> ValueView for ParseableArray<'doc> {
     }
 }
 
-impl<'doc> ObjectView for ParseableMap<'doc> {
+impl ObjectView for ParseableMap<'_> {
     fn as_value(&self) -> &dyn ValueView {
         self
     }
@@ -392,7 +392,7 @@ impl<'doc> ObjectView for ParseableMap<'doc> {
     }
 }
 
-impl<'doc> ValueView for ParseableMap<'doc> {
+impl ValueView for ParseableMap<'_> {
     fn as_debug(&self) -> &dyn std::fmt::Debug {
         self
     }
@@ -441,7 +441,7 @@ impl<'doc> ValueView for ParseableMap<'doc> {
     }
 }
 
-impl<'doc> ValueView for ParseableValue<'doc> {
+impl ValueView for ParseableValue<'_> {
     fn as_debug(&self) -> &dyn Debug {
         self
     }
@@ -622,7 +622,7 @@ struct ArraySource<'s, 'doc> {
     s: &'s RawVec<'doc>,
 }
 
-impl<'s, 'doc> fmt::Display for ArraySource<'s, 'doc> {
+impl fmt::Display for ArraySource<'_, '_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "[")?;
         for item in self.s {
@@ -638,7 +638,7 @@ struct ArrayRender<'s, 'doc> {
     s: &'s RawVec<'doc>,
 }
 
-impl<'s, 'doc> fmt::Display for ArrayRender<'s, 'doc> {
+impl fmt::Display for ArrayRender<'_, '_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         for item in self.s {
             let v = ParseableValue::new(item, self.s.bump());
@@ -17,7 +17,7 @@ pub struct FieldValue<'a, D: ObjectView> {
     metadata: Metadata,
 }
 
-impl<'a, D: ObjectView> ValueView for FieldValue<'a, D> {
+impl<D: ObjectView> ValueView for FieldValue<'_, D> {
     fn as_debug(&self) -> &dyn std::fmt::Debug {
         self
     }
@@ -78,7 +78,7 @@ impl<'a, D: ObjectView> FieldValue<'a, D> {
     }
 }
 
-impl<'a, D: ObjectView> ObjectView for FieldValue<'a, D> {
+impl<D: ObjectView> ObjectView for FieldValue<'_, D> {
     fn as_value(&self) -> &dyn ValueView {
         self
     }
@@ -148,7 +148,7 @@ impl<'a, 'map, D: ObjectView> BorrowedFields<'a, 'map, D> {
     }
 }
 
-impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
+impl<D: ObjectView> ArrayView for OwnedFields<'_, D> {
     fn as_value(&self) -> &dyn ValueView {
         self.0.as_value()
     }
@@ -170,7 +170,7 @@ impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
     }
 }
 
-impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
+impl<D: ObjectView> ArrayView for BorrowedFields<'_, '_, D> {
     fn as_value(&self) -> &dyn ValueView {
         self
     }
@@ -212,7 +212,7 @@ impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
     }
 }
 
-impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
+impl<D: ObjectView> ValueView for BorrowedFields<'_, '_, D> {
     fn as_debug(&self) -> &dyn std::fmt::Debug {
         self
     }
@@ -254,7 +254,7 @@ impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
     }
 }
 
-impl<'a, D: ObjectView> ValueView for OwnedFields<'a, D> {
+impl<D: ObjectView> ValueView for OwnedFields<'_, D> {
     fn as_debug(&self) -> &dyn std::fmt::Debug {
         self
     }
@@ -292,7 +292,7 @@ struct ArraySource<'a, 'map, D: ObjectView> {
     s: &'a BorrowedFields<'a, 'map, D>,
 }
 
-impl<'a, 'map, D: ObjectView> fmt::Display for ArraySource<'a, 'map, D> {
+impl<D: ObjectView> fmt::Display for ArraySource<'_, '_, D> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "[")?;
         for item in self.s.values() {
@@ -307,7 +307,7 @@ struct ArrayRender<'a, 'map, D: ObjectView> {
     s: &'a BorrowedFields<'a, 'map, D>,
 }
 
-impl<'a, 'map, D: ObjectView> fmt::Display for ArrayRender<'a, 'map, D> {
+impl<D: ObjectView> fmt::Display for ArrayRender<'_, '_, D> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         for item in self.s.values() {
             write!(f, "{}", item.render())?;
@@ -358,7 +358,7 @@ impl<'a> FacetDistribution<'a> {
     ) -> bool {
         // If the field is not filterable, we don't want to compute the facet distribution.
         if !matching_features(name, filterable_attributes_rules)
-            .map_or(false, |(_, features)| features.is_filterable())
+            .is_some_and(|(_, features)| features.is_filterable())
         {
             return false;
         }
@@ -383,8 +383,7 @@ impl<'a> FacetDistribution<'a> {
         if let Some(facets) = &self.facets {
             for field in facets.keys() {
                 let matched_rule = matching_features(field, filterable_attributes_rules);
-                let is_filterable =
-                    matched_rule.map_or(false, |(_, features)| features.is_filterable());
+                let is_filterable = matched_rule.is_some_and(|(_, f)| f.is_filterable());
 
                 if !is_filterable {
                     invalid_facets.insert(field.to_string());
@@ -79,7 +79,7 @@ struct FacetRangeSearch<'t, 'b, 'bitmap> {
     docids: &'bitmap mut RoaringBitmap,
 }
 
-impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
+impl<'t> FacetRangeSearch<'t, '_, '_> {
     fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
         let left_key =
             FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };
@@ -62,7 +62,7 @@ struct AscendingFacetSort<'t, 'e> {
     )>,
 }
 
-impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
+impl<'t> Iterator for AscendingFacetSort<'t, '_> {
     type Item = Result<(RoaringBitmap, &'t [u8])>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -66,15 +66,15 @@ enum FilterError<'a> {
     ParseGeoError(BadGeoError),
     TooDeep,
 }
-impl<'a> std::error::Error for FilterError<'a> {}
+impl std::error::Error for FilterError<'_> {}
 
-impl<'a> From<BadGeoError> for FilterError<'a> {
+impl From<BadGeoError> for FilterError<'_> {
     fn from(geo_error: BadGeoError) -> Self {
         FilterError::ParseGeoError(geo_error)
     }
 }
 
-impl<'a> Display for FilterError<'a> {
+impl Display for FilterError<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Self::AttributeNotFilterable { attribute, filterable_patterns } => {
@@ -236,8 +236,7 @@ impl<'a> Filter<'a> {
         let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
         for fid in self.condition.fids(MAX_FILTER_DEPTH) {
             let attribute = fid.value();
-            if matching_features(attribute, &filterable_attributes_rules)
-                .map_or(false, |(_, features)| features.is_filterable())
+            if matching_features(attribute, &filterable_attributes_rules).is_some_and(|(_, features)| features.is_filterable())
             {
                 continue;
             }
@@ -461,7 +460,7 @@ impl<'a> Filter<'a> {
         filterable_attribute_rules: &[FilterableAttributesRule],
         universe: Option<&RoaringBitmap>,
     ) -> Result<RoaringBitmap> {
-        if universe.map_or(false, |u| u.is_empty()) {
+        if universe.is_some_and(|u| u.is_empty()) {
             return Ok(RoaringBitmap::new());
         }
 
@@ -77,7 +77,7 @@ impl<'a> SearchForFacetValues<'a> {
         let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
         let matched_rule = matching_features(&self.facet, &filterable_attributes_rules);
         let is_facet_searchable =
-            matched_rule.map_or(false, |(_, features)| features.is_facet_searchable());
+            matched_rule.is_some_and(|(_, features)| features.is_facet_searchable());
 
         if !is_facet_searchable {
             let matching_field_names =
@@ -135,7 +135,7 @@ impl<'a> SearchForFacetValues<'a> {
 
         if authorize_typos && field_authorizes_typos {
             let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
-            if exact_words_fst.map_or(false, |fst| fst.contains(query)) {
+            if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
                 if fst.contains(query) {
                 self.fetch_original_facets_using_normalized(
                     fid,
@@ -151,7 +151,7 @@ impl ScoreWithRatioResult {
     }
 }
 
-impl<'a> Search<'a> {
+impl Search<'_> {
     #[tracing::instrument(level = "trace", skip_all, target = "search::hybrid")]
     pub fn execute_hybrid(&self, semantic_ratio: f32) -> Result<(SearchResult, Option<u32>)> {
         // TODO: find classier way to achieve that than to reset vector and query params
@@ -192,7 +192,7 @@ impl<'a> Search<'a> {
             // check if the distinct field is in the filterable fields
             let matched_rule = matching_features(distinct, &filterable_fields);
             let is_filterable =
-                matched_rule.map_or(false, |(_, features)| features.is_filterable());
+                matched_rule.is_some_and(|(_, features)| features.is_filterable());
 
             if !is_filterable {
                 // if not, remove the hidden fields from the filterable fields to generate the error message
@@ -537,7 +537,7 @@ impl<'ctx> SearchContext<'ctx> {
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
         // if the requested fid isn't in the restricted list, return None.
-        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+        if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
            return Ok(None);
        }
 
@@ -558,7 +558,7 @@ impl<'ctx> SearchContext<'ctx> {
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
         // if the requested fid isn't in the restricted list, return None.
-        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+        if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
            return Ok(None);
        }
 
@@ -71,8 +71,7 @@ pub fn find_best_match_interval(matches: &[Match], crop_size: usize) -> [&Match;
     let mut save_best_interval = |interval_first, interval_last| {
         let interval_score = get_interval_score(&matches[interval_first..=interval_last]);
         let is_interval_score_better = &best_interval
-            .as_ref()
-            .map_or(true, |MatchIntervalWithScore { score, .. }| interval_score > *score);
+            .as_ref().is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);
 
         if *is_interval_score_better {
             best_interval = Some(MatchIntervalWithScore {
@@ -123,7 +123,7 @@ pub struct Matcher<'t, 'tokenizer, 'b, 'lang> {
     matches: Option<(Vec<Token<'t>>, Vec<Match>)>,
 }
 
-impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
+impl<'t> Matcher<'t, '_, '_, '_> {
     /// Iterates over tokens and save any of them that matches the query.
     fn compute_matches(&mut self) -> &mut Self {
         /// some words are counted as matches only if they are close together and in the good order,
@@ -327,7 +327,7 @@ impl QueryGraph {
         let mut peekable = term_with_frequency.into_iter().peekable();
         while let Some((idx, frequency)) = peekable.next() {
             term_weight.insert(idx, weight);
-            if peekable.peek().map_or(false, |(_, f)| frequency != *f) {
+            if peekable.peek().is_some_and(|(_, f)| frequency != *f) {
                 weight += 1;
             }
         }
@@ -418,7 +418,7 @@ fn split_best_frequency(
     let right = ctx.word_interner.insert(right.to_owned());
 
     if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(None, left, right, 1)? {
-        if best.map_or(true, |(old, _, _)| frequency > old) {
+        if best.is_none_or(|(old, _, _)| frequency > old) {
             best = Some((frequency, left, right));
         }
     }
@@ -203,7 +203,7 @@ pub fn number_of_typos_allowed<'ctx>(
     Ok(Box::new(move |word: &str| {
         if !authorize_typos
             || word.len() < min_len_one_typo as usize
-            || exact_words.as_ref().map_or(false, |fst| fst.contains(word))
+            || exact_words.as_ref().is_some_and(|fst| fst.contains(word))
         {
             0
         } else if word.len() < min_len_two_typos as usize {
@@ -17,7 +17,7 @@ use crate::Result;
 pub struct PhraseDocIdsCache {
     pub cache: FxHashMap<Interned<Phrase>, RoaringBitmap>,
 }
-impl<'ctx> SearchContext<'ctx> {
+impl SearchContext<'_> {
     /// Get the document ids associated with the given phrase
     pub fn get_phrase_docids(&mut self, phrase: Interned<Phrase>) -> Result<&RoaringBitmap> {
         if self.phrase_docids.cache.contains_key(&phrase) {
@@ -263,7 +263,7 @@ impl SmallBitmapInternal {
 
     pub fn contains(&self, x: u16) -> bool {
         let (set, x) = self.get_set_index(x);
-        set & 0b1 << x != 0
+        set & (0b1 << x) != 0
     }
 
     pub fn insert(&mut self, x: u16) {
@@ -381,7 +381,7 @@ pub enum SmallBitmapInternalIter<'b> {
     Tiny(u64),
     Small { cur: u64, next: &'b [u64], base: u16 },
 }
-impl<'b> Iterator for SmallBitmapInternalIter<'b> {
+impl Iterator for SmallBitmapInternalIter<'_> {
     type Item = u16;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -102,7 +102,7 @@ impl FacetsUpdateIncremental {
                 .map_err(heed::Error::Encoding)?;
 
             if facet_level_may_be_updated
-                && current_field_id.map_or(false, |fid| fid != key.field_id)
+                && current_field_id.is_some_and(|fid| fid != key.field_id)
             {
                 // Only add or remove a level after making all the field modifications.
                 self.inner.add_or_delete_level(wtxn, current_field_id.unwrap())?;
@@ -530,8 +530,8 @@ impl FacetsUpdateIncrementalInner {
         add_docids: Option<&RoaringBitmap>,
         del_docids: Option<&RoaringBitmap>,
     ) -> Result<bool> {
-        if add_docids.map_or(true, RoaringBitmap::is_empty)
-            && del_docids.map_or(true, RoaringBitmap::is_empty)
+        if add_docids.is_none_or(RoaringBitmap::is_empty)
+            && del_docids.is_none_or(RoaringBitmap::is_empty)
         {
             return Ok(false);
         }
@@ -670,7 +670,7 @@ impl FacetsUpdateIncrementalInner {
     }
 }
 
-impl<'a> FacetGroupKey<&'a [u8]> {
+impl FacetGroupKey<&[u8]> {
     pub fn into_owned(self) -> FacetGroupKey<Vec<u8>> {
         FacetGroupKey {
             field_id: self.field_id,
@@ -159,12 +159,10 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
 
         let del_geo_support = settings_diff
             .old
-            .geo_fields_ids
-            .map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
+            .geo_fields_ids.is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
         let add_geo_support = settings_diff
             .new
-            .geo_fields_ids
-            .map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
+            .geo_fields_ids.is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
         let del_filterable_values =
             del_value.map(|value| extract_facet_values(&value, del_geo_support));
         let add_filterable_values =
@@ -69,7 +69,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
         let document_id = u32::from_be_bytes(document_id_bytes);
 
         // if we change document, we fill the sorter
-        if current_document_id.map_or(false, |id| id != document_id) {
+        if current_document_id.is_some_and(|id| id != document_id) {
             // FIXME: span inside of a hot loop might degrade performance and create big reports
             let span = tracing::trace_span!(target: "indexing::details", "document_into_sorter");
             let _entered = span.enter();
@@ -96,7 +96,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
         if let Some(deletion) = KvReaderDelAdd::from_slice(value).get(DelAdd::Deletion) {
             for (position, word) in KvReaderU16::from_slice(deletion).iter() {
                 // drain the proximity window until the head word is considered close to the word we are inserting.
-                while del_word_positions.front().map_or(false, |(_w, p)| {
+                while del_word_positions.front().is_some_and(|(_w, p)| {
                     index_proximity(*p as u32, position as u32) >= MAX_DISTANCE
                 }) {
                     word_positions_into_word_pair_proximity(
@@ -129,7 +129,7 @@ pub fn extract_word_pair_proximity_docids<R: io::Read + io::Seek>(
         if let Some(addition) = KvReaderDelAdd::from_slice(value).get(DelAdd::Addition) {
             for (position, word) in KvReaderU16::from_slice(addition).iter() {
                 // drain the proximity window until the head word is considered close to the word we are inserting.
-                while add_word_positions.front().map_or(false, |(_w, p)| {
+                while add_word_positions.front().is_some_and(|(_w, p)| {
                     index_proximity(*p as u32, position as u32) >= MAX_DISTANCE
                 }) {
                     word_positions_into_word_pair_proximity(
@@ -46,7 +46,7 @@ pub fn extract_word_position_docids<R: io::Read + io::Seek>(
             .ok_or(SerializationError::Decoding { db_name: Some(DOCID_WORD_POSITIONS) })?;
         let document_id = DocumentId::from_be_bytes(document_id_bytes);
 
-        if current_document_id.map_or(false, |id| document_id != id) {
+        if current_document_id.is_some_and(|id| document_id != id) {
             words_position_into_sorter(
                 current_document_id.unwrap(),
                 &mut key_buffer,
@@ -281,7 +281,7 @@ fn send_original_documents_data(
     };
     if !(remove_vectors.is_empty()
         && manual_vectors.is_empty()
-        && embeddings.as_ref().map_or(true, |e| e.is_empty()))
+        && embeddings.as_ref().is_none_or(|e| e.is_empty()))
     {
         let _ = lmdb_writer_sx.send(Ok(TypedChunk::VectorPoints {
             remove_vectors,
@@ -515,9 +515,8 @@ where
             let was_quantized = settings_diff
                 .old
                 .embedding_configs
-                .get(&embedder_name)
-                .map_or(false, |conf| conf.2);
-            let is_quantizing = embedder_config.map_or(false, |action| action.is_being_quantized);
+                .get(&embedder_name).is_some_and(|conf| conf.2);
+            let is_quantizing = embedder_config.is_some_and(|action| action.is_being_quantized);
 
             pool.install(|| {
                 let mut writer = ArroyWrapper::new(vector_arroy, embedder_index, was_quantized);
@@ -197,7 +197,7 @@ impl<'a, 'i> Transform<'a, 'i> {
             // drop_and_reuse is called instead of .clear() to communicate to the compiler that field_buffer
             // does not keep references from the cursor between loop iterations
             let mut field_buffer_cache = drop_and_reuse(field_buffer);
-            if self.indexer_settings.log_every_n.map_or(false, |len| documents_count % len == 0) {
+            if self.indexer_settings.log_every_n.is_some_and(|len| documents_count % len == 0) {
                 progress_callback(UpdateIndexingStep::RemapDocumentAddition {
                     documents_seen: documents_count,
                 });
@@ -55,7 +55,7 @@ impl ChunkAccumulator {
         match self
             .inner
             .iter()
-            .position(|right| right.first().map_or(false, |right| chunk.mergeable_with(right)))
+            .position(|right| right.first().is_some_and(|right| chunk.mergeable_with(right)))
         {
             Some(position) => {
                 let v = self.inner.get_mut(position).unwrap();
@@ -667,8 +667,7 @@ pub(crate) fn write_typed_chunk_into_index(
             let binary_quantized = settings_diff
                 .old
                 .embedding_configs
-                .get(&embedder_name)
-                .map_or(false, |conf| conf.2);
+                .get(&embedder_name).is_some_and(|conf| conf.2);
             // FIXME: allow customizing distance
             let writer = ArroyWrapper::new(index.vector_arroy, embedder_index, binary_quantized);
 
@@ -56,13 +56,13 @@ where
     content: &'t KvReaderFieldId,
 }
 
-impl<'t, Mapper: FieldIdMapper> Clone for DocumentFromDb<'t, Mapper> {
+impl<Mapper: FieldIdMapper> Clone for DocumentFromDb<'_, Mapper> {
     #[inline]
     fn clone(&self) -> Self {
         *self
     }
 }
-impl<'t, Mapper: FieldIdMapper> Copy for DocumentFromDb<'t, Mapper> {}
+impl<Mapper: FieldIdMapper> Copy for DocumentFromDb<'_, Mapper> {}
 
 impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
     fn iter_top_level_fields(&self) -> impl Iterator<Item = Result<(&'t str, &'t RawValue)>> {
@@ -154,7 +154,7 @@ impl<'a, 'doc> DocumentFromVersions<'a, 'doc> {
     }
 }
 
-impl<'a, 'doc> Document<'doc> for DocumentFromVersions<'a, 'doc> {
+impl<'doc> Document<'doc> for DocumentFromVersions<'_, 'doc> {
     fn iter_top_level_fields(&self) -> impl Iterator<Item = Result<(&'doc str, &'doc RawValue)>> {
         self.versions.iter_top_level_fields().map(Ok)
     }
@@ -121,7 +121,7 @@ impl<'extractor> BalancedCaches<'extractor> {
     }
 
     pub fn insert_del_u32(&mut self, key: &[u8], n: u32) -> Result<()> {
-        if self.max_memory.map_or(false, |mm| self.alloc.allocated_bytes() >= mm) {
+        if self.max_memory.is_some_and(|mm| self.alloc.allocated_bytes() >= mm) {
             self.start_spilling()?;
         }
 
@@ -138,7 +138,7 @@ impl<'extractor> BalancedCaches<'extractor> {
     }
 
     pub fn insert_add_u32(&mut self, key: &[u8], n: u32) -> Result<()> {
-        if self.max_memory.map_or(false, |mm| self.alloc.allocated_bytes() >= mm) {
+        if self.max_memory.is_some_and(|mm| self.alloc.allocated_bytes() >= mm) {
             self.start_spilling()?;
         }
 
@@ -623,7 +623,7 @@ pub struct FrozenDelAddBbbul<'bump, B> {
     pub add: Option<FrozenBbbul<'bump, B>>,
 }
 
-impl<'bump, B> FrozenDelAddBbbul<'bump, B> {
+impl<B> FrozenDelAddBbbul<'_, B> {
     fn is_empty(&self) -> bool {
         self.del.is_none() && self.add.is_none()
     }
@@ -31,7 +31,7 @@ pub struct DocumentExtractorData {
     pub field_distribution_delta: HashMap<String, i64>,
 }
 
-impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
+impl<'extractor> Extractor<'extractor> for DocumentsExtractor<'_, '_> {
     type Data = FullySend<RefCell<DocumentExtractorData>>;
 
     fn init_data(&self, _extractor_alloc: &'extractor Bump) -> Result<Self::Data> {
@@ -37,7 +37,7 @@ pub struct FacetedExtractorData<'a, 'b> {
     is_geo_enabled: bool,
 }
 
-impl<'a, 'b, 'extractor> Extractor<'extractor> for FacetedExtractorData<'a, 'b> {
+impl<'extractor> Extractor<'extractor> for FacetedExtractorData<'_, '_> {
     type Data = RefCell<BalancedCaches<'extractor>>;
 
     fn init_data(&self, extractor_alloc: &'extractor Bump) -> Result<Self::Data> {
@@ -92,7 +92,7 @@ pub struct FrozenGeoExtractorData<'extractor> {
     pub spilled_inserted: Option<BufReader<File>>,
 }
 
-impl<'extractor> FrozenGeoExtractorData<'extractor> {
+impl FrozenGeoExtractorData<'_> {
     pub fn iter_and_clear_removed(
         &mut self,
     ) -> io::Result<impl IntoIterator<Item = io::Result<ExtractedGeoPoint>> + '_> {
@@ -160,7 +160,7 @@ impl<'extractor> Extractor<'extractor> for GeoExtractor {
 
         for change in changes {
             if data_ref.spilled_removed.is_none()
-                && max_memory.map_or(false, |mm| context.extractor_alloc.allocated_bytes() >= mm)
+                && max_memory.is_some_and(|mm| context.extractor_alloc.allocated_bytes() >= mm)
             {
                 // We must spill as we allocated too much memory
                 data_ref.spilled_removed = tempfile::tempfile().map(BufWriter::new).map(Some)?;
@@ -31,7 +31,7 @@ pub struct WordDocidsBalancedCaches<'extractor> {
     current_docid: Option<DocumentId>,
 }
 
-unsafe impl<'extractor> MostlySend for WordDocidsBalancedCaches<'extractor> {}
+unsafe impl MostlySend for WordDocidsBalancedCaches<'_> {}
 
 impl<'extractor> WordDocidsBalancedCaches<'extractor> {
     pub fn new_in(buckets: usize, max_memory: Option<usize>, alloc: &'extractor Bump) -> Self {
@@ -78,7 +78,7 @@ impl<'extractor> WordDocidsBalancedCaches<'extractor> {
         buffer.extend_from_slice(&position.to_be_bytes());
         self.word_position_docids.insert_add_u32(&buffer, docid)?;
 
-        if self.current_docid.map_or(false, |id| docid != id) {
+        if self.current_docid.is_some_and(|id| docid != id) {
             self.flush_fid_word_count(&mut buffer)?;
         }
 
@@ -123,7 +123,7 @@ impl<'extractor> WordDocidsBalancedCaches<'extractor> {
         buffer.extend_from_slice(&position.to_be_bytes());
         self.word_position_docids.insert_del_u32(&buffer, docid)?;
 
-        if self.current_docid.map_or(false, |id| docid != id) {
+        if self.current_docid.is_some_and(|id| docid != id) {
             self.flush_fid_word_count(&mut buffer)?;
         }
 
@@ -212,7 +212,7 @@ pub struct WordDocidsExtractorData<'a> {
     searchable_attributes: Option<Vec<&'a str>>,
 }
 
-impl<'a, 'extractor> Extractor<'extractor> for WordDocidsExtractorData<'a> {
+impl<'extractor> Extractor<'extractor> for WordDocidsExtractorData<'_> {
     type Data = RefCell<Option<WordDocidsBalancedCaches<'extractor>>>;
 
     fn init_data(&self, extractor_alloc: &'extractor Bump) -> Result<Self::Data> {
@@ -25,7 +25,7 @@ pub struct WordPairProximityDocidsExtractorData<'a> {
     buckets: usize,
 }
 
-impl<'a, 'extractor> Extractor<'extractor> for WordPairProximityDocidsExtractorData<'a> {
+impl<'extractor> Extractor<'extractor> for WordPairProximityDocidsExtractorData<'_> {
     type Data = RefCell<BalancedCaches<'extractor>>;
 
     fn init_data(&self, extractor_alloc: &'extractor Bump) -> Result<Self::Data> {
@@ -269,8 +269,7 @@ fn process_document_tokens<'doc>(
         }
         // drain the proximity window until the head word is considered close to the word we are inserting.
         while word_positions
-            .front()
-            .map_or(false, |(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
+            .front().is_some_and(|(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
         {
             word_positions_into_word_pair_proximity(word_positions, word_pair_proximity);
         }
@@ -22,7 +22,7 @@ pub struct DocumentTokenizer<'a> {
     pub max_positions_per_attributes: u32,
 }
 
-impl<'a> DocumentTokenizer<'a> {
+impl DocumentTokenizer<'_> {
     pub fn tokenize_document<'doc>(
         &self,
         document: impl Document<'doc>,
@@ -43,7 +43,7 @@ pub struct EmbeddingExtractorData<'extractor>(
 
 unsafe impl MostlySend for EmbeddingExtractorData<'_> {}
 
-impl<'a, 'b, 'extractor> Extractor<'extractor> for EmbeddingExtractor<'a, 'b> {
+impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
     type Data = RefCell<EmbeddingExtractorData<'extractor>>;
 
     fn init_data<'doc>(&'doc self, extractor_alloc: &'extractor Bump) -> crate::Result<Self::Data> {
@@ -29,8 +29,8 @@ impl<'p, 'indexer, Mapper: MutFieldIdMapper> FieldAndDocidExtractor<'p, 'indexer
     }
 }
 
-impl<'de, 'p, 'indexer: 'de, Mapper: MutFieldIdMapper> Visitor<'de>
-    for FieldAndDocidExtractor<'p, 'indexer, Mapper>
+impl<'de, 'indexer: 'de, Mapper: MutFieldIdMapper> Visitor<'de>
+    for FieldAndDocidExtractor<'_, 'indexer, Mapper>
 {
     type Value =
         Result<Result<DeOrBumpStr<'de, 'indexer>, DocumentIdExtractionError>, crate::UserError>;
@@ -98,7 +98,7 @@ struct NestedPrimaryKeyVisitor<'a, 'bump> {
     bump: &'bump Bump,
 }
 
-impl<'de, 'a, 'bump: 'de> Visitor<'de> for NestedPrimaryKeyVisitor<'a, 'bump> {
+impl<'de, 'bump: 'de> Visitor<'de> for NestedPrimaryKeyVisitor<'_, 'bump> {
     type Value = std::result::Result<Option<DeOrBumpStr<'de, 'bump>>, DocumentIdExtractionError>;
 
     fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
@@ -237,7 +237,7 @@ impl<'de, 'a, Mapper: MutFieldIdMapper> Visitor<'de> for MutFieldIdMapVisitor<'a
 
 pub struct FieldIdMapVisitor<'a, Mapper: FieldIdMapper>(pub &'a Mapper);
 
-impl<'de, 'a, Mapper: FieldIdMapper> Visitor<'de> for FieldIdMapVisitor<'a, Mapper> {
+impl<'de, Mapper: FieldIdMapper> Visitor<'de> for FieldIdMapVisitor<'_, Mapper> {
     type Value = Option<FieldId>;
 
     fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
@@ -150,15 +150,12 @@ pub struct IndexingContext<
 }
 
 impl<
-        'fid, // invariant lifetime of fields ids map
-        'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
-        'index, // covariant lifetime of the index
         MSP,
     > Copy
     for IndexingContext<
-        'fid, // invariant lifetime of fields ids map
-        'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
-        'index, // covariant lifetime of the index
+        '_, // invariant lifetime of fields ids map
+        '_, // covariant lifetime of objects that are borrowed during the entire indexing operation
+        '_, // covariant lifetime of the index
         MSP,
     >
 where
@@ -167,15 +164,12 @@ where
 }
 
 impl<
-        'fid, // invariant lifetime of fields ids map
-        'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
-        'index, // covariant lifetime of the index
         MSP,
     > Clone
     for IndexingContext<
-        'fid, // invariant lifetime of fields ids map
-        'indexer, // covariant lifetime of objects that are borrowed during the entire indexing operation
-        'index, // covariant lifetime of the index
+        '_, // invariant lifetime of fields ids map
+        '_, // covariant lifetime of objects that are borrowed during the entire indexing operation
+        '_, // covariant lifetime of the index
        MSP,
    >
 where
@@ -110,7 +110,7 @@ mod test {
        >,
    }
 
-    unsafe impl<'extractor> MostlySend for DeletionWithData<'extractor> {}
+    unsafe impl MostlySend for DeletionWithData<'_> {}
 
     struct TrackDeletion<'extractor>(PhantomData<&'extractor ()>);
 
@@ -210,14 +210,11 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(
                     primary_key.as_ref().unwrap()
                 };
 
-                let external_id = match retrieved_primary_key.extract_fields_and_docid(
+                let external_id = retrieved_primary_key.extract_fields_and_docid(
                     doc,
                     new_fields_ids_map,
                     indexer,
-                ) {
-                    Ok(edi) => edi,
-                    Err(e) => return Err(e),
-                };
+                )?;
 
                 let external_id = external_id.to_de();
                 let current_offset = iter.byte_offset();
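The hunk above folds a match that only forwarded the error into the `?` operator; for a plain passthrough, `match r { Ok(v) => v, Err(e) => return Err(e) }` is what `?` does anyway. A reduced sketch with hypothetical types, not the indexer's:

    fn parse_doubled(input: &str) -> Result<u32, std::num::ParseIntError> {
        // Before:
        // let n = match input.trim().parse() {
        //     Ok(n) => n,
        //     Err(e) => return Err(e),
        // };
        // After: `?` unwraps the Ok value and returns early on Err.
        let n: u32 = input.trim().parse()?;
        Ok(n * 2)
    }

    fn main() {
        assert_eq!(parse_doubled(" 21 "), Ok(42));
        assert!(parse_doubled("doggo").is_err());
    }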
@@ -580,12 +577,12 @@ impl<'pl> PayloadOperations<'pl> {
                     }
                 }
                 Some(InnerDocOp::Deletion) => {
-                    return if self.is_new {
+                    if self.is_new {
                         Ok(None)
                     } else {
                         let deletion = Deletion::create(self.docid, external_doc);
                         Ok(Some(DocumentChange::Deletion(deletion)))
-                    };
+                    }
                 }
                 None => unreachable!("We must not have an empty set of operations on a document"),
             }
@@ -149,7 +149,7 @@ impl<'a, 'rtxn> FrozenPrefixBitmaps<'a, 'rtxn> {
     }
 }
 
-unsafe impl<'a, 'rtxn> Sync for FrozenPrefixBitmaps<'a, 'rtxn> {}
+unsafe impl Sync for FrozenPrefixBitmaps<'_, '_> {}
 
 struct WordPrefixIntegerDocids {
     database: Database<Bytes, CboRoaringBitmapCodec>,
@@ -302,7 +302,7 @@ impl<'a, 'rtxn> FrozenPrefixIntegerBitmaps<'a, 'rtxn> {
     }
 }
 
-unsafe impl<'a, 'rtxn> Sync for FrozenPrefixIntegerBitmaps<'a, 'rtxn> {}
+unsafe impl Sync for FrozenPrefixIntegerBitmaps<'_, '_> {}
 
 #[tracing::instrument(level = "trace", skip_all, target = "indexing::prefix")]
 fn delete_prefixes(
@@ -559,8 +559,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                 let fst = fst::Set::from_iter(stop_words.into_iter())?;
 
                 // Does the new FST differ from the previous one?
-                if current
-                    .map_or(true, |current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
+                if current.is_none_or(|current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
                 {
                     // we want to re-create our FST.
                     self.index.put_stop_words(self.wtxn, &fst)?;
@@ -580,7 +579,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         let current = self.index.non_separator_tokens(self.wtxn)?;
 
         // Does the new list differ from the previous one?
-        if current.map_or(true, |current| &current != non_separator_tokens) {
+        if current.is_none_or(|current| &current != non_separator_tokens) {
             self.index.put_non_separator_tokens(self.wtxn, non_separator_tokens)?;
             true
         } else {
@@ -605,7 +604,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         let current = self.index.separator_tokens(self.wtxn)?;
 
         // Does the new list differ from the previous one?
-        if current.map_or(true, |current| &current != separator_tokens) {
+        if current.is_none_or(|current| &current != separator_tokens) {
             self.index.put_separator_tokens(self.wtxn, separator_tokens)?;
             true
         } else {
@@ -630,7 +629,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         let current = self.index.dictionary(self.wtxn)?;
 
         // Does the new list differ from the previous one?
-        if current.map_or(true, |current| &current != dictionary) {
+        if current.is_none_or(|current| &current != dictionary) {
             self.index.put_dictionary(self.wtxn, dictionary)?;
             true
         } else {
@@ -1340,7 +1339,7 @@ impl InnerIndexSettingsDiff {
             new_settings.embedding_configs.inner_as_ref()
         {
             let was_quantized =
-                old_settings.embedding_configs.get(embedder_name).map_or(false, |conf| conf.2);
+                old_settings.embedding_configs.get(embedder_name).is_some_and(|conf| conf.2);
             // skip embedders that don't use document templates
             if !config.uses_document_template() {
                 continue;
@@ -311,7 +311,7 @@ fn last_named_object<'a>(
     last_named_object
 }
 
-impl<'a> std::fmt::Display for LastNamedObject<'a> {
+impl std::fmt::Display for LastNamedObject<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             LastNamedObject::Object { name } => write!(f, "`{name}`"),
@@ -59,7 +59,7 @@ impl ArroyWrapper {
         &'a self,
         rtxn: &'a RoTxn<'a>,
         db: arroy::Database<D>,
-    ) -> impl Iterator<Item = Result<arroy::Reader<D>, arroy::Error>> + 'a {
+    ) -> impl Iterator<Item = Result<arroy::Reader<'a, D>, arroy::Error>> + 'a {
         arroy_db_range_for_embedder(self.embedder_index).map_while(move |index| {
             match arroy::Reader::open(rtxn, index, db) {
                 Ok(reader) => match reader.is_empty(rtxn) {
@@ -242,11 +242,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
             id = contains_key_rec(opt1, "opt2").then(|| document.id.clone());
         }
     } else if matches!(filter, "opt1 IS NULL" | "NOT opt1 IS NOT NULL") {
-        id = document.opt1.as_ref().map_or(false, |v| v.is_null()).then(|| document.id.clone());
+        id = document.opt1.as_ref().is_some_and(|v| v.is_null()).then(|| document.id.clone());
     } else if matches!(filter, "NOT opt1 IS NULL" | "opt1 IS NOT NULL") {
-        id = document.opt1.as_ref().map_or(true, |v| !v.is_null()).then(|| document.id.clone());
+        id = document.opt1.as_ref().is_none_or(|v| !v.is_null()).then(|| document.id.clone());
     } else if matches!(filter, "opt1.opt2 IS NULL") {
-        if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) {
+        if document.opt1opt2.as_ref().is_some_and(|v| v.is_null()) {
            id = Some(document.id.clone());
        } else if let Some(opt1) = &document.opt1 {
            if !opt1.is_null() {
@@ -254,15 +254,14 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
            }
        }
     } else if matches!(filter, "opt1 IS EMPTY" | "NOT opt1 IS NOT EMPTY") {
-        id = document.opt1.as_ref().map_or(false, is_empty_value).then(|| document.id.clone());
+        id = document.opt1.as_ref().is_some_and(is_empty_value).then(|| document.id.clone());
     } else if matches!(filter, "NOT opt1 IS EMPTY" | "opt1 IS NOT EMPTY") {
         id = document
             .opt1
-            .as_ref()
-            .map_or(true, |v| !is_empty_value(v))
+            .as_ref().is_none_or(|v| !is_empty_value(v))
             .then(|| document.id.clone());
     } else if matches!(filter, "opt1.opt2 IS EMPTY") {
-        if document.opt1opt2.as_ref().map_or(false, is_empty_value) {
+        if document.opt1opt2.as_ref().is_some_and(is_empty_value) {
             id = Some(document.id.clone());
         }
     } else if matches!(
@@ -66,7 +66,7 @@ use tracing_error::ExtractSpanTrace as _;
 use tracing_subscriber::layer::SubscriberExt as _;
 use tracing_trace::processor;
 
-fn on_panic(info: &std::panic::PanicInfo) {
+fn on_panic(info: &std::panic::PanicHookInfo) {
     let info = info.to_string();
     let trace = SpanTrace::capture();
     tracing::error!(%info, %trace);
@@ -282,7 +282,7 @@ struct SpanMarker<'a> {
     memory_delta: Option<MemoryStats>,
 }
 
-impl<'a> ProfilerMarker for SpanMarker<'a> {
+impl ProfilerMarker for SpanMarker<'_> {
     const MARKER_TYPE_NAME: &'static str = "span";
 
     fn schema() -> MarkerSchema {
@@ -369,7 +369,7 @@ struct EventMarker<'a> {
     memory_delta: Option<MemoryStats>,
 }
 
-impl<'a> ProfilerMarker for EventMarker<'a> {
+impl ProfilerMarker for EventMarker<'_> {
     const MARKER_TYPE_NAME: &'static str = "tracing-event";
 
     fn schema() -> MarkerSchema {