Mirror of https://github.com/meilisearch/MeiliSearch
Make Cargo and Clippy happy
This commit is contained in:
parent 249da5846c
commit 4d90e3d2ec

72 changed files with 145 additions and 165 deletions
@@ -358,7 +358,7 @@ impl<'a> FacetDistribution<'a> {
     ) -> bool {
         // If the field is not filterable, we don't want to compute the facet distribution.
         if !matching_features(name, filterable_attributes_rules)
-            .map_or(false, |(_, features)| features.is_filterable())
+            .is_some_and(|(_, features)| features.is_filterable())
         {
             return false;
         }
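The pattern above recurs throughout the commit: `Option::map_or(false, f)` becomes `Option::is_some_and(f)`, which reads as "the value is present and satisfies the predicate" and has been stable since Rust 1.70. The snippet below is not repository code; it is a minimal standalone sketch in which a made-up `(pattern, bool)` tuple stands in for the rule returned by matching_features, showing that the two spellings agree, including on `None`.

// Standalone sketch, not repository code: a made-up tuple stands in for the
// (pattern, features) pair returned by the real matching_features helper.
fn is_filterable(matched_rule: Option<(u32, bool)>) -> bool {
    // Old spelling: matched_rule.map_or(false, |(_, filterable)| filterable)
    // New spelling: None maps to false, Some(v) maps to the predicate result.
    matched_rule.is_some_and(|(_, filterable)| filterable)
}

fn main() {
    assert!(is_filterable(Some((0, true))));
    assert!(!is_filterable(Some((0, false))));
    assert!(!is_filterable(None));
}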
@@ -383,8 +383,7 @@ impl<'a> FacetDistribution<'a> {
         if let Some(facets) = &self.facets {
             for field in facets.keys() {
                 let matched_rule = matching_features(field, filterable_attributes_rules);
-                let is_filterable =
-                    matched_rule.map_or(false, |(_, features)| features.is_filterable());
+                let is_filterable = matched_rule.is_some_and(|(_, f)| f.is_filterable());

                 if !is_filterable {
                     invalid_facets.insert(field.to_string());
@@ -79,7 +79,7 @@ struct FacetRangeSearch<'t, 'b, 'bitmap> {
     docids: &'bitmap mut RoaringBitmap,
 }

-impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
+impl<'t> FacetRangeSearch<'t, '_, '_> {
     fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
         let left_key =
             FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };
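Several hunks in this commit touch only impl headers: lifetime parameters that the implementation never refers to are replaced with the anonymous lifetime `'_`, the shape Clippy's lifetime-elision suggestion (presumably needless_lifetimes) asks for on recent toolchains. The sketch below is not repository code; `Scanner` is a made-up two-lifetime struct used only to show the before/after shape.

// Standalone sketch with a hypothetical struct: only 'q is named inside the
// impl block, so the other lifetime can be elided with '_.
struct Scanner<'q, 'b> {
    query: &'q str,
    buffer: &'b [u8],
}

// Before: impl<'q, 'b> Scanner<'q, 'b> { ... }
// After: the lifetime that the body never names is anonymous.
impl<'q> Scanner<'q, '_> {
    fn query(&self) -> &'q str {
        self.query
    }
}

fn main() {
    let s = Scanner { query: "hello", buffer: &[1, 2, 3] };
    println!("{} ({} bytes buffered)", s.query(), s.buffer.len());
}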
@@ -62,7 +62,7 @@ struct AscendingFacetSort<'t, 'e> {
     )>,
 }

-impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
+impl<'t> Iterator for AscendingFacetSort<'t, '_> {
     type Item = Result<(RoaringBitmap, &'t [u8])>;

     fn next(&mut self) -> Option<Self::Item> {
@@ -66,15 +66,15 @@ enum FilterError<'a> {
     ParseGeoError(BadGeoError),
     TooDeep,
 }
-impl<'a> std::error::Error for FilterError<'a> {}
+impl std::error::Error for FilterError<'_> {}

-impl<'a> From<BadGeoError> for FilterError<'a> {
+impl From<BadGeoError> for FilterError<'_> {
     fn from(geo_error: BadGeoError) -> Self {
         FilterError::ParseGeoError(geo_error)
     }
 }

-impl<'a> Display for FilterError<'a> {
+impl Display for FilterError<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Self::AttributeNotFilterable { attribute, filterable_patterns } => {
@@ -236,8 +236,7 @@ impl<'a> Filter<'a> {
         let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
         for fid in self.condition.fids(MAX_FILTER_DEPTH) {
             let attribute = fid.value();
-            if matching_features(attribute, &filterable_attributes_rules)
-                .map_or(false, |(_, features)| features.is_filterable())
+            if matching_features(attribute, &filterable_attributes_rules).is_some_and(|(_, features)| features.is_filterable())
             {
                 continue;
             }
@@ -461,7 +460,7 @@ impl<'a> Filter<'a> {
         filterable_attribute_rules: &[FilterableAttributesRule],
         universe: Option<&RoaringBitmap>,
     ) -> Result<RoaringBitmap> {
-        if universe.map_or(false, |u| u.is_empty()) {
+        if universe.is_some_and(|u| u.is_empty()) {
             return Ok(RoaringBitmap::new());
         }

@@ -77,7 +77,7 @@ impl<'a> SearchForFacetValues<'a> {
         let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
         let matched_rule = matching_features(&self.facet, &filterable_attributes_rules);
         let is_facet_searchable =
-            matched_rule.map_or(false, |(_, features)| features.is_facet_searchable());
+            matched_rule.is_some_and(|(_, features)| features.is_facet_searchable());

         if !is_facet_searchable {
             let matching_field_names =
@@ -135,7 +135,7 @@ impl<'a> SearchForFacetValues<'a> {

         if authorize_typos && field_authorizes_typos {
             let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
-            if exact_words_fst.map_or(false, |fst| fst.contains(query)) {
+            if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
                 if fst.contains(query) {
                     self.fetch_original_facets_using_normalized(
                         fid,
@@ -151,7 +151,7 @@ impl ScoreWithRatioResult {
     }
 }

-impl<'a> Search<'a> {
+impl Search<'_> {
     #[tracing::instrument(level = "trace", skip_all, target = "search::hybrid")]
     pub fn execute_hybrid(&self, semantic_ratio: f32) -> Result<(SearchResult, Option<u32>)> {
         // TODO: find classier way to achieve that than to reset vector and query params
@@ -192,7 +192,7 @@ impl<'a> Search<'a> {
             // check if the distinct field is in the filterable fields
             let matched_rule = matching_features(distinct, &filterable_fields);
             let is_filterable =
-                matched_rule.map_or(false, |(_, features)| features.is_filterable());
+                matched_rule.is_some_and(|(_, features)| features.is_filterable());

            if !is_filterable {
                // if not, remove the hidden fields from the filterable fields to generate the error message
@@ -537,7 +537,7 @@ impl<'ctx> SearchContext<'ctx> {
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
         // if the requested fid isn't in the restricted list, return None.
-        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+        if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
             return Ok(None);
         }

@@ -558,7 +558,7 @@ impl<'ctx> SearchContext<'ctx> {
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
         // if the requested fid isn't in the restricted list, return None.
-        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+        if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
             return Ok(None);
         }

@@ -71,8 +71,7 @@ pub fn find_best_match_interval(matches: &[Match], crop_size: usize) -> [&Match;
     let mut save_best_interval = |interval_first, interval_last| {
         let interval_score = get_interval_score(&matches[interval_first..=interval_last]);
         let is_interval_score_better = &best_interval
-            .as_ref()
-            .map_or(true, |MatchIntervalWithScore { score, .. }| interval_score > *score);
+            .as_ref().is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);

         if *is_interval_score_better {
             best_interval = Some(MatchIntervalWithScore {
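This hunk shows the complementary rewrite: `map_or(true, f)` becomes `is_none_or(f)` ("either there is no previous best, or the candidate beats it"), a method stabilized more recently than `is_some_and` (Rust 1.82, if I recall correctly). The snippet below is a standalone sketch with hypothetical names, not the matcher's actual types, illustrating the same keep-the-best pattern.

// Standalone sketch with hypothetical names: accept a candidate when there is
// no previous best, or when it strictly beats the previous best.
fn best_after(best: Option<u64>, candidate: u64) -> Option<u64> {
    // Old spelling: best.map_or(true, |score| candidate > score)
    if best.is_none_or(|score| candidate > score) {
        Some(candidate)
    } else {
        best
    }
}

fn main() {
    let mut best = None;
    for score in [3, 1, 7, 5] {
        best = best_after(best, score);
    }
    assert_eq!(best, Some(7));
}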
@@ -123,7 +123,7 @@ pub struct Matcher<'t, 'tokenizer, 'b, 'lang> {
     matches: Option<(Vec<Token<'t>>, Vec<Match>)>,
 }

-impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
+impl<'t> Matcher<'t, '_, '_, '_> {
     /// Iterates over tokens and save any of them that matches the query.
     fn compute_matches(&mut self) -> &mut Self {
         /// some words are counted as matches only if they are close together and in the good order,
@@ -327,7 +327,7 @@ impl QueryGraph {
         let mut peekable = term_with_frequency.into_iter().peekable();
         while let Some((idx, frequency)) = peekable.next() {
            term_weight.insert(idx, weight);
-            if peekable.peek().map_or(false, |(_, f)| frequency != *f) {
+            if peekable.peek().is_some_and(|(_, f)| frequency != *f) {
                weight += 1;
            }
        }
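Same lint again, but here the receiver is `Peekable::peek`, which returns `Option<&Item>`, so the `None`-means-false rewrite applies to look-ahead as well. Below is a standalone sketch with hypothetical data that mirrors the weight-bumping loop above: items sharing a frequency get the same weight, and the weight increases whenever the next frequency differs.

// Standalone sketch with hypothetical data, mirroring the loop above.
fn weights(term_frequencies: &[(usize, u64)]) -> Vec<(usize, u16)> {
    let mut out = Vec::new();
    let mut weight = 0u16;
    let mut peekable = term_frequencies.iter().copied().peekable();
    while let Some((idx, frequency)) = peekable.next() {
        out.push((idx, weight));
        // Old: peekable.peek().map_or(false, |(_, f)| frequency != *f)
        if peekable.peek().is_some_and(|(_, f)| frequency != *f) {
            weight += 1;
        }
    }
    out
}

fn main() {
    let input = [(0, 10), (1, 10), (2, 3), (3, 1)];
    assert_eq!(weights(&input), vec![(0, 0), (1, 0), (2, 1), (3, 2)]);
}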
@@ -418,7 +418,7 @@ fn split_best_frequency(
         let right = ctx.word_interner.insert(right.to_owned());

         if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(None, left, right, 1)? {
-            if best.map_or(true, |(old, _, _)| frequency > old) {
+            if best.is_none_or(|(old, _, _)| frequency > old) {
                 best = Some((frequency, left, right));
             }
         }
@@ -203,7 +203,7 @@ pub fn number_of_typos_allowed<'ctx>(
     Ok(Box::new(move |word: &str| {
         if !authorize_typos
             || word.len() < min_len_one_typo as usize
-            || exact_words.as_ref().map_or(false, |fst| fst.contains(word))
+            || exact_words.as_ref().is_some_and(|fst| fst.contains(word))
         {
             0
         } else if word.len() < min_len_two_typos as usize {
@@ -17,7 +17,7 @@ use crate::Result;
 pub struct PhraseDocIdsCache {
     pub cache: FxHashMap<Interned<Phrase>, RoaringBitmap>,
 }
-impl<'ctx> SearchContext<'ctx> {
+impl SearchContext<'_> {
     /// Get the document ids associated with the given phrase
     pub fn get_phrase_docids(&mut self, phrase: Interned<Phrase>) -> Result<&RoaringBitmap> {
         if self.phrase_docids.cache.contains_key(&phrase) {
@@ -263,7 +263,7 @@ impl SmallBitmapInternal {

     pub fn contains(&self, x: u16) -> bool {
         let (set, x) = self.get_set_index(x);
-        set & 0b1 << x != 0
+        set & (0b1 << x) != 0
     }

     pub fn insert(&mut self, x: u16) {
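The added parentheses do not change behavior: in Rust, `<<` binds tighter than `&`, and `&` binds tighter than `!=` (unlike C, where `&` sits below the comparisons), so `set & 0b1 << x != 0` already parses as `(set & (0b1 << x)) != 0`. Clippy's precedence-style lint just asks for the grouping to be spelled out. The quick standalone check below is not repository code and uses made-up values.

// Standalone check: both spellings test the same bit, because shift binds
// tighter than bitwise AND, which binds tighter than the comparison.
fn main() {
    let set: u64 = 0b1010;
    for x in 0..4u16 {
        let implicit = set & 0b1 << x != 0;
        let explicit = set & (0b1 << x) != 0;
        assert_eq!(implicit, explicit);
    }
    // Bits 1 and 3 are set in 0b1010; bit 0 is not.
    assert!(set & (0b1 << 1) != 0);
    assert!(set & (0b1 << 0) == 0);
}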
@@ -381,7 +381,7 @@ pub enum SmallBitmapInternalIter<'b> {
     Tiny(u64),
     Small { cur: u64, next: &'b [u64], base: u16 },
 }
-impl<'b> Iterator for SmallBitmapInternalIter<'b> {
+impl Iterator for SmallBitmapInternalIter<'_> {
     type Item = u16;

     fn next(&mut self) -> Option<Self::Item> {