Make clippy happy

Loïc Lecrenier 2023-03-23 09:15:57 +01:00
parent 9b1f439a91
commit 56b7209f26
14 changed files with 61 additions and 73 deletions
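
Most of the hunks below drop lifetime parameters such as 'ctx and 'transaction that clippy's needless_lifetimes lint flags as redundant: wherever the compiler's elision rules would infer the same signature, the explicit lifetime can be removed without changing behaviour. A minimal, self-contained sketch of the pattern; the SearchContext type and apply_rule function here are simplified stand-ins, not the real milli items:

// Sketch only: a simplified stand-in for the real SearchContext type.
struct SearchContext<'ctx> {
    query: &'ctx str,
}

// Before: clippy::needless_lifetimes fires, because eliding 'ctx
// produces exactly the same signature.
// fn apply_rule<'ctx>(ctx: &mut SearchContext<'ctx>) -> usize {
//     ctx.query.len()
// }

// After: the lifetime is elided; the signature means the same thing.
fn apply_rule(ctx: &mut SearchContext) -> usize {
    ctx.query.len()
}

fn main() {
    let mut ctx = SearchContext { query: "distinct" };
    println!("{}", apply_rule(&mut ctx));
}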

@@ -22,8 +22,8 @@ pub struct DistinctOutput
/// is considered unique.
/// - `excluded`: the set of document ids that contain a value for the given field that occurs
/// in the given candidates.
pub fn apply_distinct_rule<'ctx>(
ctx: &mut SearchContext<'ctx>,
pub fn apply_distinct_rule(
ctx: &mut SearchContext,
field_id: u16,
candidates: &RoaringBitmap,
// TODO: add a universe here, such that the `excluded` are a subset of the universe?

@@ -102,10 +102,10 @@ impl SearchLogger<QueryGraph> for DetailedSearchLogger {
self.ranking_rules_ids = Some(rr.iter().map(|rr| rr.id()).collect());
}
fn start_iteration_ranking_rule<'transaction>(
fn start_iteration_ranking_rule(
&mut self,
ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, QueryGraph>,
_ranking_rule: &dyn RankingRule<QueryGraph>,
query: &QueryGraph,
universe: &RoaringBitmap,
) {
@@ -117,10 +117,10 @@ impl SearchLogger<QueryGraph> for DetailedSearchLogger {
})
}
fn next_bucket_ranking_rule<'transaction>(
fn next_bucket_ranking_rule(
&mut self,
ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, QueryGraph>,
_ranking_rule: &dyn RankingRule<QueryGraph>,
universe: &RoaringBitmap,
candidates: &RoaringBitmap,
) {
@@ -131,10 +131,10 @@ impl SearchLogger<QueryGraph> for DetailedSearchLogger {
time: Instant::now(),
})
}
fn skip_bucket_ranking_rule<'transaction>(
fn skip_bucket_ranking_rule(
&mut self,
ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, QueryGraph>,
_ranking_rule: &dyn RankingRule<QueryGraph>,
candidates: &RoaringBitmap,
) {
self.events.push(SearchEvents::RankingRuleSkipBucket {
@@ -144,10 +144,10 @@ impl SearchLogger<QueryGraph> for DetailedSearchLogger {
})
}
fn end_iteration_ranking_rule<'transaction>(
fn end_iteration_ranking_rule(
&mut self,
ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, QueryGraph>,
_ranking_rule: &dyn RankingRule<QueryGraph>,
universe: &RoaringBitmap,
) {
self.events.push(SearchEvents::RankingRuleEndIteration {
@@ -427,7 +427,7 @@ results.{cur_ranking_rule}{cur_activated_id} {{
ctx: &mut SearchContext,
node_idx: Interned<QueryNode>,
node: &QueryNode,
distances: &[u16],
_distances: &[u16],
file: &mut File,
) {
match &node.data {

@@ -25,33 +25,33 @@ pub trait SearchLogger<Q: RankingRuleQueryTrait> {
fn ranking_rules(&mut self, rr: &[Box<dyn RankingRule<Q>>]);
/// Logs the start of a ranking rule's iteration.
fn start_iteration_ranking_rule<'transaction>(
fn start_iteration_ranking_rule(
&mut self,
ranking_rule_idx: usize,
ranking_rule: &dyn RankingRule<'transaction, Q>,
ranking_rule: &dyn RankingRule<Q>,
query: &Q,
universe: &RoaringBitmap,
);
/// Logs the end of the computation of a ranking rule bucket
fn next_bucket_ranking_rule<'transaction>(
fn next_bucket_ranking_rule(
&mut self,
ranking_rule_idx: usize,
ranking_rule: &dyn RankingRule<'transaction, Q>,
ranking_rule: &dyn RankingRule<Q>,
universe: &RoaringBitmap,
candidates: &RoaringBitmap,
);
/// Logs the skipping of a ranking rule bucket
fn skip_bucket_ranking_rule<'transaction>(
fn skip_bucket_ranking_rule(
&mut self,
ranking_rule_idx: usize,
ranking_rule: &dyn RankingRule<'transaction, Q>,
ranking_rule: &dyn RankingRule<Q>,
candidates: &RoaringBitmap,
);
/// Logs the end of a ranking rule's iteration.
fn end_iteration_ranking_rule<'transaction>(
fn end_iteration_ranking_rule(
&mut self,
ranking_rule_idx: usize,
ranking_rule: &dyn RankingRule<'transaction, Q>,
ranking_rule: &dyn RankingRule<Q>,
universe: &RoaringBitmap,
);
/// Logs the addition of document ids to the final results
@@ -95,35 +95,35 @@ impl<Q: RankingRuleQueryTrait> SearchLogger<Q> for DefaultSearchLogger {
fn ranking_rules(&mut self, _rr: &[Box<dyn RankingRule<Q>>]) {}
fn start_iteration_ranking_rule<'transaction>(
fn start_iteration_ranking_rule(
&mut self,
_ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, Q>,
_ranking_rule: &dyn RankingRule<Q>,
_query: &Q,
_universe: &RoaringBitmap,
) {
}
fn next_bucket_ranking_rule<'transaction>(
fn next_bucket_ranking_rule(
&mut self,
_ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, Q>,
_ranking_rule: &dyn RankingRule<Q>,
_universe: &RoaringBitmap,
_candidates: &RoaringBitmap,
) {
}
fn skip_bucket_ranking_rule<'transaction>(
fn skip_bucket_ranking_rule(
&mut self,
_ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, Q>,
_ranking_rule: &dyn RankingRule<Q>,
_candidates: &RoaringBitmap,
) {
}
fn end_iteration_ranking_rule<'transaction>(
fn end_iteration_ranking_rule(
&mut self,
_ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<'transaction, Q>,
_ranking_rule: &dyn RankingRule<Q>,
_universe: &RoaringBitmap,
) {
}

@@ -61,8 +61,8 @@ impl<'ctx> SearchContext<'ctx> {
/// Apply the [`TermsMatchingStrategy`] to the query graph and resolve it.
#[allow(clippy::too_many_arguments)]
fn resolve_maximally_reduced_query_graph<'ctx>(
ctx: &mut SearchContext<'ctx>,
fn resolve_maximally_reduced_query_graph(
ctx: &mut SearchContext,
universe: &RoaringBitmap,
query_graph: &QueryGraph,
matching_strategy: TermsMatchingStrategy,
@@ -75,7 +75,7 @@ fn resolve_maximally_reduced_query_graph<'ctx>(
for (_, n) in query_graph.nodes.iter() {
match &n.data {
QueryNodeData::Term(term) => {
all_positions.extend(term.positions.clone().into_iter());
all_positions.extend(term.positions.clone());
}
QueryNodeData::Deleted | QueryNodeData::Start | QueryNodeData::End => {}
}
@@ -222,8 +222,8 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
}
#[allow(clippy::too_many_arguments)]
pub fn execute_search<'ctx>(
ctx: &mut SearchContext<'ctx>,
pub fn execute_search(
ctx: &mut SearchContext,
query: &str,
terms_matching_strategy: TermsMatchingStrategy,
filters: Option<Filter>,
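
The change above from all_positions.extend(term.positions.clone().into_iter()) to all_positions.extend(term.positions.clone()) addresses clippy's useless_conversion lint: Extend::extend already accepts any IntoIterator, so the explicit .into_iter() call adds nothing. The same lint explains the removal of .into_iter() after (0..words.len()) further down, since a Range is itself an iterator. A small stand-alone sketch of the fix, using a plain Vec<u16> rather than the real collection types:

fn main() {
    // Hypothetical data standing in for term.positions / all_positions.
    let positions: Vec<u16> = vec![1, 2, 3];
    let mut all_positions: Vec<u16> = Vec::new();

    // Before (flagged by clippy::useless_conversion):
    // all_positions.extend(positions.clone().into_iter());

    // After: the cloned Vec already implements IntoIterator,
    // so extend can consume it directly.
    all_positions.extend(positions.clone());

    assert_eq!(all_positions, positions);
}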

@@ -425,8 +425,8 @@ impl LocatedQueryTerm {
}
/// Convert the tokenised search query into a list of located query terms.
pub fn located_query_terms_from_string<'ctx>(
ctx: &mut SearchContext<'ctx>,
pub fn located_query_terms_from_string(
ctx: &mut SearchContext,
query: NormalizedTokenIter<Vec<u8>>,
words_limit: Option<usize>,
) -> Result<Vec<LocatedQueryTerm>> {

@@ -43,9 +43,9 @@ impl<G: RankingRuleGraphTrait> ConditionDocIdsCache<G> {
///
/// If the cache does not yet contain these docids, they are computed
/// and inserted in the cache.
pub fn get_condition_docids<'s, 'ctx>(
pub fn get_condition_docids<'s>(
&'s mut self,
ctx: &mut SearchContext<'ctx>,
ctx: &mut SearchContext,
interned_condition: Interned<G::Condition>,
graph: &mut RankingRuleGraph<G>,
universe: &RoaringBitmap,

@@ -77,22 +77,19 @@ pub trait RankingRuleGraphTrait: Sized {
/// Return the label of the given edge condition, to be used when visualising
/// the ranking rule graph.
fn label_for_condition<'ctx>(
ctx: &mut SearchContext<'ctx>,
condition: &Self::Condition,
) -> Result<String>;
fn label_for_condition(ctx: &mut SearchContext, condition: &Self::Condition) -> Result<String>;
/// Compute the document ids associated with the given edge condition,
/// restricted to the given universe.
fn resolve_condition<'ctx>(
ctx: &mut SearchContext<'ctx>,
fn resolve_condition(
ctx: &mut SearchContext,
condition: &Self::Condition,
universe: &RoaringBitmap,
) -> Result<(RoaringBitmap, FxHashSet<Interned<String>>, FxHashSet<Interned<Phrase>>)>;
/// Return the costs and conditions of the edges going from the source node to the destination node
fn build_edges<'ctx>(
ctx: &mut SearchContext<'ctx>,
fn build_edges(
ctx: &mut SearchContext,
conditions_interner: &mut DedupInterner<Self::Condition>,
source_node: &QueryNode,
dest_node: &QueryNode,

@@ -7,8 +7,8 @@ use crate::search::new::query_term::LocatedQueryTerm;
use crate::search::new::{QueryNode, SearchContext};
use crate::Result;
pub fn build_edges<'ctx>(
_ctx: &mut SearchContext<'ctx>,
pub fn build_edges(
_ctx: &mut SearchContext,
conditions_interner: &mut DedupInterner<ProximityCondition>,
from_node: &QueryNode,
to_node: &QueryNode,

@@ -13,8 +13,8 @@ use fxhash::FxHashSet;
use heed::RoTxn;
use roaring::RoaringBitmap;
pub fn compute_docids<'ctx>(
ctx: &mut SearchContext<'ctx>,
pub fn compute_docids(
ctx: &mut SearchContext,
condition: &ProximityCondition,
universe: &RoaringBitmap,
) -> Result<(RoaringBitmap, FxHashSet<Interned<String>>, FxHashSet<Interned<Phrase>>)> {

@@ -29,8 +29,8 @@ pub enum ProximityGraph {}
impl RankingRuleGraphTrait for ProximityGraph {
type Condition = ProximityCondition;
fn resolve_condition<'ctx>(
ctx: &mut SearchContext<'ctx>,
fn resolve_condition(
ctx: &mut SearchContext,
condition: &Self::Condition,
universe: &RoaringBitmap,
) -> Result<(roaring::RoaringBitmap, FxHashSet<Interned<String>>, FxHashSet<Interned<Phrase>>)>
@@ -38,8 +38,8 @@ impl RankingRuleGraphTrait for ProximityGraph {
compute_docids::compute_docids(ctx, condition, universe)
}
fn build_edges<'ctx>(
ctx: &mut SearchContext<'ctx>,
fn build_edges(
ctx: &mut SearchContext,
conditions_interner: &mut DedupInterner<Self::Condition>,
source_node: &QueryNode,
dest_node: &QueryNode,
@@ -59,10 +59,7 @@ impl RankingRuleGraphTrait for ProximityGraph {
logger.log_proximity_state(graph, paths, dead_ends_cache, universe, distances, cost);
}
fn label_for_condition<'ctx>(
ctx: &mut SearchContext<'ctx>,
condition: &Self::Condition,
) -> Result<String> {
fn label_for_condition(ctx: &mut SearchContext, condition: &Self::Condition) -> Result<String> {
match condition {
ProximityCondition::Uninit { cost, .. } => {
// TODO

@@ -23,8 +23,8 @@ pub enum TypoGraph {}
impl RankingRuleGraphTrait for TypoGraph {
type Condition = TypoCondition;
fn resolve_condition<'db_cache, 'ctx>(
ctx: &mut SearchContext<'ctx>,
fn resolve_condition<'db_cache>(
ctx: &mut SearchContext,
condition: &Self::Condition,
universe: &RoaringBitmap,
) -> Result<(RoaringBitmap, FxHashSet<Interned<String>>, FxHashSet<Interned<Phrase>>)> {
@@ -57,8 +57,8 @@ impl RankingRuleGraphTrait for TypoGraph {
))
}
fn build_edges<'ctx>(
ctx: &mut SearchContext<'ctx>,
fn build_edges(
ctx: &mut SearchContext,
conditions_interner: &mut DedupInterner<Self::Condition>,
_from_node: &QueryNode,
to_node: &QueryNode,
@@ -152,10 +152,7 @@ impl RankingRuleGraphTrait for TypoGraph {
logger.log_typo_state(graph, paths, dead_ends_cache, universe, distances, cost);
}
fn label_for_condition<'ctx>(
ctx: &mut SearchContext<'ctx>,
condition: &Self::Condition,
) -> Result<String> {
fn label_for_condition(ctx: &mut SearchContext, condition: &Self::Condition) -> Result<String> {
let TypoCondition { term } = condition;
let term = ctx.term_interner.get(*term);
let QueryTerm {

@@ -87,8 +87,8 @@ impl QueryTermDocIdsCache {
}
}
pub fn resolve_query_graph<'ctx>(
ctx: &mut SearchContext<'ctx>,
pub fn resolve_query_graph(
ctx: &mut SearchContext,
q: &QueryGraph,
universe: &RoaringBitmap,
) -> Result<RoaringBitmap> {

@@ -47,7 +47,7 @@ impl<'ctx> RankingRule<'ctx, QueryGraph> for Words {
for (_, n) in parent_query_graph.nodes.iter() {
match &n.data {
QueryNodeData::Term(term) => {
all_positions.extend(term.positions.clone().into_iter());
all_positions.extend(term.positions.clone());
}
QueryNodeData::Deleted | QueryNodeData::Start | QueryNodeData::End => {}
}

@@ -591,8 +591,7 @@ fn create_matching_words(
}
// create a CONSECUTIVE matchings words wrapping all word in the phrase
PrimitiveQueryPart::Phrase(words) => {
let ids: Vec<_> =
(0..words.len()).into_iter().map(|i| id + i as PrimitiveWordId).collect();
let ids: Vec<_> = (0..words.len()).map(|i| id + i as PrimitiveWordId).collect();
// Require that all words of the phrase have a corresponding MatchingWord
// before adding any of them to the matching_words result
if let Some(phrase_matching_words) = words
@@ -649,10 +648,8 @@ fn create_matching_words(
}
})
.collect();
let ids: Vec<_> = (0..words.len())
.into_iter()
.map(|i| id + i as PrimitiveWordId)
.collect();
let ids: Vec<_> =
(0..words.len()).map(|i| id + i as PrimitiveWordId).collect();
if let Some(synonyms) = ctx.synonyms(&words)? {
for synonym in synonyms {