From 2ce025a9069243a74a0a704aafa803aa875e1ad8 Mon Sep 17 00:00:00 2001
From: Ewan Higgs
Date: Tue, 25 Oct 2022 20:58:31 +0200
Subject: [PATCH] Fixes after rebase to fix new issues.

---
 milli/src/index.rs | 2 +-
 milli/src/search/criteria/mod.rs | 18 +++++++++---------
 milli/src/update/facets.rs | 4 ++--
 .../update/prefix_word_pairs/prefix_word.rs | 7 +++----
 .../update/prefix_word_pairs/word_prefix.rs | 16 ++++++----------
 5 files changed, 21 insertions(+), 26 deletions(-)

diff --git a/milli/src/index.rs b/milli/src/index.rs
index 0601ae7b7..4144728f1 100644
--- a/milli/src/index.rs
+++ b/milli/src/index.rs
@@ -200,7 +200,7 @@ impl Index {
 
     pub fn new<P: AsRef<Path>>(options: heed::EnvOpenOptions, path: P) -> Result<Index> {
         let now = OffsetDateTime::now_utc();
-        Self::new_with_creation_dates(options, path, now.clone(), now)
+        Self::new_with_creation_dates(options, path, now, now)
     }
 
     fn set_creation_dates(
diff --git a/milli/src/search/criteria/mod.rs b/milli/src/search/criteria/mod.rs
index 7d59bb3c0..1b46c8441 100644
--- a/milli/src/search/criteria/mod.rs
+++ b/milli/src/search/criteria/mod.rs
@@ -562,11 +562,11 @@ fn query_pair_proximity_docids(
                )? {
                    Some(docids) => Ok(docids),
                    None => {
-                        let r_words = word_derivations(&right, true, 0, ctx.words_fst(), wdcache)?;
+                        let r_words = word_derivations(right, true, 0, ctx.words_fst(), wdcache)?;
                        all_word_pair_overall_proximity_docids(
                            ctx,
                            &[(left, 0)],
-                            &r_words,
+                            r_words,
                            proximity,
                        )
                    }
@@ -592,11 +592,11 @@ fn query_pair_proximity_docids(
                        Some(docids) => Ok(docids),
                        None => {
                            let r_words =
-                                word_derivations(&right, true, 0, ctx.words_fst(), wdcache)?;
+                                word_derivations(right, true, 0, ctx.words_fst(), wdcache)?;
                            all_word_pair_overall_proximity_docids(
                                ctx,
                                &[(left, 0)],
-                                &r_words,
+                                r_words,
                                proximity,
                            )
                        }
@@ -609,17 +609,17 @@ fn query_pair_proximity_docids(
            }
        }
        (QueryKind::Exact { word: left, .. }, QueryKind::Tolerant { typo, word: right }) => {
-            let r_words = word_derivations(&right, prefix, *typo, ctx.words_fst(), wdcache)?;
-            all_word_pair_overall_proximity_docids(ctx, &[(left, 0)], &r_words, proximity)
+            let r_words = word_derivations(right, prefix, *typo, ctx.words_fst(), wdcache)?;
+            all_word_pair_overall_proximity_docids(ctx, &[(left, 0)], r_words, proximity)
        }
        (
            QueryKind::Tolerant { typo: l_typo, word: left },
            QueryKind::Tolerant { typo: r_typo, word: right },
        ) => {
            let l_words =
-                word_derivations(&left, false, *l_typo, ctx.words_fst(), wdcache)?.to_owned();
-            let r_words = word_derivations(&right, prefix, *r_typo, ctx.words_fst(), wdcache)?;
-            all_word_pair_overall_proximity_docids(ctx, &l_words, &r_words, proximity)
+                word_derivations(left, false, *l_typo, ctx.words_fst(), wdcache)?.to_owned();
+            let r_words = word_derivations(right, prefix, *r_typo, ctx.words_fst(), wdcache)?;
+            all_word_pair_overall_proximity_docids(ctx, &l_words, r_words, proximity)
        }
    }
 }
diff --git a/milli/src/update/facets.rs b/milli/src/update/facets.rs
index ae2a6d7fd..50b34a714 100644
--- a/milli/src/update/facets.rs
+++ b/milli/src/update/facets.rs
@@ -332,8 +332,8 @@ fn compute_facet_number_levels(
 /// 1. a vector of grenad::Reader. The reader at index `i` corresponds to the elements of level `i + 1`
 ///    that must be inserted into the database.
 /// 2. a roaring bitmap of all the document ids present in the database
-fn compute_facet_strings_levels<'t>(
-    rtxn: &'t heed::RoTxn,
+fn compute_facet_strings_levels(
+    rtxn: &heed::RoTxn,
     db: heed::Database,
     compression_type: CompressionType,
     compression_level: Option<u32>,
diff --git a/milli/src/update/prefix_word_pairs/prefix_word.rs b/milli/src/update/prefix_word_pairs/prefix_word.rs
index 26fe0105e..952e02558 100644
--- a/milli/src/update/prefix_word_pairs/prefix_word.rs
+++ b/milli/src/update/prefix_word_pairs/prefix_word.rs
@@ -30,9 +30,8 @@ pub fn index_prefix_word_database(
     debug!("Computing and writing the word prefix pair proximity docids into LMDB on disk...");
 
     let common_prefixes: Vec<_> = common_prefix_fst_words
-        .into_iter()
-        .map(|s| s.into_iter())
-        .flatten()
+        .iter()
+        .flat_map(|s| s.iter())
         .map(|s| s.as_str())
        .filter(|s| s.len() <= max_prefix_length)
        .collect();
@@ -73,7 +72,7 @@ pub fn index_prefix_word_database(
 
     // Now we do the same thing with the new prefixes and all word pairs in the DB
     let new_prefixes: Vec<_> = new_prefix_fst_words
-        .into_iter()
+        .iter()
        .map(|s| s.as_str())
        .filter(|s| s.len() <= max_prefix_length)
        .collect();
diff --git a/milli/src/update/prefix_word_pairs/word_prefix.rs b/milli/src/update/prefix_word_pairs/word_prefix.rs
index 5895cdc46..53e421fac 100644
--- a/milli/src/update/prefix_word_pairs/word_prefix.rs
+++ b/milli/src/update/prefix_word_pairs/word_prefix.rs
@@ -195,9 +195,8 @@ pub fn index_word_prefix_database(
     // Make a prefix trie from the common prefixes that are shorter than self.max_prefix_length
     let prefixes = PrefixTrieNode::from_sorted_prefixes(
         common_prefix_fst_words
-            .into_iter()
-            .map(|s| s.into_iter())
-            .flatten()
+            .iter()
+            .flat_map(|s| s.iter())
             .map(|s| s.as_str())
            .filter(|s| s.len() <= max_prefix_length),
    );
@@ -237,10 +236,7 @@ pub fn index_word_prefix_database(
 
     // Now we do the same thing with the new prefixes and all word pairs in the DB
     let prefixes = PrefixTrieNode::from_sorted_prefixes(
-        new_prefix_fst_words
-            .into_iter()
-            .map(|s| s.as_str())
-            .filter(|s| s.len() <= max_prefix_length),
+        new_prefix_fst_words.iter().map(|s| s.as_str()).filter(|s| s.len() <= max_prefix_length),
     );
 
     if !prefixes.is_empty() {
@@ -366,7 +362,7 @@ fn execute_on_word_pairs_and_prefixes(
                    &mut prefix_buffer,
                    &prefix_search_start,
                    |prefix_buffer| {
-                        batch.insert(&prefix_buffer, data.to_vec());
+                        batch.insert(prefix_buffer, data.to_vec());
                    },
                );
            }
@@ -484,7 +480,7 @@ impl PrefixTrieNode {
     fn set_search_start(&self, word: &[u8], search_start: &mut PrefixTrieNodeSearchStart) -> bool {
         let byte = word[0];
         if self.children[search_start.0].1 == byte {
-            return true;
+            true
         } else {
             match self.children[search_start.0..].binary_search_by_key(&byte, |x| x.1) {
                 Ok(position) => {
@@ -502,7 +498,7 @@ impl PrefixTrieNode {
     fn from_sorted_prefixes<'a>(prefixes: impl Iterator<Item = &'a str>) -> Self {
         let mut node = PrefixTrieNode::default();
         for prefix in prefixes {
-            node.insert_sorted_prefix(prefix.as_bytes().into_iter());
+            node.insert_sorted_prefix(prefix.as_bytes().iter());
         }
         node
     }