diff --git a/milli/src/search/criteria/mod.rs b/milli/src/search/criteria/mod.rs
index 3673aef78..77d92f6ea 100644
--- a/milli/src/search/criteria/mod.rs
+++ b/milli/src/search/criteria/mod.rs
@@ -14,6 +14,7 @@ pub trait Criterion {
 }
 
 /// The result of a call to the parent criterion.
+#[derive(Debug, Clone, PartialEq)]
 pub struct CriterionResult {
     /// The query tree that must be used by the children criterion to fetch candidates.
     pub query_tree: Option<Operation>,
@@ -188,3 +189,139 @@ fn query_pair_proximity_docids(ctx: &dyn Context, left: &Query, right: &Query, p
         },
     }
 }
+
+#[cfg(test)]
+pub mod test {
+    use maplit::hashmap;
+    use rand::{Rng, SeedableRng, rngs::StdRng};
+
+    use super::*;
+    use std::collections::HashMap;
+
+    fn s(s: &str) -> String { s.to_string() }
+    pub struct TestContext<'t> {
+        words_fst: fst::Set<Cow<'t, [u8]>>,
+        word_docids: HashMap<String, RoaringBitmap>,
+        word_prefix_docids: HashMap<String, RoaringBitmap>,
+        word_pair_proximity_docids: HashMap<(String, String, i32), RoaringBitmap>,
+        word_prefix_pair_proximity_docids: HashMap<(String, String, i32), RoaringBitmap>,
+    }
+
+    impl<'a> Context for TestContext<'a> {
+        fn word_docids(&self, word: &str) -> heed::Result<Option<RoaringBitmap>> {
+            Ok(self.word_docids.get(&word.to_string()).cloned())
+        }
+
+        fn word_prefix_docids(&self, word: &str) -> heed::Result<Option<RoaringBitmap>> {
+            Ok(self.word_prefix_docids.get(&word.to_string()).cloned())
+        }
+
+        fn word_pair_proximity_docids(&self, left: &str, right: &str, proximity: u8) -> heed::Result<Option<RoaringBitmap>> {
+            let key = (left.to_string(), right.to_string(), proximity.into());
+            Ok(self.word_pair_proximity_docids.get(&key).cloned())
+        }
+
+        fn word_prefix_pair_proximity_docids(&self, left: &str, right: &str, proximity: u8) -> heed::Result<Option<RoaringBitmap>> {
+            let key = (left.to_string(), right.to_string(), proximity.into());
+            Ok(self.word_prefix_pair_proximity_docids.get(&key).cloned())
+        }
+
+        fn words_fst<'t>(&self) -> &'t fst::Set<Cow<[u8]>> {
+            &self.words_fst
+        }
+
+        fn in_prefix_cache(&self, word: &str) -> bool {
+            self.word_prefix_docids.contains_key(&word.to_string())
+        }
+    }
+
+    impl<'a> Default for TestContext<'a> {
+        fn default() -> TestContext<'a> {
+            let mut rng = StdRng::seed_from_u64(102);
+            let rng = &mut rng;
+
+            fn random_postings<R: Rng>(rng: &mut R, len: usize) -> RoaringBitmap {
+                let mut values = Vec::<u32>::with_capacity(len);
+                while values.len() != len {
+                    values.push(rng.gen());
+                }
+                values.sort_unstable();
+
+                RoaringBitmap::from_sorted_iter(values.into_iter())
+            }
+
+            let word_docids = hashmap!{
+                s("hello") => random_postings(rng, 1500),
+                s("hi") => random_postings(rng, 4000),
+                s("word") => random_postings(rng, 2500),
+                s("split") => random_postings(rng, 400),
+                s("ngrams") => random_postings(rng, 1400),
+                s("world") => random_postings(rng, 15_000),
+                s("earth") => random_postings(rng, 8000),
+                s("2021") => random_postings(rng, 100),
+                s("2020") => random_postings(rng, 500),
+                s("is") => random_postings(rng, 50_000),
+                s("this") => random_postings(rng, 50_000),
+                s("good") => random_postings(rng, 1250),
+                s("morning") => random_postings(rng, 125),
+            };
+
+            let word_prefix_docids = hashmap!{
+                s("h") => &word_docids[&s("hello")] | &word_docids[&s("hi")],
+                s("wor") => &word_docids[&s("word")] | &word_docids[&s("world")],
+                s("20") => &word_docids[&s("2020")] | &word_docids[&s("2021")],
+            };
+
+            let hello_world = &word_docids[&s("hello")] & &word_docids[&s("world")];
+            let hello_world_split = (hello_world.len() / 2) as usize;
+            let hello_world_1 = hello_world.iter().take(hello_world_split).collect();
+            let hello_world_2 = hello_world.iter().skip(hello_world_split).collect();
+
&word_docids[&s("hello")] & &word_docids[&s("word")]; + let hello_word_split = (hello_word.len() / 2) as usize; + let hello_word_4 = hello_word.iter().take(hello_word_split).collect(); + let hello_word_6 = hello_word.iter().skip(hello_word_split).take(hello_word_split/2).collect(); + let hello_word_7 = hello_word.iter().skip(hello_word_split + hello_word_split/2).collect(); + let word_pair_proximity_docids = hashmap!{ + (s("good"), s("morning"), 1) => &word_docids[&s("good")] & &word_docids[&s("morning")], + (s("hello"), s("world"), 1) => hello_world_1, + (s("hello"), s("world"), 4) => hello_world_2, + (s("this"), s("is"), 1) => &word_docids[&s("this")] & &word_docids[&s("is")], + (s("is"), s("2021"), 1) => &word_docids[&s("this")] & &word_docids[&s("is")] & &word_docids[&s("2021")], + (s("is"), s("2020"), 1) => &word_docids[&s("this")] & &word_docids[&s("is")] & (&word_docids[&s("2020")] - &word_docids[&s("2021")]), + (s("this"), s("2021"), 2) => &word_docids[&s("this")] & &word_docids[&s("is")] & &word_docids[&s("2021")], + (s("this"), s("2020"), 2) => &word_docids[&s("this")] & &word_docids[&s("is")] & (&word_docids[&s("2020")] - &word_docids[&s("2021")]), + (s("word"), s("split"), 1) => &word_docids[&s("word")] & &word_docids[&s("split")], + (s("world"), s("split"), 1) => (&word_docids[&s("world")] & &word_docids[&s("split")]) - &word_docids[&s("word")], + (s("hello"), s("word"), 4) => hello_word_4, + (s("hello"), s("word"), 6) => hello_word_6, + (s("hello"), s("word"), 7) => hello_word_7, + (s("split"), s("ngrams"), 3) => (&word_docids[&s("split")] & &word_docids[&s("ngrams")]) - &word_docids[&s("word")], + (s("split"), s("ngrams"), 5) => &word_docids[&s("split")] & &word_docids[&s("ngrams")] & &word_docids[&s("word")], + (s("this"), s("ngrams"), 1) => (&word_docids[&s("split")] & &word_docids[&s("this")] & &word_docids[&s("ngrams")] ) - &word_docids[&s("word")], + (s("this"), s("ngrams"), 2) => &word_docids[&s("split")] & &word_docids[&s("this")] & &word_docids[&s("ngrams")] & &word_docids[&s("word")], + }; + + let word_prefix_pair_proximity_docids = hashmap!{ + (s("hello"), s("wor"), 1) => word_pair_proximity_docids.get(&(s("hello"), s("world"), 1)).unwrap().clone(), + (s("hello"), s("wor"), 4) => word_pair_proximity_docids.get(&(s("hello"), s("world"), 4)).unwrap() | word_pair_proximity_docids.get(&(s("hello"), s("word"), 4)).unwrap(), + (s("hello"), s("wor"), 6) => word_pair_proximity_docids.get(&(s("hello"), s("word"), 6)).unwrap().clone(), + (s("hello"), s("wor"), 7) => word_pair_proximity_docids.get(&(s("hello"), s("word"), 7)).unwrap().clone(), + (s("is"), s("20"), 1) => word_pair_proximity_docids.get(&(s("is"), s("2020"), 1)).unwrap() | word_pair_proximity_docids.get(&(s("is"), s("2021"), 1)).unwrap(), + (s("this"), s("20"), 2) => word_pair_proximity_docids.get(&(s("this"), s("2020"), 2)).unwrap() | word_pair_proximity_docids.get(&(s("this"), s("2021"), 2)).unwrap(), + }; + + let mut keys = word_docids.keys().collect::>(); + keys.sort_unstable(); + let words_fst = fst::Set::from_iter(keys).unwrap().map_data(|v| Cow::Owned(v)).unwrap(); + + TestContext { + words_fst, + word_docids, + word_prefix_docids, + word_pair_proximity_docids, + word_prefix_pair_proximity_docids, + } + } + } +} diff --git a/milli/src/search/criteria/typo.rs b/milli/src/search/criteria/typo.rs index 75f3f5666..d9a5f8aa6 100644 --- a/milli/src/search/criteria/typo.rs +++ b/milli/src/search/criteria/typo.rs @@ -328,3 +328,153 @@ fn resolve_candidates<'t>( resolve_operation(ctx, query_tree, number_typos, 
     resolve_operation(ctx, query_tree, number_typos, cache)
 }
+
+#[cfg(test)]
+mod test {
+
+    use super::*;
+    use super::super::test::TestContext;
+
+    #[test]
+    fn initial_placeholder_no_facets() {
+        let context = TestContext::default();
+        let query_tree = None;
+        let facet_candidates = None;
+
+        let mut criteria = Typo::initial(&context, query_tree, facet_candidates).unwrap();
+
+        assert!(criteria.next().unwrap().is_none());
+    }
+
+    #[test]
+    fn initial_query_tree_no_facets() {
+        let context = TestContext::default();
+        let query_tree = Operation::Or(false, vec![
+            Operation::And(vec![
+                Operation::Query(Query { prefix: false, kind: QueryKind::exact("split".to_string()) }),
+                Operation::Query(Query { prefix: false, kind: QueryKind::exact("this".to_string()) }),
+                Operation::Query(Query { prefix: false, kind: QueryKind::tolerant(1, "world".to_string()) }),
+            ])
+        ]);
+
+        let facet_candidates = None;
+
+        let mut criteria = Typo::initial(&context, Some(query_tree), facet_candidates).unwrap();
+
+        let candidates_1 = context.word_docids("split").unwrap().unwrap()
+            & context.word_docids("this").unwrap().unwrap()
+            & context.word_docids("world").unwrap().unwrap();
+        let expected_1 = CriterionResult {
+            query_tree: Some(Operation::Or(false, vec![
+                Operation::And(vec![
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("split".to_string()) }),
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("this".to_string()) }),
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("world".to_string()) }),
+                ]),
+            ])),
+            candidates: candidates_1.clone(),
+            bucket_candidates: Some(candidates_1),
+        };
+
+        assert_eq!(criteria.next().unwrap(), Some(expected_1));
+
+        let candidates_2 = (
+                context.word_docids("split").unwrap().unwrap()
+                & context.word_docids("this").unwrap().unwrap()
+                & context.word_docids("word").unwrap().unwrap()
+            ) - context.word_docids("world").unwrap().unwrap();
+        let expected_2 = CriterionResult {
+            query_tree: Some(Operation::Or(false, vec![
+                Operation::And(vec![
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("split".to_string()) }),
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("this".to_string()) }),
+                    Operation::Or(false, vec![
+                        Operation::Query(Query { prefix: false, kind: QueryKind::exact_with_typo(1, "word".to_string()) }),
+                        Operation::Query(Query { prefix: false, kind: QueryKind::exact("world".to_string()) }),
+                    ]),
+                ]),
+            ])),
+            candidates: candidates_2.clone(),
+            bucket_candidates: Some(candidates_2),
+        };
+
+        assert_eq!(criteria.next().unwrap(), Some(expected_2));
+    }
+
+    #[test]
+    fn initial_placeholder_with_facets() {
+        let context = TestContext::default();
+        let query_tree = None;
+        let facet_candidates = context.word_docids("earth").unwrap();
+
+        let mut criteria = Typo::initial(&context, query_tree, facet_candidates.clone()).unwrap();
+
+        let expected = CriterionResult {
+            query_tree: None,
+            candidates: facet_candidates.clone().unwrap(),
+            bucket_candidates: facet_candidates,
+        };
+
+        // first iteration, returns the facet candidates
+        assert_eq!(criteria.next().unwrap(), Some(expected));
+
+        // second iteration, returns None because there is nothing more to do
+        assert!(criteria.next().unwrap().is_none());
+    }
+
+    #[test]
+    fn initial_query_tree_with_facets() {
+        let context = TestContext::default();
+        let query_tree = Operation::Or(false, vec![
+            Operation::And(vec![
+                Operation::Query(Query { prefix: false, kind: QueryKind::exact("split".to_string()) }),
+                Operation::Query(Query { prefix: false, kind: QueryKind::exact("this".to_string()) }),
+                Operation::Query(Query { prefix: false, kind: QueryKind::tolerant(1, "world".to_string()) }),
+            ])
+        ]);
+
+        let facet_candidates = context.word_docids("earth").unwrap().unwrap();
+
+        let mut criteria = Typo::initial(&context, Some(query_tree), Some(facet_candidates.clone())).unwrap();
+
+        let candidates_1 = context.word_docids("split").unwrap().unwrap()
+            & context.word_docids("this").unwrap().unwrap()
+            & context.word_docids("world").unwrap().unwrap();
+        let expected_1 = CriterionResult {
+            query_tree: Some(Operation::Or(false, vec![
+                Operation::And(vec![
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("split".to_string()) }),
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("this".to_string()) }),
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("world".to_string()) }),
+                ]),
+            ])),
+            candidates: &candidates_1 & &facet_candidates,
+            bucket_candidates: Some(candidates_1 & &facet_candidates),
+        };
+
+        assert_eq!(criteria.next().unwrap(), Some(expected_1));
+
+        let candidates_2 = (
+                context.word_docids("split").unwrap().unwrap()
+                & context.word_docids("this").unwrap().unwrap()
+                & context.word_docids("word").unwrap().unwrap()
+            ) - context.word_docids("world").unwrap().unwrap();
+        let expected_2 = CriterionResult {
+            query_tree: Some(Operation::Or(false, vec![
+                Operation::And(vec![
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("split".to_string()) }),
+                    Operation::Query(Query { prefix: false, kind: QueryKind::exact("this".to_string()) }),
+                    Operation::Or(false, vec![
+                        Operation::Query(Query { prefix: false, kind: QueryKind::exact_with_typo(1, "word".to_string()) }),
+                        Operation::Query(Query { prefix: false, kind: QueryKind::exact("world".to_string()) }),
+                    ]),
+                ]),
+            ])),
+            candidates: &candidates_2 & &facet_candidates,
+            bucket_candidates: Some(candidates_2 & &facet_candidates),
+        };
+
+        assert_eq!(criteria.next().unwrap(), Some(expected_2));
+    }
+
+}
diff --git a/milli/src/search/query_tree.rs b/milli/src/search/query_tree.rs
index 02f6dc0c8..715a4864e 100644
--- a/milli/src/search/query_tree.rs
+++ b/milli/src/search/query_tree.rs
@@ -94,11 +94,15 @@ pub enum QueryKind {
 }
 
 impl QueryKind {
-    fn exact(word: String) -> Self {
+    pub fn exact(word: String) -> Self {
         QueryKind::Exact { original_typo: 0, word }
     }
 
-    fn tolerant(typo: u8, word: String) -> Self {
+    pub fn exact_with_typo(original_typo: u8, word: String) -> Self {
+        QueryKind::Exact { original_typo, word }
+    }
+
+    pub fn tolerant(typo: u8, word: String) -> Self {
         QueryKind::Tolerant { typo, word }
     }
 