Merge Phrase and WordDerivations into one structure

parent 3004e281d7
commit 31628c5cd4
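In short, the old `WordDerivations` struct and the two-variant `QueryTerm` enum (`Phrase` / `Word { derivations }`) are folded into one flat `QueryTerm` struct that lives in its own interner. A minimal stand-alone sketch of the resulting shape, with simplified stand-in types (the real field list is in the query_term.rs hunks below):

    // Sketch only: `Id` stands in for the crate's `Interned<T>` handles.
    type Id = u32;

    struct Phrase { words: Vec<Option<Id>> }

    struct QueryTerm {
        original: Id,              // the word as typed
        is_ngram: bool,            // built by gluing adjacent query words together
        is_prefix: bool,
        phrase: Option<Id>,        // replaces the enum's `Phrase` variant
        zero_typo: Option<Id>,     // the rest used to live in `WordDerivations`
        split_words: Option<Id>,
    }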
@@ -6,7 +6,7 @@ use std::time::Instant;
 use rand::random;
 use roaring::RoaringBitmap;
 
-use crate::search::new::query_term::{LocatedQueryTerm, QueryTerm, WordDerivations};
+use crate::search::new::query_term::{LocatedQueryTerm, QueryTerm};
 use crate::search::new::ranking_rule_graph::{
     Edge, EdgeCondition, EmptyPathsCache, ProximityGraph, RankingRuleGraph, RankingRuleGraphTrait,
     TypoGraph,
@@ -432,70 +432,70 @@ results.{random} {{
         file: &mut File,
     ) {
         match &node {
-            QueryNode::Term(LocatedQueryTerm { value, .. }) => match value {
-                QueryTerm::Phrase { phrase } => {
-                    let phrase = ctx.phrase_interner.get(*phrase);
-                    let phrase_str = phrase.description(&ctx.word_interner);
-                    writeln!(file, "{node_idx} : \"{phrase_str}\"").unwrap();
-                }
-                QueryTerm::Word { derivations } => {
-                    let WordDerivations {
-                        original,
-                        zero_typo,
-                        one_typo,
-                        two_typos,
-                        use_prefix_db,
-                        synonyms,
-                        split_words,
-                        prefix_of,
-                        is_prefix: _,
-                    } = ctx.derivations_interner.get(*derivations);
+            QueryNode::Term(LocatedQueryTerm { value, .. }) => {
+                let QueryTerm {
+                    original,
+                    zero_typo,
+                    one_typo,
+                    two_typos,
+                    use_prefix_db,
+                    synonyms,
+                    split_words,
+                    prefix_of,
+                    is_prefix: _,
+                    is_ngram: _,
+                    phrase,
+                } = ctx.term_interner.get(*value);
 
                 let original = ctx.word_interner.get(*original);
                 writeln!(
                     file,
                     "{node_idx} : \"{original}\" {{
 shape: class"
                 )
                 .unwrap();
                 for w in zero_typo.iter().copied() {
                     let w = ctx.word_interner.get(w);
                     writeln!(file, "\"{w}\" : 0").unwrap();
                 }
                 for w in prefix_of.iter().copied() {
                     let w = ctx.word_interner.get(w);
                     writeln!(file, "\"{w}\" : 0P").unwrap();
                 }
                 for w in one_typo.iter().copied() {
                     let w = ctx.word_interner.get(w);
                     writeln!(file, "\"{w}\" : 1").unwrap();
                 }
                 for w in two_typos.iter().copied() {
                     let w = ctx.word_interner.get(w);
                     writeln!(file, "\"{w}\" : 2").unwrap();
                 }
+                if let Some(phrase) = phrase {
+                    let phrase = ctx.phrase_interner.get(*phrase);
+                    let phrase_str = phrase.description(&ctx.word_interner);
+                    writeln!(file, "\"{phrase_str}\" : phrase").unwrap();
+                }
                 if let Some(split_words) = split_words {
                     let phrase = ctx.phrase_interner.get(*split_words);
                     let phrase_str = phrase.description(&ctx.word_interner);
                     writeln!(file, "\"{phrase_str}\" : split_words").unwrap();
                 }
                 for synonym in synonyms.iter().copied() {
                     let phrase = ctx.phrase_interner.get(synonym);
                     let phrase_str = phrase.description(&ctx.word_interner);
                     writeln!(file, "\"{phrase_str}\" : synonym").unwrap();
                 }
                 if let Some(use_prefix_db) = use_prefix_db {
                     let p = ctx.word_interner.get(*use_prefix_db);
                     writeln!(file, "use prefix DB : {p}").unwrap();
                 }
                 for (d, edges) in distances.iter() {
                     writeln!(file, "\"distance {d}\" : {:?}", edges.iter().collect::<Vec<_>>())
                         .unwrap();
                 }
 
                 writeln!(file, "}}").unwrap();
-                }
-            },
+            }
             QueryNode::Deleted => panic!(),
             QueryNode::Start => {
                 writeln!(file, "{node_idx} : START").unwrap();
@@ -600,32 +600,20 @@ shape: class"
             graph.edges_store[edge_idx as usize].as_ref().unwrap();
         let source_node = &graph.query_graph.nodes[*source_node as usize];
         let source_node_desc = match source_node {
-            QueryNode::Term(term) => match term.value {
-                QueryTerm::Phrase { phrase } => {
-                    let phrase = ctx.phrase_interner.get(phrase);
-                    phrase.description(&ctx.word_interner)
-                }
-                QueryTerm::Word { derivations } => {
-                    let derivations = ctx.derivations_interner.get(derivations);
-                    ctx.word_interner.get(derivations.original).to_owned()
-                }
-            },
+            QueryNode::Term(term) => {
+                let term = ctx.term_interner.get(term.value);
+                ctx.word_interner.get(term.original).to_owned()
+            }
             QueryNode::Deleted => panic!(),
             QueryNode::Start => "START".to_owned(),
             QueryNode::End => "END".to_owned(),
         };
         let dest_node = &graph.query_graph.nodes[*dest_node as usize];
         let dest_node_desc = match dest_node {
-            QueryNode::Term(term) => match term.value {
-                QueryTerm::Phrase { phrase } => {
-                    let phrase = ctx.phrase_interner.get(phrase);
-                    phrase.description(&ctx.word_interner)
-                }
-                QueryTerm::Word { derivations } => {
-                    let derivations = ctx.derivations_interner.get(derivations);
-                    ctx.word_interner.get(derivations.original).to_owned()
-                }
-            },
+            QueryNode::Term(term) => {
+                let term = ctx.term_interner.get(term.value);
+                ctx.word_interner.get(term.original).to_owned()
+            }
             QueryNode::Deleted => panic!(),
             QueryNode::Start => "START".to_owned(),
             QueryNode::End => "END".to_owned(),
@@ -27,7 +27,7 @@ pub use ranking_rules::{bucket_sort, RankingRule, RankingRuleOutput, RankingRule
 use roaring::RoaringBitmap;
 
 use self::interner::Interner;
-use self::query_term::{Phrase, WordDerivations};
+use self::query_term::{Phrase, QueryTerm};
 use self::ranking_rules::PlaceholderQuery;
 use self::resolve_query_graph::{resolve_query_graph, QueryTermDocIdsCache};
 use crate::search::new::graph_based_ranking_rule::{Proximity, Typo};
@@ -41,8 +41,8 @@ pub struct SearchContext<'ctx> {
     pub db_cache: DatabaseCache<'ctx>,
     pub word_interner: Interner<String>,
     pub phrase_interner: Interner<Phrase>,
-    pub derivations_interner: Interner<WordDerivations>,
-    pub query_term_docids: QueryTermDocIdsCache,
+    pub term_interner: Interner<QueryTerm>,
+    pub term_docids: QueryTermDocIdsCache,
 }
 impl<'ctx> SearchContext<'ctx> {
     pub fn new(index: &'ctx Index, txn: &'ctx RoTxn<'ctx>) -> Self {
@@ -52,8 +52,8 @@ impl<'ctx> SearchContext<'ctx> {
             db_cache: <_>::default(),
             word_interner: <_>::default(),
             phrase_interner: <_>::default(),
-            derivations_interner: <_>::default(),
-            query_term_docids: <_>::default(),
+            term_interner: <_>::default(),
+            term_docids: <_>::default(),
         }
     }
 }
@@ -45,7 +45,7 @@ For the search query `sunflower`, we need to register the following things:
 - and also the couple of adjacent words `sun flower`
 - as well as all the user-defined synonyms of `sunflower`
 
-All these derivations of a word will be stored in [`WordDerivations`].
+All these derivations of a word will be stored in [`QueryTerm`].
 
 ## Example 2:
 For the search query `summer house by`.
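To make that doc comment concrete: after this commit a single merged term can carry every derivation of `sunflower` at once. A hedged sketch with illustrative values only (plain strings instead of interned handles, not output of the real code):

    // Hypothetical contents of the merged term for the query word "sunflower".
    let zero_typo = Some("sunflower");
    let one_typo = vec!["sunflowers"];          // words within one typo
    let split_words = Some("sun flower");       // the adjacent-word split
    let synonyms = vec!["helianthus"];          // a user-defined synonym
    let phrase: Option<&str> = None;            // set only for quoted phrases
    let is_prefix = false;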
@@ -148,7 +148,7 @@ impl QueryGraph {
             let mut new_nodes = vec![];
             let new_node_idx = graph.add_node(&prev0, QueryNode::Term(term0.clone()));
             new_nodes.push(new_node_idx);
-            if term0.is_empty(&ctx.derivations_interner) {
+            if term0.is_empty(&ctx.term_interner) {
                 empty_nodes.push(new_node_idx);
             }
 
@@ -30,16 +30,20 @@ impl Phrase {
 /// A structure storing all the different ways to match
 /// a term in the user's search query.
 #[derive(Clone, PartialEq, Eq, Hash)]
-pub struct WordDerivations {
+pub struct QueryTerm {
     /// The original terms, for debugging purposes
     pub original: Interned<String>,
+    /// Whether the term is an ngram
+    pub is_ngram: bool,
+    /// Whether the term can be only the prefix of a word
     pub is_prefix: bool,
-    /// A single word equivalent to the original one, with zero typos
+    /// The original phrase, if any
+    pub phrase: Option<Interned<Phrase>>,
+    /// A single word equivalent to the original term, with zero typos
     pub zero_typo: Option<Interned<String>>,
     /// All the words that contain the original word as prefix
     pub prefix_of: Box<[Interned<String>]>,
-    /// All the synonyms of the original word
+    /// All the synonyms of the original word or phrase
     pub synonyms: Box<[Interned<Phrase>]>,
 
     /// The original word split into multiple consecutive words
@@ -54,10 +58,15 @@ pub struct WordDerivations {
     /// A prefix in the prefix databases matching the original word
     pub use_prefix_db: Option<Interned<String>>,
 }
-impl WordDerivations {
-    pub fn empty(word_interner: &mut Interner<String>, original: &str) -> Self {
+impl QueryTerm {
+    pub fn phrase(
+        word_interner: &mut Interner<String>,
+        phrase_interner: &mut Interner<Phrase>,
+        phrase: Phrase,
+    ) -> Self {
         Self {
-            original: word_interner.insert(original.to_owned()),
+            original: word_interner.insert(phrase.description(word_interner)),
+            phrase: Some(phrase_interner.insert(phrase)),
             is_prefix: false,
             zero_typo: None,
             prefix_of: Box::new([]),
@@ -66,12 +75,28 @@ impl WordDerivations {
             one_typo: Box::new([]),
             two_typos: Box::new([]),
             use_prefix_db: None,
+            is_ngram: false,
+        }
+    }
+    pub fn empty(word_interner: &mut Interner<String>, original: &str) -> Self {
+        Self {
+            original: word_interner.insert(original.to_owned()),
+            phrase: None,
+            is_prefix: false,
+            zero_typo: None,
+            prefix_of: Box::new([]),
+            synonyms: Box::new([]),
+            split_words: None,
+            one_typo: Box::new([]),
+            two_typos: Box::new([]),
+            use_prefix_db: None,
+            is_ngram: false,
         }
     }
     /// Return an iterator over all the single words derived from the original word.
     ///
     /// This excludes synonyms, split words, and words stored in the prefix databases.
-    pub fn all_single_word_derivations_except_prefix_db(
+    pub fn all_single_words_except_prefix_db(
         &'_ self,
     ) -> impl Iterator<Item = Interned<String>> + Clone + '_ {
         self.zero_typo
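The two constructors above cover the cases the old enum distinguished: `QueryTerm::phrase` builds a term whose `phrase` field is set (and whose `original` is the phrase description), while `QueryTerm::empty` builds a term with no derivations at all. A hedged usage sketch, assuming a `ctx` holding the interners shown in the SearchContext hunk above:

    // Sketch: building both kinds of terms after the merge (not real milli code).
    let phrase = Phrase { words: vec![Some(sun), Some(flower)] };
    let phrase_term = QueryTerm::phrase(&mut ctx.word_interner, &mut ctx.phrase_interner, phrase);
    let empty_term = QueryTerm::empty(&mut ctx.word_interner, "averyveryverylongrejectedword");
    // Query nodes now store an Interned<QueryTerm> instead of the term itself:
    let phrase_term_id = ctx.term_interner.insert(phrase_term);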
@@ -84,7 +109,7 @@ impl WordDerivations {
     /// Return an iterator over all the phrases derived from the original term.
     ///
     /// This includes the split words and the synonyms of the original term.
-    pub fn all_phrase_derivations(&'_ self) -> impl Iterator<Item = Interned<Phrase>> + Clone + '_ {
+    pub fn all_phrases(&'_ self) -> impl Iterator<Item = Interned<Phrase>> + Clone + '_ {
         self.split_words.iter().chain(self.synonyms.iter()).copied()
     }
     pub fn is_empty(&self) -> bool {
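The renamed iterators keep their old behaviour: one walks every single-word derivation (excluding the prefix databases), the other chains `split_words` and `synonyms`. A self-contained sketch of the same chaining pattern over plain data, to show the shape of what callers receive:

    // Same iterator shape as all_single_words_except_prefix_db / all_phrases, with plain strings.
    fn example() {
        let zero_typo = Some("sunflower");
        let one_typo = ["sunflowers"];
        let split_words = Some("sun flower");
        let synonyms = ["helianthus"];

        let all_words = zero_typo.iter().chain(one_typo.iter()).copied();
        let all_phrases = split_words.iter().chain(synonyms.iter()).copied();
        assert_eq!(all_words.count(), 2);
        assert_eq!(all_phrases.count(), 2);
    }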
@@ -98,15 +123,15 @@ impl WordDerivations {
     }
 }
 
-/// Compute the word derivations for the given word
-pub fn word_derivations(
+/// Compute the query term for the given word
+pub fn query_term_from_word(
     ctx: &mut SearchContext,
     word: &str,
     max_typo: u8,
     is_prefix: bool,
-) -> Result<WordDerivations> {
+) -> Result<QueryTerm> {
     if word.len() > MAX_WORD_LENGTH {
-        return Ok(WordDerivations::empty(&mut ctx.word_interner, word));
+        return Ok(QueryTerm::empty(&mut ctx.word_interner, word));
     }
 
     let fst = ctx.index.words_fst(ctx.txn)?;
@@ -223,8 +248,9 @@ pub fn word_derivations(
         })
         .collect();
 
-    Ok(WordDerivations {
+    Ok(QueryTerm {
         original: word_interned,
+        phrase: None,
         is_prefix,
         zero_typo,
         prefix_of: prefix_of.into_boxed_slice(),
@@ -233,6 +259,7 @@ pub fn word_derivations(
         one_typo: one_typo.into_boxed_slice(),
         two_typos: two_typos.into_boxed_slice(),
         use_prefix_db,
+        is_ngram: false,
     })
 }
 
@@ -266,35 +293,13 @@ fn split_best_frequency(
     Ok(best.map(|(_, left, right)| (left.to_owned(), right.to_owned())))
 }
 
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub enum QueryTerm {
-    Phrase { phrase: Interned<Phrase> },
-    // TODO: change to `Interned<WordDerivations>`?
-    Word { derivations: Interned<WordDerivations> },
-}
-
 impl QueryTerm {
-    pub fn is_prefix(&self, derivations_interner: &Interner<WordDerivations>) -> bool {
-        match self {
-            QueryTerm::Phrase { .. } => false,
-            QueryTerm::Word { derivations } => derivations_interner.get(*derivations).is_prefix,
-        }
-    }
     /// Return the original word from the given query term
-    pub fn original_single_word(
-        &self,
-        derivations_interner: &Interner<WordDerivations>,
-    ) -> Option<Interned<String>> {
-        match self {
-            QueryTerm::Phrase { phrase: _ } => None,
-            QueryTerm::Word { derivations } => {
-                let derivations = derivations_interner.get(*derivations);
-                if derivations.is_empty() {
-                    None
-                } else {
-                    Some(derivations.original)
-                }
-            }
-        }
+    pub fn original_single_word(&self) -> Option<Interned<String>> {
+        if self.phrase.is_some() || self.is_ngram {
+            None
+        } else {
+            Some(self.original)
+        }
     }
 }
@@ -302,19 +307,14 @@ impl QueryTerm {
 /// A query term coupled with its position in the user's search query.
 #[derive(Clone)]
 pub struct LocatedQueryTerm {
-    pub value: QueryTerm,
+    pub value: Interned<QueryTerm>,
     pub positions: RangeInclusive<i8>,
 }
 
 impl LocatedQueryTerm {
-    /// Return `true` iff the word derivations within the query term are empty
-    pub fn is_empty(&self, interner: &Interner<WordDerivations>) -> bool {
-        match self.value {
-            // TODO: phrases should be greedily computed, so that they can be excluded from
-            // the query graph right from the start?
-            QueryTerm::Phrase { phrase: _ } => false,
-            QueryTerm::Word { derivations, .. } => interner.get(derivations).is_empty(),
-        }
+    /// Return `true` iff the term is empty
+    pub fn is_empty(&self, interner: &Interner<QueryTerm>) -> bool {
+        interner.get(self.value).is_empty()
     }
 }
 
@@ -360,18 +360,16 @@ pub fn located_query_terms_from_string<'ctx>(
                     } else {
                         let word = ctx.word_interner.insert(token.lemma().to_string());
                         // TODO: in a phrase, check that every word exists
-                        // otherwise return WordDerivations::Empty
+                        // otherwise return an empty term
                         phrase.push(Some(word));
                     }
                 } else if peekable.peek().is_some() {
                     match token.kind {
                         TokenKind::Word => {
                             let word = token.lemma();
-                            let derivations = word_derivations(ctx, word, nbr_typos(word), false)?;
+                            let term = query_term_from_word(ctx, word, nbr_typos(word), false)?;
                             let located_term = LocatedQueryTerm {
-                                value: QueryTerm::Word {
-                                    derivations: ctx.derivations_interner.insert(derivations),
-                                },
+                                value: ctx.term_interner.insert(term),
                                 positions: position..=position,
                             };
                             located_terms.push(located_term);
@@ -380,11 +378,9 @@ pub fn located_query_terms_from_string<'ctx>(
                     }
                 } else {
                     let word = token.lemma();
-                    let derivations = word_derivations(ctx, word, nbr_typos(word), true)?;
+                    let term = query_term_from_word(ctx, word, nbr_typos(word), true)?;
                     let located_term = LocatedQueryTerm {
-                        value: QueryTerm::Word {
-                            derivations: ctx.derivations_interner.insert(derivations),
-                        },
+                        value: ctx.term_interner.insert(term),
                         positions: position..=position,
                     };
                     located_terms.push(located_term);
@@ -408,11 +404,11 @@ pub fn located_query_terms_from_string<'ctx>(
                 if !phrase.is_empty() && (quote_count > 0 || separator_kind == SeparatorKind::Hard)
                 {
                     let located_query_term = LocatedQueryTerm {
-                        value: QueryTerm::Phrase {
-                            phrase: ctx
-                                .phrase_interner
-                                .insert(Phrase { words: mem::take(&mut phrase) }),
-                        },
+                        value: ctx.term_interner.insert(QueryTerm::phrase(
+                            &mut ctx.word_interner,
+                            &mut ctx.phrase_interner,
+                            Phrase { words: mem::take(&mut phrase) },
+                        )),
                         positions: phrase_start..=phrase_end,
                     };
                     located_terms.push(located_query_term);
@@ -425,9 +421,11 @@ pub fn located_query_terms_from_string<'ctx>(
     // If a quote is never closed, we consider all of the end of the query as a phrase.
     if !phrase.is_empty() {
         let located_query_term = LocatedQueryTerm {
-            value: QueryTerm::Phrase {
-                phrase: ctx.phrase_interner.insert(Phrase { words: mem::take(&mut phrase) }),
-            },
+            value: ctx.term_interner.insert(QueryTerm::phrase(
+                &mut ctx.word_interner,
+                &mut ctx.phrase_interner,
+                Phrase { words: mem::take(&mut phrase) },
+            )),
             positions: phrase_start..=phrase_end,
         };
         located_terms.push(located_query_term);
@@ -474,8 +472,7 @@ pub fn make_ngram(
     }
     let mut words_interned = vec![];
     for term in terms {
-        if let Some(original_term_word) = term.value.original_single_word(&ctx.derivations_interner)
-        {
+        if let Some(original_term_word) = ctx.term_interner.get(term.value).original_single_word() {
             words_interned.push(original_term_word);
         } else {
             return Ok(None);
@@ -486,121 +483,40 @@ pub fn make_ngram(
 
     let start = *terms.first().as_ref().unwrap().positions.start();
     let end = *terms.last().as_ref().unwrap().positions.end();
-    let is_prefix = terms.last().as_ref().unwrap().value.is_prefix(&ctx.derivations_interner);
+    let is_prefix = ctx.term_interner.get(terms.last().as_ref().unwrap().value).is_prefix;
     let ngram_str = words.join("");
     if ngram_str.len() > MAX_WORD_LENGTH {
         return Ok(None);
     }
 
-    let mut derivations = word_derivations(
+    let mut term = query_term_from_word(
         ctx,
         &ngram_str,
         number_of_typos_allowed(ngram_str.as_str()).saturating_sub(terms.len() as u8),
         is_prefix,
     )?;
-    derivations.original = ctx.word_interner.insert(words.join(" "));
+    term.original = ctx.word_interner.insert(words.join(" "));
     // Now add the synonyms
     let index_synonyms = ctx.index.synonyms(ctx.txn)?;
-    let mut derivations_synonyms = derivations.synonyms.to_vec();
-    derivations_synonyms.extend(
-        index_synonyms.get(&words).cloned().unwrap_or_default().into_iter().map(|words| {
+    let mut term_synonyms = term.synonyms.to_vec();
+    term_synonyms.extend(index_synonyms.get(&words).cloned().unwrap_or_default().into_iter().map(
+        |words| {
             let words = words.into_iter().map(|w| Some(ctx.word_interner.insert(w))).collect();
             ctx.phrase_interner.insert(Phrase { words })
-        }),
-    );
-    derivations.synonyms = derivations_synonyms.into_boxed_slice();
-    if let Some(split_words) = derivations.split_words {
+        },
+    ));
+    term.synonyms = term_synonyms.into_boxed_slice();
+    if let Some(split_words) = term.split_words {
         let split_words = ctx.phrase_interner.get(split_words);
         if split_words.words == words_interned.iter().map(|&i| Some(i)).collect::<Vec<_>>() {
-            derivations.split_words = None;
+            term.split_words = None;
         }
     }
-    if derivations.is_empty() {
+    if term.is_empty() {
        return Ok(None);
     }
-    let term = LocatedQueryTerm {
-        value: QueryTerm::Word { derivations: ctx.derivations_interner.insert(derivations) },
-        positions: start..=end,
-    };
+    term.is_ngram = true;
+    let term = LocatedQueryTerm { value: ctx.term_interner.insert(term), positions: start..=end };
 
     Ok(Some(term))
 }
-
-// // TODO: return a word derivations instead?
-// pub fn ngram2(
-//     ctx: &mut SearchContext,
-//     x: &LocatedQueryTerm,
-//     y: &LocatedQueryTerm,
-//     number_of_typos_allowed: impl Fn(&str) -> u8,
-// ) -> Result<Option<LocatedQueryTerm>> {
-//     if *x.positions.end() != y.positions.start() - 1 {
-//         return Ok(None);
-//     }
-//     match (
-//         x.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
-//         y.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
-//     ) {
-//         (Some(w1), Some(w2)) => {
-//             let ngram2_str = format!("{w1}{w2}");
-//             let mut derivations = word_derivations(
-//                 ctx,
-//                 &ngram2_str,
-//                 number_of_typos_allowed(ngram2_str.as_str()).saturating_sub(1),
-//                 y.value.is_prefix(&ctx.derivations_interner),
-//             )?;
-//             // Now add the synonyms
-//             let index_synonyms = ctx.index.synonyms(ctx.txn)?;
-//             let mut derivations_synonyms = derivations.synonyms.to_vec();
-//             derivations_synonyms.extend(
-//                 index_synonyms
-//                     .get(&vec![w1.to_owned(), w2.to_owned()])
-//                     .cloned()
-//                     .unwrap_or_default()
-//                     .into_iter()
-//                     .map(|words| {
-//                         let words =
-//                             words.into_iter().map(|w| Some(ctx.word_interner.insert(w))).collect();
-//                         ctx.phrase_interner.insert(Phrase { words })
-//                     }),
-//             );
-
-//             let term = LocatedQueryTerm {
-//                 value: QueryTerm::Word {
-//                     derivations: ctx.derivations_interner.insert(derivations),
-//                 },
-//                 positions: *x.positions.start()..=*y.positions.end(),
-//             };
-
-//             Ok(Some(term))
-//         }
-//         _ => Ok(None),
-//     }
-// }
-
-// // TODO: return a word derivations instead?
-// pub fn ngram3(
-//     ctx: &mut SearchContext,
-//     x: &LocatedQueryTerm,
-//     y: &LocatedQueryTerm,
-//     z: &LocatedQueryTerm,
-// ) -> Option<(Interned<String>, RangeInclusive<i8>)> {
-//     if *x.positions.end() != y.positions.start() - 1
-//         || *y.positions.end() != z.positions.start() - 1
-//     {
-//         return None;
-//     }
-//     match (
-//         &x.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
-//         &y.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
-//         &z.value.original_single_word(&ctx.word_interner, &ctx.derivations_interner),
-//     ) {
-//         (Some(w1), Some(w2), Some(w3)) => {
-//             let term = (
-//                 ctx.word_interner.insert(format!("{w1}{w2}{w3}")),
-//                 *x.positions.start()..=*z.positions.end(),
-//             );
-//             Some(term)
-//         }
-//         _ => None,
-//     }
-// }
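`make_ngram` keeps its typo accounting after the rename: the glued-together ngram receives the typo budget of a word of its length, reduced by the number of words it merges, and the resulting term is flagged `is_ngram`. The budget rule in isolation, as a hedged sketch (the mapping from word length to allowed typos is illustrative, not quoted from the code):

    // Mirrors `number_of_typos_allowed(ngram_str).saturating_sub(terms.len() as u8)` above.
    fn ngram_typo_budget(allowed_for_length: u8, n_merged_words: u8) -> u8 {
        allowed_for_length.saturating_sub(n_merged_words)
    }
    // e.g. a 2-word ngram whose length would normally allow 2 typos ends up with 0.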
|
@@ -22,28 +22,21 @@ impl<G: RankingRuleGraphTrait> RankingRuleGraph<G> {
         let mut edges_store = vec![];
         let mut edges_of_node = vec![];
 
-        for (node_idx, node) in graph_nodes.iter().enumerate() {
+        for (source_idx, source_node) in graph_nodes.iter().enumerate() {
             edges_of_node.push(HashSet::new());
             let new_edges = edges_of_node.last_mut().unwrap();
 
-            let Some(source_node_data) = G::build_step_visit_source_node(ctx, node)? else { continue };
-
-            for successor_idx in graph_edges[node_idx].successors.iter() {
-                let dest_node = &graph_nodes[successor_idx as usize];
-                let edges = G::build_step_visit_destination_node(
-                    ctx,
-                    &mut conditions_interner,
-                    dest_node,
-                    &source_node_data,
-                )?;
+            for dest_idx in graph_edges[source_idx].successors.iter() {
+                let dest_node = &graph_nodes[dest_idx as usize];
+                let edges = G::build_edges(ctx, &mut conditions_interner, source_node, dest_node)?;
                 if edges.is_empty() {
                     continue;
                 }
 
                 for (cost, condition) in edges {
                     edges_store.push(Some(Edge {
-                        source_node: node_idx as u16,
-                        dest_node: successor_idx,
+                        source_node: source_idx as u16,
+                        dest_node: dest_idx,
                         cost,
                         condition,
                     }));
@@ -80,11 +80,6 @@ pub trait RankingRuleGraphTrait: Sized {
     /// in [`resolve_edge_condition`](RankingRuleGraphTrait::resolve_edge_condition).
     type EdgeCondition: Sized + Clone + PartialEq + Eq + Hash;
 
-    /// A structure used in the construction of the graph, created when a
-    /// query graph source node is visited. It is used to determine the cost
-    /// and condition of a ranking rule edge when the destination node is visited.
-    type BuildVisitedFromNode;
-
     /// Return the label of the given edge condition, to be used when visualising
     /// the ranking rule graph.
     fn label_for_edge_condition(edge: &Self::EdgeCondition) -> String;
@@ -97,22 +92,13 @@ pub trait RankingRuleGraphTrait: Sized {
         universe: &RoaringBitmap,
     ) -> Result<RoaringBitmap>;
 
-    /// Prepare to build the edges outgoing from `source_node`.
-    ///
-    /// This call is followed by zero, one or more calls to [`build_step_visit_destination_node`](RankingRuleGraphTrait::build_step_visit_destination_node),
-    /// which builds the actual edges.
-    fn build_step_visit_source_node<'ctx>(
-        ctx: &mut SearchContext<'ctx>,
-        source_node: &QueryNode,
-    ) -> Result<Option<Self::BuildVisitedFromNode>>;
-
-    /// Return the cost and condition of the edges going from the previously visited node
-    /// (with [`build_step_visit_source_node`](RankingRuleGraphTrait::build_step_visit_source_node)) to `dest_node`.
-    fn build_step_visit_destination_node<'from_data, 'ctx: 'from_data>(
+    /// Return the cost and condition of the edges going from `source_node` to `dest_node`.
+    fn build_edges<'ctx>(
         ctx: &mut SearchContext<'ctx>,
         conditions_interner: &mut Interner<Self::EdgeCondition>,
+        source_node: &QueryNode,
         dest_node: &QueryNode,
-        source_node_data: &'from_data Self::BuildVisitedFromNode,
     ) -> Result<Vec<(u8, EdgeCondition<Self::EdgeCondition>)>>;
 
     fn log_state(
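After this change a ranking-rule graph is built with a single callback per (source, destination) pair instead of the earlier visit-source / visit-destination two-step. A toy illustration of the calling pattern only (not the real trait, which also threads the search context and the condition interner):

    // Sketch: how the builder loop above drives `build_edges` for every successor pair.
    fn build_all_edges<N>(
        nodes: &[N],
        successors: impl Fn(usize) -> Vec<usize>,
        mut build_edges: impl FnMut(&N, &N) -> Vec<(u8, &'static str)>,
    ) {
        for (source_idx, source_node) in nodes.iter().enumerate() {
            for dest_idx in successors(source_idx) {
                for (cost, condition) in build_edges(source_node, &nodes[dest_idx]) {
                    let _ = (source_idx, dest_idx, cost, condition); // an Edge would be stored here
                }
            }
        }
    }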
@@ -4,89 +4,40 @@ use std::collections::BTreeMap;
 use super::ProximityEdge;
 use crate::search::new::db_cache::DatabaseCache;
 use crate::search::new::interner::{Interned, Interner};
-use crate::search::new::query_term::{LocatedQueryTerm, Phrase, QueryTerm, WordDerivations};
+use crate::search::new::query_term::{LocatedQueryTerm, Phrase, QueryTerm};
 use crate::search::new::ranking_rule_graph::proximity::WordPair;
 use crate::search::new::ranking_rule_graph::EdgeCondition;
 use crate::search::new::{QueryNode, SearchContext};
 use crate::Result;
 use heed::RoTxn;
 
-pub fn visit_from_node(
-    ctx: &mut SearchContext,
-    from_node: &QueryNode,
-) -> Result<Option<(Vec<(Option<Interned<Phrase>>, Interned<String>)>, i8)>> {
-    let SearchContext { derivations_interner, .. } = ctx;
-
-    let (left_phrase, left_derivations, left_end_position) = match from_node {
-        QueryNode::Term(LocatedQueryTerm { value: value1, positions: pos1 }) => {
-            match value1 {
-                QueryTerm::Word { derivations } => {
-                    (None, derivations_interner.get(*derivations).clone(), *pos1.end())
-                }
-                QueryTerm::Phrase { phrase: phrase_interned } => {
-                    let phrase = ctx.phrase_interner.get(*phrase_interned);
-                    if let Some(original) = *phrase.words.last().unwrap() {
-                        (
-                            Some(*phrase_interned),
-                            WordDerivations {
-                                original,
-                                zero_typo: Some(original),
-                                one_typo: Box::new([]),
-                                two_typos: Box::new([]),
-                                use_prefix_db: None,
-                                synonyms: Box::new([]),
-                                split_words: None,
-                                is_prefix: false,
-                                prefix_of: Box::new([]),
-                            },
-                            *pos1.end(),
-                        )
-                    } else {
-                        // No word pairs if the phrase does not have a regular word as its last term
-                        return Ok(None);
-                    }
-                }
-            }
-        }
-        QueryNode::Start => (None, WordDerivations::empty(&mut ctx.word_interner, ""), -1),
-        _ => return Ok(None),
-    };
-
-    // left term cannot be a prefix
-    assert!(left_derivations.use_prefix_db.is_none() && !left_derivations.is_prefix);
-
-    let last_word_left_phrase = if let Some(left_phrase_interned) = left_phrase {
-        let left_phrase = ctx.phrase_interner.get(left_phrase_interned);
-        left_phrase.words.last().copied().unwrap()
-    } else {
-        None
-    };
-    let left_single_word_iter: Vec<(Option<Interned<Phrase>>, Interned<String>)> = left_derivations
-        .all_single_word_derivations_except_prefix_db()
-        .chain(last_word_left_phrase.iter().copied())
-        .map(|w| (left_phrase, w))
-        .collect();
-    let left_phrase_iter: Vec<(Option<Interned<Phrase>>, Interned<String>)> = left_derivations
-        .all_phrase_derivations()
-        .map(|left_phrase_interned: Interned<Phrase>| {
-            let left_phrase = ctx.phrase_interner.get(left_phrase_interned);
-            let last_word_left_phrase: Interned<String> =
-                left_phrase.words.last().unwrap().unwrap();
-            let r: (Option<Interned<Phrase>>, Interned<String>) =
-                (Some(left_phrase_interned), last_word_left_phrase);
-            r
-        })
-        .collect();
-    let mut left_word_iter = left_single_word_iter;
-    left_word_iter.extend(left_phrase_iter);
-
-    Ok(Some((left_word_iter, left_end_position)))
+fn last_word_of_term_iter<'t>(
+    t: &'t QueryTerm,
+    phrase_interner: &'t Interner<Phrase>,
+) -> impl Iterator<Item = (Option<Interned<Phrase>>, Interned<String>)> + 't {
+    t.all_single_words_except_prefix_db().map(|w| (None, w)).chain(t.all_phrases().flat_map(
+        move |p| {
+            let phrase = phrase_interner.get(p);
+            phrase.words.last().unwrap().map(|last| (Some(p), last))
+        },
+    ))
+}
+fn first_word_of_term_iter<'t>(
+    t: &'t QueryTerm,
+    phrase_interner: &'t Interner<Phrase>,
+) -> impl Iterator<Item = (Interned<String>, Option<Interned<Phrase>>)> + 't {
+    t.all_single_words_except_prefix_db().map(|w| (w, None)).chain(t.all_phrases().flat_map(
+        move |p| {
+            let phrase = phrase_interner.get(p);
+            phrase.words.first().unwrap().map(|first| (first, Some(p)))
+        },
+    ))
 }
 
-pub fn build_step_visit_destination_node<'ctx, 'from_data>(
+pub fn build_edges<'ctx>(
     ctx: &mut SearchContext<'ctx>,
     conditions_interner: &mut Interner<ProximityEdge>,
-    from_node_data: &'from_data (Vec<(Option<Interned<Phrase>>, Interned<String>)>, i8),
+    from_node: &QueryNode,
     to_node: &QueryNode,
 ) -> Result<Vec<(u8, EdgeCondition<ProximityEdge>)>> {
     let SearchContext {
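The two new helpers replace the old `visit_from_node`: for proximity only the last word of the left term and the first word of the right term matter, including the boundary word of each derived phrase. A self-contained sketch of the same idea over plain strings:

    // Sketch: boundary words of a term = its single-word derivations plus the edge word of each phrase.
    fn last_words<'a>(words: &'a [&'a str], phrases: &'a [Vec<&'a str>]) -> impl Iterator<Item = &'a str> {
        words.iter().copied().chain(phrases.iter().filter_map(|p| p.last().copied()))
    }
    fn first_words<'a>(words: &'a [&'a str], phrases: &'a [Vec<&'a str>]) -> impl Iterator<Item = &'a str> {
        words.iter().copied().chain(phrases.iter().filter_map(|p| p.first().copied()))
    }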
@@ -95,9 +46,19 @@ pub fn build_step_visit_destination_node<'ctx, 'from_data>(
         db_cache,
         word_interner,
         phrase_interner,
-        derivations_interner,
-        query_term_docids: _,
+        term_interner,
+        term_docids: _,
     } = ctx;
 
+    let (left_term, left_end_position) = match from_node {
+        QueryNode::Term(LocatedQueryTerm { value, positions }) => {
+            (term_interner.get(*value), *positions.end())
+        }
+        QueryNode::Deleted => return Ok(vec![]),
+        QueryNode::Start => return Ok(vec![(0, EdgeCondition::Unconditional)]),
+        QueryNode::End => return Ok(vec![]),
+    };
+
     let right_term = match &to_node {
         QueryNode::End => return Ok(vec![(0, EdgeCondition::Unconditional)]),
         QueryNode::Deleted | QueryNode::Start => return Ok(vec![]),
||||||
@ -105,47 +66,14 @@ pub fn build_step_visit_destination_node<'ctx, 'from_data>(
|
|||||||
};
|
};
|
||||||
let LocatedQueryTerm { value: right_value, positions: right_positions } = right_term;
|
let LocatedQueryTerm { value: right_value, positions: right_positions } = right_term;
|
||||||
|
|
||||||
let (right_phrase, right_derivations, right_start_position, right_ngram_length) =
|
let (right_term, right_start_position, right_ngram_length) =
|
||||||
match right_value {
|
(term_interner.get(*right_value), *right_positions.start(), right_positions.len());
|
||||||
QueryTerm::Word { derivations } => (
|
|
||||||
None,
|
|
||||||
derivations_interner.get(*derivations).clone(),
|
|
||||||
*right_positions.start(),
|
|
||||||
right_positions.len(),
|
|
||||||
),
|
|
||||||
QueryTerm::Phrase { phrase: right_phrase_interned } => {
|
|
||||||
let right_phrase = phrase_interner.get(*right_phrase_interned);
|
|
||||||
if let Some(original) = *right_phrase.words.first().unwrap() {
|
|
||||||
(
|
|
||||||
Some(*right_phrase_interned),
|
|
||||||
WordDerivations {
|
|
||||||
original,
|
|
||||||
zero_typo: Some(original),
|
|
||||||
one_typo: Box::new([]),
|
|
||||||
two_typos: Box::new([]),
|
|
||||||
use_prefix_db: None,
|
|
||||||
synonyms: Box::new([]),
|
|
||||||
split_words: None,
|
|
||||||
is_prefix: false,
|
|
||||||
prefix_of: Box::new([]),
|
|
||||||
},
|
|
||||||
*right_positions.start(),
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
// No word pairs if the phrase does not have a regular word as its first term
|
|
||||||
return Ok(vec![]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let (left_derivations, left_end_position) = from_node_data;
|
|
||||||
|
|
||||||
if left_end_position + 1 != right_start_position {
|
if left_end_position + 1 != right_start_position {
|
||||||
// We want to ignore this pair of terms
|
// We want to ignore this pair of terms
|
||||||
// Unconditionally walk through the edge without computing the docids
|
// Unconditionally walk through the edge without computing the docids
|
||||||
// This can happen when, in a query like `the sun flowers are beautiful`, the term
|
// This can happen when, in a query like `the sun flowers are beautiful`, the term
|
||||||
// `flowers` is removed by the words ranking rule due to the terms matching strategy.
|
// `flowers` is removed by the `words` ranking rule.
|
||||||
// The remaining query graph represents `the sun .. are beautiful`
|
// The remaining query graph represents `the sun .. are beautiful`
|
||||||
// but `sun` and `are` have no proximity condition between them
|
// but `sun` and `are` have no proximity condition between them
|
||||||
return Ok(vec![(0, EdgeCondition::Unconditional)]);
|
return Ok(vec![(0, EdgeCondition::Unconditional)]);
|
||||||
@ -153,8 +81,8 @@ pub fn build_step_visit_destination_node<'ctx, 'from_data>(
|
|||||||
|
|
||||||
let mut cost_proximity_word_pairs = BTreeMap::<u8, BTreeMap<u8, Vec<WordPair>>>::new();
|
let mut cost_proximity_word_pairs = BTreeMap::<u8, BTreeMap<u8, Vec<WordPair>>>::new();
|
||||||
|
|
||||||
if let Some(right_prefix) = right_derivations.use_prefix_db {
|
if let Some(right_prefix) = right_term.use_prefix_db {
|
||||||
for (left_phrase, left_word) in left_derivations.iter().copied() {
|
for (left_phrase, left_word) in last_word_of_term_iter(left_term, phrase_interner) {
|
||||||
add_prefix_edges(
|
add_prefix_edges(
|
||||||
index,
|
index,
|
||||||
txn,
|
txn,
|
||||||
@ -172,37 +100,12 @@ pub fn build_step_visit_destination_node<'ctx, 'from_data>(
|
|||||||
// TODO: add safeguard in case the cartesian product is too large!
|
// TODO: add safeguard in case the cartesian product is too large!
|
||||||
// even if we restrict the word derivations to a maximum of 100, the size of the
|
// even if we restrict the word derivations to a maximum of 100, the size of the
|
||||||
// caterisan product could reach a maximum of 10_000 derivations, which is way too much.
|
// caterisan product could reach a maximum of 10_000 derivations, which is way too much.
|
||||||
// mMaybe prioritise the product of zero typo derivations, then the product of zero-typo/one-typo
|
// Maybe prioritise the product of zero typo derivations, then the product of zero-typo/one-typo
|
||||||
// + one-typo/zero-typo, then one-typo/one-typo, then ... until an arbitrary limit has been
|
// + one-typo/zero-typo, then one-typo/one-typo, then ... until an arbitrary limit has been
|
||||||
// reached
|
// reached
|
||||||
let first_word_right_phrase = if let Some(right_phrase_interned) = right_phrase {
|
|
||||||
let right_phrase = phrase_interner.get(right_phrase_interned);
|
|
||||||
right_phrase.words.first().copied().unwrap()
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
let right_single_word_iter: Vec<(Option<Interned<Phrase>>, Interned<String>)> =
|
|
||||||
right_derivations
|
|
||||||
.all_single_word_derivations_except_prefix_db()
|
|
||||||
.chain(first_word_right_phrase.iter().copied())
|
|
||||||
.map(|w| (right_phrase, w))
|
|
||||||
.collect();
|
|
||||||
let right_phrase_iter: Vec<(Option<Interned<Phrase>>, Interned<String>)> = right_derivations
|
|
||||||
.all_phrase_derivations()
|
|
||||||
.map(|right_phrase_interned: Interned<Phrase>| {
|
|
||||||
let right_phrase = phrase_interner.get(right_phrase_interned);
|
|
||||||
let first_word_right_phrase: Interned<String> =
|
|
||||||
right_phrase.words.first().unwrap().unwrap();
|
|
||||||
let r: (Option<Interned<Phrase>>, Interned<String>) =
|
|
||||||
(Some(right_phrase_interned), first_word_right_phrase);
|
|
||||||
r
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
let mut right_word_iter = right_single_word_iter;
|
|
||||||
right_word_iter.extend(right_phrase_iter);
|
|
||||||
|
|
||||||
for (left_phrase, left_word) in left_derivations.iter().copied() {
|
for (left_phrase, left_word) in last_word_of_term_iter(left_term, phrase_interner) {
|
||||||
for (right_phrase, right_word) in right_word_iter.iter().copied() {
|
for (right_word, right_phrase) in first_word_of_term_iter(right_term, phrase_interner) {
|
||||||
add_non_prefix_edges(
|
add_non_prefix_edges(
|
||||||
index,
|
index,
|
||||||
txn,
|
txn,
|
||||||
|
@ -29,7 +29,7 @@ pub fn compute_docids<'ctx>(
|
|||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
if !docids.is_empty() {
|
if !docids.is_empty() {
|
||||||
for phrase in phrases {
|
for phrase in phrases {
|
||||||
docids &= ctx.query_term_docids.get_phrase_docids(
|
docids &= ctx.term_docids.get_phrase_docids(
|
||||||
index,
|
index,
|
||||||
txn,
|
txn,
|
||||||
db_cache,
|
db_cache,
|
||||||
@ -56,7 +56,7 @@ pub fn compute_docids<'ctx>(
|
|||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
if !docids.is_empty() {
|
if !docids.is_empty() {
|
||||||
for phrase in phrases {
|
for phrase in phrases {
|
||||||
docids &= ctx.query_term_docids.get_phrase_docids(
|
docids &= ctx.term_docids.get_phrase_docids(
|
||||||
index,
|
index,
|
||||||
txn,
|
txn,
|
||||||
db_cache,
|
db_cache,
|
||||||
|
@ -40,7 +40,6 @@ pub enum ProximityGraph {}
|
|||||||
|
|
||||||
impl RankingRuleGraphTrait for ProximityGraph {
|
impl RankingRuleGraphTrait for ProximityGraph {
|
||||||
type EdgeCondition = ProximityEdge;
|
type EdgeCondition = ProximityEdge;
|
||||||
type BuildVisitedFromNode = (Vec<(Option<Interned<Phrase>>, Interned<String>)>, i8);
|
|
||||||
|
|
||||||
fn label_for_edge_condition(edge: &Self::EdgeCondition) -> String {
|
fn label_for_edge_condition(edge: &Self::EdgeCondition) -> String {
|
||||||
let ProximityEdge { pairs, proximity } = edge;
|
let ProximityEdge { pairs, proximity } = edge;
|
||||||
@ -55,25 +54,13 @@ impl RankingRuleGraphTrait for ProximityGraph {
|
|||||||
compute_docids::compute_docids(ctx, edge, universe)
|
compute_docids::compute_docids(ctx, edge, universe)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_step_visit_source_node<'ctx>(
|
fn build_edges<'ctx>(
|
||||||
ctx: &mut SearchContext<'ctx>,
|
|
||||||
from_node: &QueryNode,
|
|
||||||
) -> Result<Option<Self::BuildVisitedFromNode>> {
|
|
||||||
build::visit_from_node(ctx, from_node)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_step_visit_destination_node<'from_data, 'ctx: 'from_data>(
|
|
||||||
ctx: &mut SearchContext<'ctx>,
|
ctx: &mut SearchContext<'ctx>,
|
||||||
conditions_interner: &mut Interner<Self::EdgeCondition>,
|
conditions_interner: &mut Interner<Self::EdgeCondition>,
|
||||||
|
source_node: &QueryNode,
|
||||||
dest_node: &QueryNode,
|
dest_node: &QueryNode,
|
||||||
source_node_data: &'from_data Self::BuildVisitedFromNode,
|
|
||||||
) -> Result<Vec<(u8, EdgeCondition<Self::EdgeCondition>)>> {
|
) -> Result<Vec<(u8, EdgeCondition<Self::EdgeCondition>)>> {
|
||||||
build::build_step_visit_destination_node(
|
build::build_edges(ctx, conditions_interner, source_node, dest_node)
|
||||||
ctx,
|
|
||||||
conditions_interner,
|
|
||||||
source_node_data,
|
|
||||||
dest_node,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn log_state(
|
fn log_state(
|
||||||
|
@ -4,28 +4,24 @@ use super::empty_paths_cache::EmptyPathsCache;
|
|||||||
use super::{EdgeCondition, RankingRuleGraph, RankingRuleGraphTrait};
|
use super::{EdgeCondition, RankingRuleGraph, RankingRuleGraphTrait};
|
||||||
use crate::search::new::interner::{Interned, Interner};
|
use crate::search::new::interner::{Interned, Interner};
|
||||||
use crate::search::new::logger::SearchLogger;
|
use crate::search::new::logger::SearchLogger;
|
||||||
use crate::search::new::query_term::{LocatedQueryTerm, Phrase, QueryTerm, WordDerivations};
|
use crate::search::new::query_term::{LocatedQueryTerm, QueryTerm};
|
||||||
use crate::search::new::small_bitmap::SmallBitmap;
|
use crate::search::new::small_bitmap::SmallBitmap;
|
||||||
use crate::search::new::{QueryGraph, QueryNode, SearchContext};
|
use crate::search::new::{QueryGraph, QueryNode, SearchContext};
|
||||||
use crate::Result;
|
use crate::Result;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||||
pub enum TypoEdge {
|
pub struct TypoEdge {
|
||||||
Phrase { phrase: Interned<Phrase> },
|
term: Interned<QueryTerm>,
|
||||||
Word { derivations: Interned<WordDerivations>, nbr_typos: u8 },
|
nbr_typos: u8,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum TypoGraph {}
|
pub enum TypoGraph {}
|
||||||
|
|
||||||
impl RankingRuleGraphTrait for TypoGraph {
|
impl RankingRuleGraphTrait for TypoGraph {
|
||||||
type EdgeCondition = TypoEdge;
|
type EdgeCondition = TypoEdge;
|
||||||
type BuildVisitedFromNode = ();
|
|
||||||
|
|
||||||
fn label_for_edge_condition(edge: &Self::EdgeCondition) -> String {
|
fn label_for_edge_condition(edge: &Self::EdgeCondition) -> String {
|
||||||
match edge {
|
format!(", {} typos", edge.nbr_typos)
|
||||||
TypoEdge::Phrase { .. } => ", 0 typos".to_owned(),
|
|
||||||
TypoEdge::Word { nbr_typos, .. } => format!(", {nbr_typos} typos"),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_edge_condition<'db_cache, 'ctx>(
|
fn resolve_edge_condition<'db_cache, 'ctx>(
|
||||||
@ -39,124 +35,101 @@ impl RankingRuleGraphTrait for TypoGraph {
|
|||||||
db_cache,
|
db_cache,
|
||||||
word_interner,
|
word_interner,
|
||||||
phrase_interner,
|
phrase_interner,
|
||||||
derivations_interner,
|
term_interner,
|
||||||
query_term_docids,
|
term_docids: query_term_docids,
|
||||||
} = ctx;
|
} = ctx;
|
||||||
match edge {
|
|
||||||
&TypoEdge::Phrase { phrase } => Ok(universe
|
|
||||||
& query_term_docids.get_phrase_docids(
|
|
||||||
index,
|
|
||||||
txn,
|
|
||||||
db_cache,
|
|
||||||
word_interner,
|
|
||||||
phrase_interner,
|
|
||||||
phrase,
|
|
||||||
)?),
|
|
||||||
TypoEdge::Word { derivations, .. } => {
|
|
||||||
let docids = universe
|
|
||||||
& query_term_docids.get_word_derivations_docids(
|
|
||||||
index,
|
|
||||||
txn,
|
|
||||||
db_cache,
|
|
||||||
word_interner,
|
|
||||||
derivations_interner,
|
|
||||||
phrase_interner,
|
|
||||||
*derivations,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(docids)
|
let docids = universe
|
||||||
}
|
& query_term_docids.get_query_term_docids(
|
||||||
}
|
index,
|
||||||
|
txn,
|
||||||
|
db_cache,
|
||||||
|
word_interner,
|
||||||
|
term_interner,
|
||||||
|
phrase_interner,
|
||||||
|
edge.term,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(docids)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_step_visit_source_node<'ctx>(
|
fn build_edges<'ctx>(
|
||||||
_ctx: &mut SearchContext<'ctx>,
|
|
||||||
_from_node: &QueryNode,
|
|
||||||
) -> Result<Option<Self::BuildVisitedFromNode>> {
|
|
||||||
Ok(Some(()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_step_visit_destination_node<'from_data, 'ctx: 'from_data>(
|
|
||||||
ctx: &mut SearchContext<'ctx>,
|
ctx: &mut SearchContext<'ctx>,
|
||||||
conditions_interner: &mut Interner<Self::EdgeCondition>,
|
conditions_interner: &mut Interner<Self::EdgeCondition>,
|
||||||
|
_from_node: &QueryNode,
|
||||||
to_node: &QueryNode,
|
to_node: &QueryNode,
|
||||||
_from_node_data: &'from_data Self::BuildVisitedFromNode,
|
|
||||||
) -> Result<Vec<(u8, EdgeCondition<Self::EdgeCondition>)>> {
|
) -> Result<Vec<(u8, EdgeCondition<Self::EdgeCondition>)>> {
|
||||||
let SearchContext { derivations_interner, .. } = ctx;
|
let SearchContext { term_interner, .. } = ctx;
|
||||||
match to_node {
|
match to_node {
|
||||||
-            QueryNode::Term(LocatedQueryTerm { value, positions }) => match *value {
-                QueryTerm::Phrase { phrase } => Ok(vec![(
-                    0,
-                    EdgeCondition::Conditional(
-                        conditions_interner.insert(TypoEdge::Phrase { phrase }),
-                    ),
-                )]),
-                QueryTerm::Word { derivations } => {
-                    let mut edges = vec![];
-                    // Ngrams have a base typo cost
-                    // 2-gram -> equivalent to 1 typo
-                    // 3-gram -> equivalent to 2 typos
-                    let base_cost = positions.len().max(2) as u8;
-
-                    for nbr_typos in 0..=2 {
-                        let derivations = derivations_interner.get(derivations).clone();
-                        let new_derivations = match nbr_typos {
-                            0 => WordDerivations {
-                                original: derivations.original,
-                                is_prefix: derivations.is_prefix,
-                                zero_typo: derivations.zero_typo,
-                                prefix_of: derivations.prefix_of,
-                                synonyms: derivations.synonyms,
-                                split_words: None,
-                                one_typo: Box::new([]),
-                                two_typos: Box::new([]),
-                                use_prefix_db: derivations.use_prefix_db,
-                            },
-                            1 => {
-                                // What about split words and synonyms here?
-                                WordDerivations {
-                                    original: derivations.original,
-                                    is_prefix: false,
-                                    zero_typo: None,
-                                    prefix_of: Box::new([]),
-                                    synonyms: Box::new([]),
-                                    split_words: derivations.split_words,
-                                    one_typo: derivations.one_typo,
-                                    two_typos: Box::new([]),
-                                    use_prefix_db: None, // false because all items from use_prefix_db have 0 typos
-                                }
-                            }
-                            2 => {
-                                // What about split words and synonyms here?
-                                WordDerivations {
-                                    original: derivations.original,
-                                    zero_typo: None,
-                                    is_prefix: false,
-                                    prefix_of: Box::new([]),
-                                    synonyms: Box::new([]),
-                                    split_words: None,
-                                    one_typo: Box::new([]),
-                                    two_typos: derivations.two_typos,
-                                    use_prefix_db: None, // false because all items from use_prefix_db have 0 typos
-                                }
-                            }
-                            _ => panic!(),
-                        };
-                        if !new_derivations.is_empty() {
-                            edges.push((
-                                nbr_typos as u8 + base_cost,
-                                EdgeCondition::Conditional(conditions_interner.insert(
-                                    TypoEdge::Word {
-                                        derivations: derivations_interner.insert(new_derivations),
-                                        nbr_typos: nbr_typos as u8,
-                                    },
-                                )),
-                            ))
-                        }
-                    }
-                    Ok(edges)
-                }
-            },
+            QueryNode::Term(LocatedQueryTerm { value, positions }) => {
+                let mut edges = vec![];
+                // Ngrams have a base typo cost
+                // 2-gram -> equivalent to 1 typo
+                // 3-gram -> equivalent to 2 typos
+                let base_cost = positions.len().max(2) as u8;
+
+                for nbr_typos in 0..=2 {
+                    let term = term_interner.get(*value).clone();
+                    let new_term = match nbr_typos {
+                        0 => QueryTerm {
+                            original: term.original,
+                            is_prefix: term.is_prefix,
+                            zero_typo: term.zero_typo,
+                            prefix_of: term.prefix_of,
+                            synonyms: term.synonyms,
+                            split_words: None,
+                            one_typo: Box::new([]),
+                            two_typos: Box::new([]),
+                            use_prefix_db: term.use_prefix_db,
+                            is_ngram: term.is_ngram,
+                            phrase: term.phrase,
+                        },
+                        1 => {
+                            // What about split words and synonyms here?
+                            QueryTerm {
+                                original: term.original,
+                                is_prefix: false,
+                                zero_typo: None,
+                                prefix_of: Box::new([]),
+                                synonyms: Box::new([]),
+                                split_words: term.split_words,
+                                one_typo: term.one_typo,
+                                two_typos: Box::new([]),
+                                use_prefix_db: None, // false because all items from use_prefix_db have 0 typos
+                                is_ngram: term.is_ngram,
+                                phrase: None,
+                            }
+                        }
+                        2 => {
+                            // What about split words and synonyms here?
+                            QueryTerm {
+                                original: term.original,
+                                zero_typo: None,
+                                is_prefix: false,
+                                prefix_of: Box::new([]),
+                                synonyms: Box::new([]),
+                                split_words: None,
+                                one_typo: Box::new([]),
+                                two_typos: term.two_typos,
+                                use_prefix_db: None, // false because all items from use_prefix_db have 0 typos
+                                is_ngram: term.is_ngram,
+                                phrase: None,
+                            }
+                        }
+                        _ => panic!(),
+                    };
+                    if !new_term.is_empty() {
+                        edges.push((
+                            nbr_typos as u8 + base_cost,
+                            EdgeCondition::Conditional(conditions_interner.insert(TypoEdge {
+                                term: term_interner.insert(new_term),
+                                nbr_typos: nbr_typos as u8,
+                            })),
+                        ))
+                    }
+                }
+                Ok(edges)
+            }
             QueryNode::End => Ok(vec![(0, EdgeCondition::Unconditional)]),
             QueryNode::Deleted | QueryNode::Start => panic!(),
         }
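The cost arithmetic in build_edges is worth spelling out: every term edge pays a base cost of `positions.len().max(2)` (so single words and 2-grams start at 2, 3-grams at 3), and each tolerated typo adds one on top. The standalone illustration below only restates those numbers as read from the hunk above; it is not an official cost formula.

// Standalone illustration of the edge costs produced by the loop above:
// cost = nbr_typos + positions.len().max(2), for nbr_typos in 0..=2.
fn edge_costs(positions_len: usize) -> Vec<u8> {
    let base_cost = positions_len.max(2) as u8;
    (0u8..=2).map(|nbr_typos| nbr_typos + base_cost).collect()
}

fn main() {
    assert_eq!(edge_costs(1), vec![2, 3, 4]); // single word
    assert_eq!(edge_costs(2), vec![2, 3, 4]); // 2-gram: same base cost
    assert_eq!(edge_costs(3), vec![3, 4, 5]); // 3-gram: one extra
    println!("{:?}", edge_costs(3));
}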
@@ -4,12 +4,12 @@ use std::collections::VecDeque;

 use fxhash::FxHashMap;
 use heed::{BytesDecode, RoTxn};
-use roaring::{MultiOps, RoaringBitmap};
+use roaring::RoaringBitmap;

 use super::db_cache::DatabaseCache;
 use super::interner::{Interned, Interner};
 use super::query_graph::QUERY_GRAPH_NODE_LENGTH_LIMIT;
-use super::query_term::{Phrase, QueryTerm, WordDerivations};
+use super::query_term::{Phrase, QueryTerm};
 use super::small_bitmap::SmallBitmap;
 use super::{QueryGraph, QueryNode, SearchContext};
 use crate::{CboRoaringBitmapCodec, Index, Result, RoaringBitmapCodec};
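The `MultiOps` import disappears because the rewritten cache no longer collects bitmaps into a `Vec` and unions them at the end; it folds every source into a single accumulator as it goes. A standalone sketch of that accumulation pattern with plain roaring bitmaps (a hypothetical helper, not milli code):

use roaring::RoaringBitmap;

// Union a list of bitmaps by folding them into one accumulator,
// the same `docids |= ...` shape used in the new cache code below.
fn union_all(sources: Vec<RoaringBitmap>) -> RoaringBitmap {
    let mut docids = RoaringBitmap::new();
    for source in sources {
        docids |= source;
    }
    docids
}

fn main() {
    let a: RoaringBitmap = [1u32, 2, 3].into_iter().collect();
    let b: RoaringBitmap = [3u32, 4].into_iter().collect();
    let union = union_all(vec![a, b]);
    assert_eq!(union.iter().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
}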
@@ -17,7 +17,7 @@ use crate::{CboRoaringBitmapCodec, Index, Result, RoaringBitmapCodec};
 #[derive(Default)]
 pub struct QueryTermDocIdsCache {
     pub phrases: FxHashMap<Interned<Phrase>, RoaringBitmap>,
-    pub derivations: FxHashMap<Interned<WordDerivations>, RoaringBitmap>,
+    pub terms: FxHashMap<Interned<QueryTerm>, RoaringBitmap>,
 }
 impl QueryTermDocIdsCache {
     /// Get the document ids associated with the given phrase
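The cache itself is a plain memoisation map keyed by the interned term: compute the docids once, store them under the term's id, and hand back a reference on every later lookup. A standalone sketch of the pattern, using a std `HashMap` and a `u32` key as stand-ins for the fxhash map and `Interned<QueryTerm>`:

use roaring::RoaringBitmap;
use std::collections::HashMap;

#[derive(Default)]
struct DocIdsCache {
    terms: HashMap<u32, RoaringBitmap>, // u32 stands in for Interned<QueryTerm>
}

impl DocIdsCache {
    // Return the cached docids for a term, computing them on the first call only.
    fn get_or_compute(
        &mut self,
        term_id: u32,
        compute: impl FnOnce() -> RoaringBitmap,
    ) -> &RoaringBitmap {
        self.terms.entry(term_id).or_insert_with(compute)
    }
}

fn main() {
    let mut cache = DocIdsCache::default();
    let docids = cache.get_or_compute(7, || (0u32..5).collect());
    assert_eq!(docids.len(), 5);
    // Second lookup hits the cache; the closure is never called.
    let again = cache.get_or_compute(7, || unreachable!());
    assert_eq!(again.len(), 5);
}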
@@ -38,109 +38,53 @@ impl QueryTermDocIdsCache {
         let docids = &self.phrases[&phrase];
         Ok(docids)
     }
-    /// Get the document ids associated with the given word derivations
-    pub fn get_word_derivations_docids<'s, 'ctx>(
+    /// Get the document ids associated with the given term
+    pub fn get_query_term_docids<'s, 'ctx>(
         &'s mut self,
         index: &Index,
         txn: &'ctx RoTxn,
         db_cache: &mut DatabaseCache<'ctx>,
         word_interner: &Interner<String>,
-        derivations_interner: &Interner<WordDerivations>,
+        term_interner: &Interner<QueryTerm>,
         phrase_interner: &Interner<Phrase>,
-        derivations: Interned<WordDerivations>,
+        term_interned: Interned<QueryTerm>,
     ) -> Result<&'s RoaringBitmap> {
-        if self.derivations.contains_key(&derivations) {
-            return Ok(&self.derivations[&derivations]);
+        if self.terms.contains_key(&term_interned) {
+            return Ok(&self.terms[&term_interned]);
         };
-        let WordDerivations {
-            original: _,
-            is_prefix: _,
-            zero_typo,
-            prefix_of,
-            synonyms,
-            split_words,
-            one_typo,
-            two_typos,
-            use_prefix_db,
-        } = derivations_interner.get(derivations);
-        let mut or_docids = vec![];
-        for word in zero_typo
-            .iter()
-            .chain(prefix_of.iter())
-            .chain(one_typo.iter())
-            .chain(two_typos.iter())
-            .copied()
-        {
+        let mut docids = RoaringBitmap::new();
+        let term = term_interner.get(term_interned);
+        for word in term.all_single_words_except_prefix_db() {
             if let Some(word_docids) = db_cache.get_word_docids(index, txn, word_interner, word)? {
-                or_docids.push(word_docids);
+                docids |=
+                    RoaringBitmapCodec::bytes_decode(word_docids).ok_or(heed::Error::Decoding)?;
             }
         }
-        if let Some(prefix) = use_prefix_db {
+        for phrase in term.all_phrases() {
+            docids |= self.get_phrase_docids(
+                index,
+                txn,
+                db_cache,
+                word_interner,
+                phrase_interner,
+                phrase,
+            )?;
+        }
+
+        if let Some(prefix) = term.use_prefix_db {
             if let Some(prefix_docids) =
-                db_cache.get_word_prefix_docids(index, txn, word_interner, *prefix)?
+                db_cache.get_word_prefix_docids(index, txn, word_interner, prefix)?
             {
-                or_docids.push(prefix_docids);
+                docids |=
+                    RoaringBitmapCodec::bytes_decode(prefix_docids).ok_or(heed::Error::Decoding)?;
             }
         }
-        let mut docids = or_docids
-            .into_iter()
-            .map(|slice| RoaringBitmapCodec::bytes_decode(slice).unwrap())
-            .collect::<Vec<_>>();
-        for synonym in synonyms.iter().copied() {
-            // TODO: cache resolve_phrase?
-            docids.push(resolve_phrase(
-                index,
-                txn,
-                db_cache,
-                word_interner,
-                phrase_interner,
-                synonym,
-            )?);
-        }
-        if let Some(split_words) = split_words {
-            docids.push(resolve_phrase(
-                index,
-                txn,
-                db_cache,
-                word_interner,
-                phrase_interner,
-                *split_words,
-            )?);
-        }

-        let docids = MultiOps::union(docids);
-        let _ = self.derivations.insert(derivations, docids);
-        let docids = &self.derivations[&derivations];
+        let _ = self.terms.insert(term_interned, docids);
+        let docids = &self.terms[&term_interned];
         Ok(docids)
     }

-    /// Get the document ids associated with the given query term.
-    fn get_query_term_docids<'s, 'ctx>(
-        &'s mut self,
-        index: &Index,
-        txn: &'ctx RoTxn,
-        db_cache: &mut DatabaseCache<'ctx>,
-        word_interner: &Interner<String>,
-        derivations_interner: &Interner<WordDerivations>,
-        phrase_interner: &Interner<Phrase>,
-        term: &QueryTerm,
-    ) -> Result<&'s RoaringBitmap> {
-        match *term {
-            QueryTerm::Phrase { phrase } => {
-                self.get_phrase_docids(index, txn, db_cache, word_interner, phrase_interner, phrase)
-            }
-            QueryTerm::Word { derivations } => self.get_word_derivations_docids(
-                index,
-                txn,
-                db_cache,
-                word_interner,
-                derivations_interner,
-                phrase_interner,
-                derivations,
-            ),
-        }
-    }
 }

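The rewritten cache leans on two views over the merged term, `all_single_words_except_prefix_db()` and `all_phrases()`. Below is a hedged sketch of what such a merged structure and those helpers could look like; the field types are simplified (plain Strings instead of interned ids) and the helper bodies are an assumption based on how they are called above, not the real milli implementation.

// Hedged sketch of the merged term structure this commit introduces: one
// QueryTerm owns the phrase, the split words and every typo bucket, and the
// docids cache only needs two iterators over it.
#[derive(Default)]
struct QueryTerm {
    zero_typo: Option<String>,
    prefix_of: Vec<String>,
    one_typo: Vec<String>,
    two_typos: Vec<String>,
    phrase: Option<Vec<String>>,
    split_words: Option<Vec<String>>,
    use_prefix_db: Option<String>,
}

impl QueryTerm {
    // Every single word the term can match, excluding the prefix that is
    // resolved through the word-prefix database (assumed behaviour).
    fn all_single_words_except_prefix_db(&self) -> impl Iterator<Item = &String> {
        self.zero_typo
            .iter()
            .chain(self.prefix_of.iter())
            .chain(self.one_typo.iter())
            .chain(self.two_typos.iter())
    }

    // Every multi-word sequence the term can match: quoted phrase or split words
    // (assumed behaviour).
    fn all_phrases(&self) -> impl Iterator<Item = &Vec<String>> {
        self.phrase.iter().chain(self.split_words.iter())
    }
}

fn main() {
    let term = QueryTerm {
        zero_typo: Some("world".to_string()),
        one_typo: vec!["wordl".to_string()],
        split_words: Some(vec!["wor".to_string(), "ld".to_string()]),
        ..Default::default()
    };
    assert!(term.use_prefix_db.is_none());
    assert_eq!(term.all_single_words_except_prefix_db().count(), 2);
    assert_eq!(term.all_phrases().count(), 1);
}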
 pub fn resolve_query_graph<'ctx>(
@@ -154,8 +98,8 @@ pub fn resolve_query_graph<'ctx>(
         db_cache,
         word_interner,
         phrase_interner,
-        derivations_interner,
-        query_term_docids,
+        term_interner,
+        term_docids: query_term_docids,
         ..
     } = ctx;
     // TODO: there is a faster way to compute this big
@@ -183,16 +127,16 @@ pub fn resolve_query_graph<'ctx>(

         let node_docids = match n {
             QueryNode::Term(located_term) => {
-                let derivations_docids = query_term_docids.get_query_term_docids(
+                let term_docids = query_term_docids.get_query_term_docids(
                     index,
                     txn,
                     db_cache,
                     word_interner,
-                    derivations_interner,
+                    term_interner,
                     phrase_interner,
-                    &located_term.value,
+                    located_term.value,
                 )?;
-                predecessors_docids & derivations_docids
+                predecessors_docids & term_docids
             }
             QueryNode::Deleted => {
                 panic!()
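Finally, resolve_query_graph combines these per-term docids along the graph: a term node keeps only the documents its predecessors could still reach, intersected with the documents containing the term itself. A standalone sketch of that traversal over a hypothetical linear query graph, using plain roaring bitmaps rather than the milli types:

use roaring::RoaringBitmap;

// Hypothetical linear "query graph": each entry holds the docids of one term
// node. Start is the full universe and every node intersects whatever its
// predecessor could still reach, mirroring the loop in resolve_query_graph.
fn resolve_linear_graph(universe: &RoaringBitmap, terms: &[RoaringBitmap]) -> RoaringBitmap {
    let mut predecessors_docids = universe.clone();
    for term_docids in terms {
        predecessors_docids &= term_docids;
    }
    // What reaches the End node is what matches every term of the query.
    predecessors_docids
}

fn main() {
    let universe: RoaringBitmap = (0u32..10).collect();
    let the: RoaringBitmap = [1u32, 2, 3, 7].into_iter().collect();
    let quick: RoaringBitmap = [2u32, 3, 9].into_iter().collect();
    let result = resolve_linear_graph(&universe, &[the, quick]);
    assert_eq!(result.iter().collect::<Vec<_>>(), vec![2, 3]);
}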