Make it work by avoiding storing invalid stuff in the cache

This commit is contained in:
Clément Renault 2024-06-21 15:03:51 +02:00
parent 20862eceb3
commit 3fa49a993a
No known key found for this signature in database
GPG Key ID: F250A4C4E3AE5F5F
4 changed files with 9 additions and 9 deletions

View File

@ -247,14 +247,14 @@ fn resolve_negative_words(
#[tracing::instrument(level = "trace", skip_all, target = "search")]
fn resolve_negative_phrases(
ctx: &mut SearchContext,
universe: Option<&RoaringBitmap>,
_universe: Option<&RoaringBitmap>,
negative_phrases: &[LocatedQueryTerm],
) -> Result<RoaringBitmap> {
let mut negative_bitmap = RoaringBitmap::new();
for term in negative_phrases {
let query_term = ctx.term_interner.get(term.value);
if let Some(phrase) = query_term.original_phrase() {
negative_bitmap |= ctx.get_phrase_docids(universe, phrase)?;
negative_bitmap |= ctx.get_phrase_docids(None, phrase)?;
}
}
Ok(negative_bitmap)

View File

@ -29,7 +29,7 @@ fn compute_docids(
let candidates = match exact_term {
// TODO I move the intersection here
ExactTerm::Phrase(phrase) => ctx.get_phrase_docids(Some(universe), phrase)? & universe,
ExactTerm::Phrase(phrase) => ctx.get_phrase_docids(None, phrase)? & universe,
ExactTerm::Word(word) => {
ctx.word_docids(Some(universe), Word::Original(word))?.unwrap_or_default()
}

View File

@ -74,7 +74,7 @@ pub fn compute_docids(
if right_derivs.len() > 1 {
let universe = &universe;
if let Some(left_phrase) = left_phrase {
if universe.is_disjoint(ctx.get_phrase_docids(Some(universe), left_phrase)?) {
if universe.is_disjoint(ctx.get_phrase_docids(None, left_phrase)?) {
continue;
}
} else if let Some(left_word_docids) = ctx.word_docids(Some(universe), left_word)? {
@ -126,7 +126,7 @@ fn compute_prefix_edges(
// TODO we can clearly give the universe to this method
// Unfortunately, it is deserializing/computing stuff and
// keeping the result as a materialized bitmap.
let phrase_docids = ctx.get_phrase_docids(Some(&universe), phrase)?;
let phrase_docids = ctx.get_phrase_docids(None, phrase)?;
if !phrase_docids.is_empty() {
used_left_phrases.insert(phrase);
}
@ -184,7 +184,7 @@ fn compute_non_prefix_edges(
let mut universe = universe.clone();
for phrase in left_phrase.iter().chain(right_phrase.iter()).copied() {
universe &= ctx.get_phrase_docids(Some(&universe), phrase)?;
universe &= ctx.get_phrase_docids(None, phrase)?;
if universe.is_empty() {
return Ok(());
}

View File

@ -47,7 +47,7 @@ pub fn compute_query_term_subset_docids(
}
}
for phrase in term.all_phrases(ctx)? {
docids |= ctx.get_phrase_docids(universe, phrase)?;
docids |= ctx.get_phrase_docids(None, phrase)?;
}
if let Some(prefix) = term.use_prefix_db(ctx) {
@ -80,7 +80,7 @@ pub fn compute_query_term_subset_docids_within_field_id(
// guaranteed that all of its words are within a single fid.
if let Some(word) = phrase.words(ctx).iter().flatten().next() {
if let Some(word_fid_docids) = ctx.get_db_word_fid_docids(universe, *word, fid)? {
docids |= ctx.get_phrase_docids(Some(&word_fid_docids), phrase)?;
docids |= ctx.get_phrase_docids(None, phrase)? & word_fid_docids;
}
}
}
@ -118,7 +118,7 @@ pub fn compute_query_term_subset_docids_within_position(
if let Some(word_position_docids) =
ctx.get_db_word_position_docids(universe, *word, position)?
{
docids |= ctx.get_phrase_docids(Some(&word_position_docids), phrase)?;
docids |= ctx.get_phrase_docids(None, phrase)? & word_position_docids;
}
}
}