Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-09 22:48:54 +01:00)
Make it work by avoiding storing invalid stuff in the cache
This commit is contained in:
parent 41f51adbec
commit cd7a20fa32
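The idea behind the change: get_phrase_docids keeps its result in a cache, and a bitmap that was already intersected with one query's universe must not end up in that cache, because a later lookup with a different universe would reuse it and get a wrong answer. The call sites below therefore pass None and intersect with their own universe afterwards where needed. Below is a minimal, self-contained sketch of the pitfall and of the fix; the PhraseCache type and phrase_docids_from_db helper are made-up stand-ins, not Meilisearch code.

// Illustrative sketch only, not Meilisearch code.
use std::collections::HashMap;

use roaring::RoaringBitmap;

/// A cache keyed only by the phrase: whatever bitmap is stored under a key
/// is reused by every later query, whatever that query's universe is.
struct PhraseCache {
    entries: HashMap<u32, RoaringBitmap>,
}

impl PhraseCache {
    fn new() -> Self {
        Self { entries: HashMap::new() }
    }

    /// Buggy shape: the first universe to reach the cache is baked into the
    /// stored bitmap, so the entry is invalid for any other universe.
    fn get_filtered(&mut self, phrase: u32, universe: &RoaringBitmap) -> RoaringBitmap {
        self.entries
            .entry(phrase)
            .or_insert_with(|| phrase_docids_from_db(phrase) & universe)
            .clone()
    }

    /// Fixed shape: cache the full, universe-independent bitmap and let each
    /// caller intersect with its own universe afterwards.
    fn get_full(&mut self, phrase: u32) -> &RoaringBitmap {
        self.entries.entry(phrase).or_insert_with(|| phrase_docids_from_db(phrase))
    }
}

/// Stand-in for the real database lookup of a phrase's document ids.
fn phrase_docids_from_db(_phrase: u32) -> RoaringBitmap {
    (0..100).collect()
}

fn main() {
    let universe_a: RoaringBitmap = (0..10).collect();
    let universe_b: RoaringBitmap = (50..60).collect();

    let mut buggy = PhraseCache::new();
    // The first query stores docids already restricted to universe_a...
    assert_eq!(buggy.get_filtered(7, &universe_a).len(), 10);
    // ...so the second query gets back a bitmap containing none of the
    // documents it asked about, instead of the correct 50..60.
    assert!(buggy.get_filtered(7, &universe_b).is_disjoint(&universe_b));

    let mut fixed = PhraseCache::new();
    // Caching the full bitmap and intersecting at the call site stays
    // correct for every universe.
    assert_eq!((fixed.get_full(7) & &universe_b).len(), 10);
}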
@@ -247,14 +247,14 @@ fn resolve_negative_words(
 #[tracing::instrument(level = "trace", skip_all, target = "search::query")]
 fn resolve_negative_phrases(
     ctx: &mut SearchContext<'_>,
-    universe: Option<&RoaringBitmap>,
+    _universe: Option<&RoaringBitmap>,
     negative_phrases: &[LocatedQueryTerm],
 ) -> Result<RoaringBitmap> {
     let mut negative_bitmap = RoaringBitmap::new();
     for term in negative_phrases {
         let query_term = ctx.term_interner.get(term.value);
         if let Some(phrase) = query_term.original_phrase() {
-            negative_bitmap |= ctx.get_phrase_docids(universe, phrase)?;
+            negative_bitmap |= ctx.get_phrase_docids(None, phrase)?;
         }
     }
     Ok(negative_bitmap)
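Ignoring _universe here only makes the negative bitmap larger, not wrong: assuming the caller subtracts these documents from its universe (which is what a negative phrase means), removing the full negative set gives the same result as removing only its universe-restricted part. A tiny check of that set identity, with made-up document ids:

use roaring::RoaringBitmap;

fn main() {
    let universe: RoaringBitmap = (0..100).collect();
    let negative_full: RoaringBitmap = (50..500).collect();
    let negative_restricted = &negative_full & &universe;

    // U \ N == U \ (N ∩ U): subtracting extra documents that were never in
    // the universe changes nothing.
    assert_eq!(&universe - &negative_full, &universe - &negative_restricted);
}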
@@ -29,7 +29,7 @@ fn compute_docids(
 
     let candidates = match exact_term {
         // TODO I move the intersection here
-        ExactTerm::Phrase(phrase) => ctx.get_phrase_docids(Some(universe), phrase)? & universe,
+        ExactTerm::Phrase(phrase) => ctx.get_phrase_docids(None, phrase)? & universe,
         ExactTerm::Word(word) => {
             ctx.word_docids(Some(universe), Word::Original(word))?.unwrap_or_default()
         }
@@ -74,7 +74,7 @@ pub fn compute_docids(
         if right_derivs.len() > 1 {
             let universe = &universe;
             if let Some(left_phrase) = left_phrase {
-                if universe.is_disjoint(ctx.get_phrase_docids(Some(universe), left_phrase)?) {
+                if universe.is_disjoint(ctx.get_phrase_docids(None, left_phrase)?) {
                     continue;
                 }
             } else if let Some(left_word_docids) = ctx.word_docids(Some(universe), left_word)? {
@@ -126,7 +126,7 @@ fn compute_prefix_edges(
         // TODO we can clearly give the universe to this method
         // Unfortunately, it is deserializing/computing stuff and
         // keeping the result as a materialized bitmap.
-        let phrase_docids = ctx.get_phrase_docids(Some(&universe), phrase)?;
+        let phrase_docids = ctx.get_phrase_docids(None, phrase)?;
         if !phrase_docids.is_empty() {
             used_left_phrases.insert(phrase);
         }
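The comment in this hunk is the crux: the method materializes a bitmap and keeps it in a cache, so it cannot safely bake the current universe into what it stores. One way the TODO could eventually be honored, sketched here with assumed names (not the real SearchContext API), is to accept the optional universe but apply it only to the returned value:

// Hedged sketch with assumed names, not Meilisearch code.
use std::collections::HashMap;

use roaring::RoaringBitmap;

struct PhraseDocidsCache {
    // Full, universe-independent bitmaps, materialized once per phrase.
    materialized: HashMap<u32, RoaringBitmap>,
}

impl PhraseDocidsCache {
    fn get(&mut self, phrase: u32, universe: Option<&RoaringBitmap>) -> RoaringBitmap {
        let full = self
            .materialized
            .entry(phrase)
            .or_insert_with(|| deserialize_phrase_docids(phrase));
        match universe {
            // The restriction happens on the way out, so the cache entry
            // stays valid for every future universe.
            Some(universe) => &*full & universe,
            None => full.clone(),
        }
    }
}

/// Stand-in for the deserialization/computation the comment mentions.
fn deserialize_phrase_docids(_phrase: u32) -> RoaringBitmap {
    (0..1_000).collect()
}

fn main() {
    let mut cache = PhraseDocidsCache { materialized: HashMap::new() };
    let universe: RoaringBitmap = (10..20).collect();
    assert_eq!(cache.get(42, Some(&universe)).len(), 10);
    // The cached bitmap itself was never restricted.
    assert_eq!(cache.get(42, None).len(), 1_000);
}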
@@ -184,7 +184,7 @@ fn compute_non_prefix_edges(
     let mut universe = universe.clone();
 
     for phrase in left_phrase.iter().chain(right_phrase.iter()).copied() {
-        universe &= ctx.get_phrase_docids(Some(&universe), phrase)?;
+        universe &= ctx.get_phrase_docids(None, phrase)?;
         if universe.is_empty() {
             return Ok(());
         }
@@ -47,7 +47,7 @@ pub fn compute_query_term_subset_docids(
         }
     }
     for phrase in term.all_phrases(ctx)? {
-        docids |= ctx.get_phrase_docids(universe, phrase)?;
+        docids |= ctx.get_phrase_docids(None, phrase)?;
     }
 
     if let Some(prefix) = term.use_prefix_db(ctx) {
@@ -80,7 +80,7 @@ pub fn compute_query_term_subset_docids_within_field_id(
         // guaranteed that all of its words are within a single fid.
         if let Some(word) = phrase.words(ctx).iter().flatten().next() {
             if let Some(word_fid_docids) = ctx.get_db_word_fid_docids(universe, *word, fid)? {
-                docids |= ctx.get_phrase_docids(Some(&word_fid_docids), phrase)?;
+                docids |= ctx.get_phrase_docids(None, phrase)? & word_fid_docids;
             }
         }
     }
@@ -118,7 +118,7 @@ pub fn compute_query_term_subset_docids_within_position(
             if let Some(word_position_docids) =
                 ctx.get_db_word_position_docids(universe, *word, position)?
             {
-                docids |= ctx.get_phrase_docids(Some(&word_position_docids), phrase)?;
+                docids |= ctx.get_phrase_docids(None, phrase)? & word_position_docids;
             }
         }
     }