4717: Implement intersection at end on the search pipeline r=Kerollmops a=Kerollmops

This PR is akin to #4713 and #4682 because it uses the new RoaringBitmap method to do the intersections directly on the serialized bytes that LMDB/heed returns. More related work can be done on this subject, and I listed it in #4780.
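
To give an idea of the shape of this change, here is a minimal, self-contained sketch of the new lookup behavior. It is illustrative only: the `docids_within_universe` helper is hypothetical, and it falls back to a full `RoaringBitmap::deserialize_from` where milli's `CboRoaringBitmapCodec::intersection_with_serialized` performs the intersection on the serialized bytes directly.

```rust
use roaring::RoaringBitmap;

// Hypothetical helper mirroring the new `DatabaseCache::get_value` flow:
// when a `universe` is given, the decoded result is restricted to it.
// In milli the restriction happens on the serialized bytes themselves; this
// sketch deserializes fully so it runs with only the `roaring` crate.
fn docids_within_universe(
    serialized: Option<&[u8]>,
    universe: Option<&RoaringBitmap>,
) -> std::io::Result<Option<RoaringBitmap>> {
    let Some(bytes) = serialized else { return Ok(None) };
    let bitmap = RoaringBitmap::deserialize_from(bytes)?;
    Ok(Some(match universe {
        Some(universe) => &bitmap & universe,
        None => bitmap,
    }))
}

fn main() -> std::io::Result<()> {
    let stored: RoaringBitmap = (0..1_000).collect();
    let universe: RoaringBitmap = (500..600).collect();

    let mut bytes = Vec::new();
    stored.serialize_into(&mut bytes)?;

    let docids = docids_within_universe(Some(&bytes), Some(&universe))?.unwrap();
    assert_eq!(docids.len(), 100);
    Ok(())
}
```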

Running the following command shows where we use bitand/intersection operations and where we can potentially apply this optimization.
```sh
rg --type rust --vimgrep '\s&[=\s]' milli/src/search
```

Co-authored-by: Clément Renault <clement@meilisearch.com>
Commit f36f34c2f7 by meili-bors[bot], 2024-07-10 15:01:33 +00:00, committed by GitHub
12 changed files with 220 additions and 132 deletions

View File

@ -46,36 +46,70 @@ pub struct DatabaseCache<'ctx> {
pub word_prefix_fids: FxHashMap<Interned<String>, Vec<u16>>,
}
impl<'ctx> DatabaseCache<'ctx> {
fn get_value<'v, K1, KC, DC>(
fn get_value<'v, K1, KC>(
txn: &'ctx RoTxn<'_>,
cache_key: K1,
db_key: &'v KC::EItem,
cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
universe: Option<&RoaringBitmap>,
db: Database<KC, Bytes>,
) -> Result<Option<DC::DItem>>
) -> Result<Option<RoaringBitmap>>
where
K1: Copy + Eq + Hash,
KC: BytesEncode<'v>,
DC: BytesDecodeOwned,
{
if let Entry::Vacant(entry) = cache.entry(cache_key) {
let bitmap_ptr = db.get(txn, db_key)?.map(Cow::Borrowed);
entry.insert(bitmap_ptr);
}
match cache.get(&cache_key).unwrap() {
Some(Cow::Borrowed(bytes)) => DC::bytes_decode_owned(bytes)
let bitmap_bytes = match cache.get(&cache_key).unwrap() {
Some(Cow::Borrowed(bytes)) => bytes,
Some(Cow::Owned(bytes)) => bytes.as_slice(),
None => return Ok(None),
};
match (bitmap_bytes, universe) {
(bytes, Some(universe)) => {
CboRoaringBitmapCodec::intersection_with_serialized(bytes, universe)
.map(Some)
.map_err(Into::into)
}
(bytes, None) => CboRoaringBitmapCodec::bytes_decode_owned(bytes)
.map(Some)
.map_err(heed::Error::Decoding)
.map_err(Into::into),
Some(Cow::Owned(bytes)) => DC::bytes_decode_owned(bytes)
.map(Some)
.map_err(heed::Error::Decoding)
.map_err(Into::into),
None => Ok(None),
}
}
fn get_value_length<'v, K1, KC>(
txn: &'ctx RoTxn<'_>,
cache_key: K1,
db_key: &'v KC::EItem,
cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
db: Database<KC, Bytes>,
) -> Result<Option<u64>>
where
K1: Copy + Eq + Hash,
KC: BytesEncode<'v>,
{
if let Entry::Vacant(entry) = cache.entry(cache_key) {
let bitmap_ptr = db.get(txn, db_key)?.map(Cow::Borrowed);
entry.insert(bitmap_ptr);
}
let bitmap_bytes = match cache.get(&cache_key).unwrap() {
Some(Cow::Borrowed(bytes)) => bytes,
Some(Cow::Owned(bytes)) => bytes.as_slice(),
None => return Ok(None),
};
CboRoaringBitmapLenCodec::bytes_decode_owned(bitmap_bytes)
.map(Some)
.map_err(heed::Error::Decoding)
.map_err(Into::into)
}
fn get_value_from_keys<'v, K1, KC, DC>(
txn: &'ctx RoTxn<'_>,
cache_key: K1,
@ -137,11 +171,15 @@ impl<'ctx> SearchContext<'ctx> {
}
}
pub fn word_docids(&mut self, word: Word) -> Result<Option<RoaringBitmap>> {
pub fn word_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word: Word,
) -> Result<Option<RoaringBitmap>> {
match word {
Word::Original(word) => {
let exact = self.get_db_exact_word_docids(word)?;
let tolerant = self.get_db_word_docids(word)?;
let exact = self.get_db_exact_word_docids(universe, word)?;
let tolerant = self.get_db_word_docids(universe, word)?;
Ok(match (exact, tolerant) {
(None, None) => None,
(None, Some(tolerant)) => Some(tolerant),
@ -153,12 +191,16 @@ impl<'ctx> SearchContext<'ctx> {
}
})
}
Word::Derived(word) => self.get_db_word_docids(word),
Word::Derived(word) => self.get_db_word_docids(universe, word),
}
}
/// Retrieve or insert the given value in the `word_docids` database.
fn get_db_word_docids(&mut self, word: Interned<String>) -> Result<Option<RoaringBitmap>> {
fn get_db_word_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
match &self.restricted_fids {
Some(restricted_fids) => {
let interned = self.word_interner.get(word).as_str();
@ -174,11 +216,12 @@ impl<'ctx> SearchContext<'ctx> {
merge_cbo_roaring_bitmaps,
)
}
None => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
None => DatabaseCache::get_value::<_, _>(
self.txn,
word,
self.word_interner.get(word).as_str(),
&mut self.db_cache.word_docids,
universe,
self.index.word_docids.remap_data_type::<Bytes>(),
),
}
@ -186,6 +229,7 @@ impl<'ctx> SearchContext<'ctx> {
fn get_db_exact_word_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
match &self.restricted_fids {
@ -203,21 +247,26 @@ impl<'ctx> SearchContext<'ctx> {
merge_cbo_roaring_bitmaps,
)
}
None => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
None => DatabaseCache::get_value::<_, _>(
self.txn,
word,
self.word_interner.get(word).as_str(),
&mut self.db_cache.exact_word_docids,
universe,
self.index.exact_word_docids.remap_data_type::<Bytes>(),
),
}
}
pub fn word_prefix_docids(&mut self, prefix: Word) -> Result<Option<RoaringBitmap>> {
pub fn word_prefix_docids(
&mut self,
universe: Option<&RoaringBitmap>,
prefix: Word,
) -> Result<Option<RoaringBitmap>> {
match prefix {
Word::Original(prefix) => {
let exact = self.get_db_exact_word_prefix_docids(prefix)?;
let tolerant = self.get_db_word_prefix_docids(prefix)?;
let exact = self.get_db_exact_word_prefix_docids(universe, prefix)?;
let tolerant = self.get_db_word_prefix_docids(universe, prefix)?;
Ok(match (exact, tolerant) {
(None, None) => None,
(None, Some(tolerant)) => Some(tolerant),
@ -229,13 +278,14 @@ impl<'ctx> SearchContext<'ctx> {
}
})
}
Word::Derived(prefix) => self.get_db_word_prefix_docids(prefix),
Word::Derived(prefix) => self.get_db_word_prefix_docids(universe, prefix),
}
}
/// Retrieve or insert the given value in the `word_prefix_docids` database.
fn get_db_word_prefix_docids(
&mut self,
universe: Option<&RoaringBitmap>,
prefix: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
match &self.restricted_fids {
@ -253,11 +303,12 @@ impl<'ctx> SearchContext<'ctx> {
merge_cbo_roaring_bitmaps,
)
}
None => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
None => DatabaseCache::get_value::<_, _>(
self.txn,
prefix,
self.word_interner.get(prefix).as_str(),
&mut self.db_cache.word_prefix_docids,
universe,
self.index.word_prefix_docids.remap_data_type::<Bytes>(),
),
}
@ -265,6 +316,7 @@ impl<'ctx> SearchContext<'ctx> {
fn get_db_exact_word_prefix_docids(
&mut self,
universe: Option<&RoaringBitmap>,
prefix: Interned<String>,
) -> Result<Option<RoaringBitmap>> {
match &self.restricted_fids {
@ -282,11 +334,12 @@ impl<'ctx> SearchContext<'ctx> {
merge_cbo_roaring_bitmaps,
)
}
None => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
None => DatabaseCache::get_value::<_, _>(
self.txn,
prefix,
self.word_interner.get(prefix).as_str(),
&mut self.db_cache.exact_word_prefix_docids,
universe,
self.index.exact_word_prefix_docids.remap_data_type::<Bytes>(),
),
}
@ -294,6 +347,7 @@ impl<'ctx> SearchContext<'ctx> {
pub fn get_db_word_pair_proximity_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word1: Interned<String>,
word2: Interned<String>,
proximity: u8,
@ -320,8 +374,8 @@ impl<'ctx> SearchContext<'ctx> {
for fid in fids {
// for each field, intersect left word bitmap and right word bitmap,
// then merge the result in a global bitmap before storing it in the cache.
let word1_docids = self.get_db_word_fid_docids(word1, fid)?;
let word2_docids = self.get_db_word_fid_docids(word2, fid)?;
let word1_docids = self.get_db_word_fid_docids(universe, word1, fid)?;
let word2_docids = self.get_db_word_fid_docids(universe, word2, fid)?;
if let (Some(word1_docids), Some(word2_docids)) =
(word1_docids, word2_docids)
{
@ -341,7 +395,33 @@ impl<'ctx> SearchContext<'ctx> {
Ok(docids)
}
ProximityPrecision::ByWord => DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
ProximityPrecision::ByWord => DatabaseCache::get_value::<_, _>(
self.txn,
(proximity, word1, word2),
&(
proximity,
self.word_interner.get(word1).as_str(),
self.word_interner.get(word2).as_str(),
),
&mut self.db_cache.word_pair_proximity_docids,
universe,
self.index.word_pair_proximity_docids.remap_data_type::<Bytes>(),
),
}
}
pub fn get_db_word_pair_proximity_docids_len(
&mut self,
universe: Option<&RoaringBitmap>,
word1: Interned<String>,
word2: Interned<String>,
proximity: u8,
) -> Result<Option<u64>> {
match self.index.proximity_precision(self.txn)?.unwrap_or_default() {
ProximityPrecision::ByAttribute => Ok(self
.get_db_word_pair_proximity_docids(universe, word1, word2, proximity)?
.map(|d| d.len())),
ProximityPrecision::ByWord => DatabaseCache::get_value_length::<_, _>(
self.txn,
(proximity, word1, word2),
&(
@ -355,34 +435,9 @@ impl<'ctx> SearchContext<'ctx> {
}
}
pub fn get_db_word_pair_proximity_docids_len(
&mut self,
word1: Interned<String>,
word2: Interned<String>,
proximity: u8,
) -> Result<Option<u64>> {
match self.index.proximity_precision(self.txn)?.unwrap_or_default() {
ProximityPrecision::ByAttribute => Ok(self
.get_db_word_pair_proximity_docids(word1, word2, proximity)?
.map(|d| d.len())),
ProximityPrecision::ByWord => {
DatabaseCache::get_value::<_, _, CboRoaringBitmapLenCodec>(
self.txn,
(proximity, word1, word2),
&(
proximity,
self.word_interner.get(word1).as_str(),
self.word_interner.get(word2).as_str(),
),
&mut self.db_cache.word_pair_proximity_docids,
self.index.word_pair_proximity_docids.remap_data_type::<Bytes>(),
)
}
}
}
pub fn get_db_word_prefix_pair_proximity_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word1: Interned<String>,
prefix2: Interned<String>,
mut proximity: u8,
@ -409,8 +464,9 @@ impl<'ctx> SearchContext<'ctx> {
// for each field, intersect left word bitmap and right word bitmap,
// then merge the result in a global bitmap before storing it in the cache.
for fid in fids {
let word1_docids = self.get_db_word_fid_docids(word1, fid)?;
let prefix2_docids = self.get_db_word_prefix_fid_docids(prefix2, fid)?;
let word1_docids = self.get_db_word_fid_docids(universe, word1, fid)?;
let prefix2_docids =
self.get_db_word_prefix_fid_docids(universe, prefix2, fid)?;
if let (Some(word1_docids), Some(prefix2_docids)) =
(word1_docids, prefix2_docids)
{
@ -452,16 +508,18 @@ impl<'ctx> SearchContext<'ctx> {
pub fn get_db_prefix_word_pair_proximity_docids(
&mut self,
universe: Option<&RoaringBitmap>,
left_prefix: Interned<String>,
right: Interned<String>,
proximity: u8,
) -> Result<Option<RoaringBitmap>> {
// only accept exact matches on reverted positions
self.get_db_word_pair_proximity_docids(left_prefix, right, proximity)
self.get_db_word_pair_proximity_docids(universe, left_prefix, right, proximity)
}
pub fn get_db_word_fid_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word: Interned<String>,
fid: u16,
) -> Result<Option<RoaringBitmap>> {
@ -470,17 +528,19 @@ impl<'ctx> SearchContext<'ctx> {
return Ok(None);
}
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value::<_, _>(
self.txn,
(word, fid),
&(self.word_interner.get(word).as_str(), fid),
&mut self.db_cache.word_fid_docids,
universe,
self.index.word_fid_docids.remap_data_type::<Bytes>(),
)
}
pub fn get_db_word_prefix_fid_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word_prefix: Interned<String>,
fid: u16,
) -> Result<Option<RoaringBitmap>> {
@ -489,11 +549,12 @@ impl<'ctx> SearchContext<'ctx> {
return Ok(None);
}
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value::<_, _>(
self.txn,
(word_prefix, fid),
&(self.word_interner.get(word_prefix).as_str(), fid),
&mut self.db_cache.word_prefix_fid_docids,
universe,
self.index.word_prefix_fid_docids.remap_data_type::<Bytes>(),
)
}
@ -554,28 +615,32 @@ impl<'ctx> SearchContext<'ctx> {
pub fn get_db_word_position_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word: Interned<String>,
position: u16,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value::<_, _>(
self.txn,
(word, position),
&(self.word_interner.get(word).as_str(), position),
&mut self.db_cache.word_position_docids,
universe,
self.index.word_position_docids.remap_data_type::<Bytes>(),
)
}
pub fn get_db_word_prefix_position_docids(
&mut self,
universe: Option<&RoaringBitmap>,
word_prefix: Interned<String>,
position: u16,
) -> Result<Option<RoaringBitmap>> {
DatabaseCache::get_value::<_, _, CboRoaringBitmapCodec>(
DatabaseCache::get_value::<_, _>(
self.txn,
(word_prefix, position),
&(self.word_interner.get(word_prefix).as_str(), position),
&mut self.db_cache.word_prefix_position_docids,
universe,
self.index.word_prefix_position_docids.remap_data_type::<Bytes>(),
)
}

View File

@ -1,3 +1,4 @@
use heed::types::Bytes;
use roaring::{MultiOps, RoaringBitmap};
use super::query_graph::QueryGraph;
@ -5,7 +6,7 @@ use super::ranking_rules::{RankingRule, RankingRuleOutput};
use crate::score_details::{self, ScoreDetails};
use crate::search::new::query_graph::QueryNodeData;
use crate::search::new::query_term::ExactTerm;
use crate::{Result, SearchContext, SearchLogger};
use crate::{CboRoaringBitmapCodec, Result, SearchContext, SearchLogger};
/// A ranking rule that produces 3 disjoint buckets:
///
@ -171,9 +172,9 @@ impl State {
// Note: Since the position is stored bucketed in word_position_docids, for queries with a lot of
// longer phrases we'll be losing on precision here.
let bucketed_position = crate::bucketed_position(position + offset);
let word_position_docids =
ctx.get_db_word_position_docids(*word, bucketed_position)?.unwrap_or_default()
& universe;
let word_position_docids = ctx
.get_db_word_position_docids(Some(universe), *word, bucketed_position)?
.unwrap_or_default();
candidates &= word_position_docids;
if candidates.is_empty() {
return Ok(State::Empty(query_graph.clone()));
@ -192,26 +193,34 @@ impl State {
let mut candidates_per_attribute = Vec::with_capacity(searchable_fields_ids.len());
// then check that there exists at least one attribute that has all of the terms
for fid in searchable_fields_ids {
let mut intersection = MultiOps::intersection(
let intersection = MultiOps::intersection(
words_positions
.iter()
.flat_map(|(words, ..)| words.iter())
// ignore stop words in phrases
.flatten()
.map(|word| -> Result<_> {
Ok(ctx.get_db_word_fid_docids(*word, fid)?.unwrap_or_default())
Ok(ctx
.get_db_word_fid_docids(Some(&candidates), *word, fid)?
.unwrap_or_default())
}),
)?;
intersection &= &candidates;
if !intersection.is_empty() {
// Although not really worth it in terms of performance,
// it would be good to put this in cache for the sake of consistency
let candidates_with_exact_word_count = if count_all_positions < u8::MAX as usize {
ctx.index
let bitmap_bytes = ctx
.index
.field_id_word_count_docids
.get(ctx.txn, &(fid, count_all_positions as u8))?
.unwrap_or_default()
& universe
.remap_data_type::<Bytes>()
.get(ctx.txn, &(fid, count_all_positions as u8))?;
match bitmap_bytes {
Some(bytes) => {
CboRoaringBitmapCodec::intersection_with_serialized(bytes, universe)?
}
None => RoaringBitmap::default(),
}
} else {
RoaringBitmap::default()
};
@ -234,6 +243,8 @@ impl State {
let (state, output) = match state {
State::Uninitialized => (state, None),
State::ExactAttribute(query_graph, candidates_per_attribute) => {
// TODO it can be much faster to do the intersections before the unions...
// or maybe the candidates_per_attribute do not contain anything outside the universe
let mut candidates = MultiOps::union(candidates_per_attribute.iter().map(
|FieldCandidates { start_with_exact, exact_word_count }| {
start_with_exact & exact_word_count
@ -252,6 +263,8 @@ impl State {
)
}
State::AttributeStarts(query_graph, candidates_per_attribute) => {
// TODO it can be much faster to do the intersections before the unions...
// or maybe the candidates_per_attribute do not contain anything outside the universe
let mut candidates = MultiOps::union(candidates_per_attribute.into_iter().map(
|FieldCandidates { mut start_with_exact, exact_word_count }| {
start_with_exact -= exact_word_count;
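
For context on the pattern used in the hunks above: `MultiOps` comes from the `roaring` crate and intersects (or unions) an entire iterator of bitmaps in one call rather than folding with `&=`. A tiny standalone illustration with arbitrary values, unrelated to milli's types:

```rust
use roaring::{MultiOps, RoaringBitmap};

fn main() {
    // Three arbitrary bitmaps; only the range 75..100 is present in all of them.
    let a: RoaringBitmap = (0..100).collect();
    let b: RoaringBitmap = (50..150).collect();
    let c: RoaringBitmap = (75..200).collect();

    // Intersect the whole iterator in one call.
    let common = MultiOps::intersection([a, b, c].into_iter());
    assert_eq!(common, (75..100).collect::<RoaringBitmap>());
}
```

When the iterator yields `Result`s, as in the code above, the operation returns a `Result` as well, which is why those call sites end with `?`.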

View File

@ -232,11 +232,12 @@ fn resolve_universe(
#[tracing::instrument(level = "trace", skip_all, target = "search::query")]
fn resolve_negative_words(
ctx: &mut SearchContext<'_>,
universe: Option<&RoaringBitmap>,
negative_words: &[Word],
) -> Result<RoaringBitmap> {
let mut negative_bitmap = RoaringBitmap::new();
for &word in negative_words {
if let Some(bitmap) = ctx.word_docids(word)? {
if let Some(bitmap) = ctx.word_docids(universe, word)? {
negative_bitmap |= bitmap;
}
}
@ -686,7 +687,7 @@ pub fn execute_search(
located_query_terms_from_tokens(ctx, tokens, words_limit)?;
used_negative_operator = !negative_words.is_empty() || !negative_phrases.is_empty();
let ignored_documents = resolve_negative_words(ctx, &negative_words)?;
let ignored_documents = resolve_negative_words(ctx, Some(&universe), &negative_words)?;
let ignored_phrases = resolve_negative_phrases(ctx, &negative_phrases)?;
universe -= ignored_documents;

View File

@ -302,7 +302,7 @@ impl QueryGraph {
for (_, node) in self.nodes.iter() {
match &node.data {
QueryNodeData::Term(t) => {
let docids = compute_query_term_subset_docids(ctx, &t.term_subset)?;
let docids = compute_query_term_subset_docids(ctx, None, &t.term_subset)?;
for id in t.term_ids.clone() {
term_docids
.entry(id)

View File

@ -417,7 +417,7 @@ fn split_best_frequency(
let left = ctx.word_interner.insert(left.to_owned());
let right = ctx.word_interner.insert(right.to_owned());
if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(left, right, 1)? {
if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(None, left, right, 1)? {
if best.map_or(true, |(old, _, _)| frequency > old) {
best = Some((frequency, left, right));
}

View File

@ -26,18 +26,15 @@ fn compute_docids(
} else {
return Ok(Default::default());
};
let mut candidates = match exact_term {
ExactTerm::Phrase(phrase) => ctx.get_phrase_docids(phrase)?.clone(),
let candidates = match exact_term {
// TODO I moved the intersection here
ExactTerm::Phrase(phrase) => ctx.get_phrase_docids(phrase)? & universe,
ExactTerm::Word(word) => {
if let Some(word_candidates) = ctx.word_docids(Word::Original(word))? {
word_candidates
} else {
return Ok(Default::default());
}
ctx.word_docids(Some(universe), Word::Original(word))?.unwrap_or_default()
}
};
candidates &= universe;
Ok(candidates)
}
@ -59,7 +56,7 @@ impl RankingRuleGraphTrait for ExactnessGraph {
}
ExactnessCondition::Any(dest_node) => {
let docids =
universe & compute_query_term_subset_docids(ctx, &dest_node.term_subset)?;
compute_query_term_subset_docids(ctx, Some(universe), &dest_node.term_subset)?;
(docids, dest_node.clone())
}
};

View File

@ -29,10 +29,12 @@ impl RankingRuleGraphTrait for FidGraph {
let FidCondition { term, .. } = condition;
let docids = if let Some(fid) = condition.fid {
// maybe compute_query_term_subset_docids_within_field_id should accept a universe as argument
let docids =
compute_query_term_subset_docids_within_field_id(ctx, &term.term_subset, fid)?;
docids & universe
compute_query_term_subset_docids_within_field_id(
ctx,
Some(universe),
&term.term_subset,
fid,
)?
} else {
RoaringBitmap::new()
};

View File

@ -28,14 +28,15 @@ impl RankingRuleGraphTrait for PositionGraph {
) -> Result<ComputedCondition> {
let PositionCondition { term, positions } = condition;
let mut docids = RoaringBitmap::new();
// TODO use MultiOps to do the big union
for position in positions {
// maybe compute_query_term_subset_docids_within_position should accept a universe as argument
docids |= universe
& compute_query_term_subset_docids_within_position(
ctx,
&term.term_subset,
*position,
)?;
docids |= compute_query_term_subset_docids_within_position(
ctx,
Some(universe),
&term.term_subset,
*position,
)?;
}
Ok(ComputedCondition {
docids,

View File

@ -22,10 +22,8 @@ pub fn compute_docids(
(left_term, right_term, *cost)
}
ProximityCondition::Term { term } => {
let mut docids = compute_query_term_subset_docids(ctx, &term.term_subset)?;
docids &= universe;
return Ok(ComputedCondition {
docids,
docids: compute_query_term_subset_docids(ctx, Some(universe), &term.term_subset)?,
universe_len: universe.len(),
start_term_subset: None,
end_term_subset: term.clone(),
@ -79,8 +77,8 @@ pub fn compute_docids(
if universe.is_disjoint(ctx.get_phrase_docids(left_phrase)?) {
continue;
}
} else if let Some(left_word_docids) = ctx.word_docids(left_word)? {
if universe.is_disjoint(&left_word_docids) {
} else if let Some(left_word_docids) = ctx.word_docids(Some(universe), left_word)? {
if left_word_docids.is_empty() {
continue;
}
}
@ -125,6 +123,9 @@ fn compute_prefix_edges(
let mut universe = universe.clone();
if let Some(phrase) = left_phrase {
// TODO we can clearly give the universe to this method
// Unfortunately, it is deserializing/computing stuff and
// keeping the result as a materialized bitmap.
let phrase_docids = ctx.get_phrase_docids(phrase)?;
if !phrase_docids.is_empty() {
used_left_phrases.insert(phrase);
@ -135,10 +136,12 @@ fn compute_prefix_edges(
}
}
if let Some(new_docids) =
ctx.get_db_word_prefix_pair_proximity_docids(left_word, right_prefix, forward_proximity)?
{
let new_docids = &universe & new_docids;
if let Some(new_docids) = ctx.get_db_word_prefix_pair_proximity_docids(
Some(&universe),
left_word,
right_prefix,
forward_proximity,
)? {
if !new_docids.is_empty() {
used_left_words.insert(left_word);
used_right_prefix.insert(right_prefix);
@ -149,11 +152,11 @@ fn compute_prefix_edges(
// No swapping when computing the proximity between a phrase and a word
if left_phrase.is_none() {
if let Some(new_docids) = ctx.get_db_prefix_word_pair_proximity_docids(
Some(&universe),
right_prefix,
left_word,
backward_proximity,
)? {
let new_docids = &universe & new_docids;
if !new_docids.is_empty() {
used_left_words.insert(left_word);
used_right_prefix.insert(right_prefix);
@ -179,26 +182,26 @@ fn compute_non_prefix_edges(
let mut universe = universe.clone();
for phrase in left_phrase.iter().chain(right_phrase.iter()).copied() {
let phrase_docids = ctx.get_phrase_docids(phrase)?;
universe &= phrase_docids;
universe &= ctx.get_phrase_docids(phrase)?;
if universe.is_empty() {
return Ok(());
}
}
if let Some(new_docids) =
ctx.get_db_word_pair_proximity_docids(word1, word2, forward_proximity)?
ctx.get_db_word_pair_proximity_docids(Some(&universe), word1, word2, forward_proximity)?
{
let new_docids = &universe & new_docids;
if !new_docids.is_empty() {
*docids |= new_docids;
}
}
if backward_proximity >= 1 && left_phrase.is_none() && right_phrase.is_none() {
if let Some(new_docids) =
ctx.get_db_word_pair_proximity_docids(word2, word1, backward_proximity)?
{
let new_docids = &universe & new_docids;
if let Some(new_docids) = ctx.get_db_word_pair_proximity_docids(
Some(&universe),
word2,
word1,
backward_proximity,
)? {
if !new_docids.is_empty() {
*docids |= new_docids;
}

View File

@ -27,8 +27,7 @@ impl RankingRuleGraphTrait for TypoGraph {
) -> Result<ComputedCondition> {
let TypoCondition { term, .. } = condition;
// maybe compute_query_term_subset_docids should accept a universe as argument
let mut docids = compute_query_term_subset_docids(ctx, &term.term_subset)?;
docids &= universe;
let docids = compute_query_term_subset_docids(ctx, Some(universe), &term.term_subset)?;
Ok(ComputedCondition {
docids,

View File

@ -26,8 +26,7 @@ impl RankingRuleGraphTrait for WordsGraph {
) -> Result<ComputedCondition> {
let WordsCondition { term, .. } = condition;
// maybe compute_query_term_subset_docids should accept a universe as argument
let mut docids = compute_query_term_subset_docids(ctx, &term.term_subset)?;
docids &= universe;
let docids = compute_query_term_subset_docids(ctx, Some(universe), &term.term_subset)?;
Ok(ComputedCondition {
docids,

View File

@ -24,6 +24,7 @@ impl<'ctx> SearchContext<'ctx> {
return Ok(&self.phrase_docids.cache[&phrase]);
};
let docids = compute_phrase_docids(self, phrase)?;
// TODO can we improve this? There is an issue: we keep the result in the cache...
let _ = self.phrase_docids.cache.insert(phrase, docids);
let docids = &self.phrase_docids.cache[&phrase];
Ok(docids)
@ -31,11 +32,13 @@ impl<'ctx> SearchContext<'ctx> {
}
pub fn compute_query_term_subset_docids(
ctx: &mut SearchContext<'_>,
universe: Option<&RoaringBitmap>,
term: &QueryTermSubset,
) -> Result<RoaringBitmap> {
let mut docids = RoaringBitmap::new();
// TODO use the MultiOps trait to do large intersections
for word in term.all_single_words_except_prefix_db(ctx)? {
if let Some(word_docids) = ctx.word_docids(word)? {
if let Some(word_docids) = ctx.word_docids(universe, word)? {
docids |= word_docids;
}
}
@ -44,22 +47,26 @@ pub fn compute_query_term_subset_docids(
}
if let Some(prefix) = term.use_prefix_db(ctx) {
if let Some(prefix_docids) = ctx.word_prefix_docids(prefix)? {
if let Some(prefix_docids) = ctx.word_prefix_docids(universe, prefix)? {
docids |= prefix_docids;
}
}
Ok(docids)
match universe {
Some(universe) => Ok(docids & universe),
None => Ok(docids),
}
}
pub fn compute_query_term_subset_docids_within_field_id(
ctx: &mut SearchContext<'_>,
universe: Option<&RoaringBitmap>,
term: &QueryTermSubset,
fid: u16,
) -> Result<RoaringBitmap> {
let mut docids = RoaringBitmap::new();
for word in term.all_single_words_except_prefix_db(ctx)? {
if let Some(word_fid_docids) = ctx.get_db_word_fid_docids(word.interned(), fid)? {
if let Some(word_fid_docids) = ctx.get_db_word_fid_docids(universe, word.interned(), fid)? {
docids |= word_fid_docids;
}
}
@ -68,7 +75,7 @@ pub fn compute_query_term_subset_docids_within_field_id(
// There may be false positives when resolving a phrase, so we're not
// guaranteed that all of its words are within a single fid.
if let Some(word) = phrase.words(ctx).iter().flatten().next() {
if let Some(word_fid_docids) = ctx.get_db_word_fid_docids(*word, fid)? {
if let Some(word_fid_docids) = ctx.get_db_word_fid_docids(universe, *word, fid)? {
docids |= ctx.get_phrase_docids(phrase)? & word_fid_docids;
}
}
@ -76,7 +83,7 @@ pub fn compute_query_term_subset_docids_within_field_id(
if let Some(word_prefix) = term.use_prefix_db(ctx) {
if let Some(word_fid_docids) =
ctx.get_db_word_prefix_fid_docids(word_prefix.interned(), fid)?
ctx.get_db_word_prefix_fid_docids(universe, word_prefix.interned(), fid)?
{
docids |= word_fid_docids;
}
@ -87,13 +94,14 @@ pub fn compute_query_term_subset_docids_within_field_id(
pub fn compute_query_term_subset_docids_within_position(
ctx: &mut SearchContext<'_>,
universe: Option<&RoaringBitmap>,
term: &QueryTermSubset,
position: u16,
) -> Result<RoaringBitmap> {
let mut docids = RoaringBitmap::new();
for word in term.all_single_words_except_prefix_db(ctx)? {
if let Some(word_position_docids) =
ctx.get_db_word_position_docids(word.interned(), position)?
ctx.get_db_word_position_docids(universe, word.interned(), position)?
{
docids |= word_position_docids;
}
@ -103,15 +111,17 @@ pub fn compute_query_term_subset_docids_within_position(
// It's difficult to know the expected position of the words in the phrase,
// so instead we just check the first one.
if let Some(word) = phrase.words(ctx).iter().flatten().next() {
if let Some(word_position_docids) = ctx.get_db_word_position_docids(*word, position)? {
docids |= ctx.get_phrase_docids(phrase)? & word_position_docids
if let Some(word_position_docids) =
ctx.get_db_word_position_docids(universe, *word, position)?
{
docids |= ctx.get_phrase_docids(phrase)? & word_position_docids;
}
}
}
if let Some(word_prefix) = term.use_prefix_db(ctx) {
if let Some(word_position_docids) =
ctx.get_db_word_prefix_position_docids(word_prefix.interned(), position)?
ctx.get_db_word_prefix_position_docids(universe, word_prefix.interned(), position)?
{
docids |= word_position_docids;
}
@ -147,10 +157,7 @@ pub fn compute_query_graph_docids(
term_subset,
positions: _,
term_ids: _,
}) => {
let node_docids = compute_query_term_subset_docids(ctx, term_subset)?;
predecessors_docids & node_docids
}
}) => compute_query_term_subset_docids(ctx, Some(&predecessors_docids), term_subset)?,
QueryNodeData::Deleted => {
panic!()
}
@ -188,7 +195,7 @@ pub fn compute_phrase_docids(
}
let mut candidates = RoaringBitmap::new();
for word in words.iter().flatten().copied() {
if let Some(word_docids) = ctx.word_docids(Word::Original(word))? {
if let Some(word_docids) = ctx.word_docids(None, Word::Original(word))? {
candidates |= word_docids;
} else {
return Ok(RoaringBitmap::new());
@ -212,7 +219,7 @@ pub fn compute_phrase_docids(
.filter_map(|(index, word)| word.as_ref().map(|word| (index, word)))
{
if dist == 0 {
match ctx.get_db_word_pair_proximity_docids(s1, s2, 1)? {
match ctx.get_db_word_pair_proximity_docids(None, s1, s2, 1)? {
Some(m) => bitmaps.push(m),
// If there are no documents for this pair, there will be no
// results for the phrase query.
@ -222,7 +229,7 @@ pub fn compute_phrase_docids(
let mut bitmap = RoaringBitmap::new();
for dist in 0..=dist {
if let Some(m) =
ctx.get_db_word_pair_proximity_docids(s1, s2, dist as u8 + 1)?
ctx.get_db_word_pair_proximity_docids(None, s1, s2, dist as u8 + 1)?
{
bitmap |= m;
}
@ -239,6 +246,7 @@ pub fn compute_phrase_docids(
// We sort the bitmaps so that we perform the small intersections first, which is faster.
bitmaps.sort_unstable_by_key(|a| a.len());
// TODO use MultiOps intersection and remove the above sort
for bitmap in bitmaps {
candidates &= bitmap;