Speed-up facet aggregation by using a FacetIter

parent 33945a3115
commit d893e83622
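In short: FacetDistribution::facet_values previously fell back to scanning the whole facet_field_id_value_docids database whenever a candidate set had more than 1000 documents. This commit adds a middle branch that instead walks the precomputed facet levels with a FacetIter seeded with the candidates, and it introduces a must_reduce flag on FacetIter together with the new_reducing, new_reverse_reducing and new_non_reducing constructors that the rest of the diff switches to.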
@@ -9,7 +9,7 @@ use serde_json::Value;
 use crate::facet::FacetType;
 use crate::heed_codec::facet::{FacetValueStringCodec, FacetLevelValueF64Codec, FacetLevelValueI64Codec};
 use crate::heed_codec::facet::{FieldDocIdFacetStringCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetI64Codec};
-use crate::search::facet::FacetRange;
+use crate::search::facet::{FacetIter, FacetRange};
 use crate::{Index, FieldId};
 
 pub struct FacetDistribution<'a> {
@@ -41,7 +41,8 @@ impl<'a> FacetDistribution<'a> {
     }
 
     fn facet_values(&self, field_id: FieldId, facet_type: FacetType) -> heed::Result<Vec<Value>> {
-        if let Some(candidates) = self.candidates.as_ref().filter(|c| c.len() <= 1000) {
+        if let Some(candidates) = self.candidates.as_ref() {
+            if candidates.len() <= 1000 {
             let mut key_buffer = vec![field_id];
             match facet_type {
                 FacetType::Float => {
@@ -90,12 +91,49 @@ impl<'a> FacetDistribution<'a> {
                 Ok(facet_values.into_iter().map(Value::from).collect())
             },
             }
+            } else {
+                let iter = match facet_type {
+                    FacetType::String => {
+                        let db = self.index.facet_field_id_value_docids;
+                        let iter = db
+                            .prefix_iter(self.rtxn, &[field_id])?
+                            .remap_key_type::<FacetValueStringCodec>()
+                            .map(|r| r.map(|((_, v), docids)| (Value::from(v), docids)));
+                        Box::new(iter) as Box::<dyn Iterator<Item=_>>
+                    },
+                    FacetType::Integer => {
+                        let iter = FacetIter::<i64, FacetLevelValueI64Codec>::new_non_reducing(
+                            self.rtxn, self.index, field_id, candidates.clone(),
+                        )?;
+                        Box::new(iter.map(|r| r.map(|(v, docids)| (Value::from(v), docids))))
+                    },
+                    FacetType::Float => {
+                        let iter = FacetIter::<f64, FacetLevelValueF64Codec>::new_non_reducing(
+                            self.rtxn, self.index, field_id, candidates.clone(),
+                        )?;
+                        Box::new(iter.map(|r| r.map(|(v, docids)| (Value::from(v), docids))))
+                    },
+                };
+
+                let mut facet_values = Vec::new();
+                for result in iter {
+                    let (value, docids) = result?;
+                    if self.candidates.as_ref().map_or(true, |c| !docids.is_disjoint(c)) {
+                        facet_values.push(value);
+                    }
+                    if facet_values.len() == self.max_values_by_facet {
+                        break;
+                    }
+                }
+
+                Ok(facet_values)
+            }
         } else {
             let db = self.index.facet_field_id_value_docids;
             let iter = match facet_type {
                 FacetType::String => {
                     let iter = db
-                        .prefix_iter(&self.rtxn, &[field_id])?
+                        .prefix_iter(self.rtxn, &[field_id])?
                         .remap_key_type::<FacetValueStringCodec>()
                         .map(|r| r.map(|((_, v), docids)| (Value::from(v), docids)));
                     Box::new(iter) as Box::<dyn Iterator<Item=_>>
@@ -119,11 +157,8 @@ impl<'a> FacetDistribution<'a> {
             let mut facet_values = Vec::new();
             for result in iter {
                 let (value, docids) = result?;
-                match &self.candidates {
-                    Some(candidates) => if !docids.is_disjoint(candidates) {
-                        facet_values.push(value);
-                    },
-                    None => facet_values.push(value),
-                }
+                if self.candidates.as_ref().map_or(true, |c| !docids.is_disjoint(c)) {
+                    facet_values.push(value);
+                }
                 if facet_values.len() == self.max_values_by_facet {
                     break;
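Read together with the hunk at -41,7 above, facet_values now dispatches three ways on the candidates set. The sketch below restates that control flow in compressed form; it is illustrative only: the function name facet_values_sketch is made up, the arm bodies are just comments, and the 1000-document threshold is the literal used in the diff.

use roaring::RoaringBitmap;

// Illustrative only: mirrors the branching this commit introduces, not the exact code.
fn facet_values_sketch(candidates: Option<&RoaringBitmap>) {
    match candidates {
        // Few candidate documents: look the facet values up per document id
        // in the field-id / document-id / facet-value databases.
        Some(c) if c.len() <= 1000 => { /* per-document lookup */ }
        // Many candidates: walk the facet levels with a non-reducing FacetIter
        // seeded with candidates.clone(), so only intersecting entries are yielded.
        Some(_) => { /* FacetIter::new_non_reducing(rtxn, index, field_id, candidates) */ }
        // No candidate filter at all: iterate the whole facet database for the field.
        None => { /* db.prefix_iter(rtxn, &[field_id]) */ }
    }
}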
@@ -147,6 +147,7 @@ pub struct FacetIter<'t, T: 't, KC> {
     db: Database<KC, CboRoaringBitmapCodec>,
     field_id: FieldId,
     level_iters: Vec<(RoaringBitmap, Either<FacetRange<'t, T, KC>, FacetRevRange<'t, T, KC>>)>,
+    must_reduce: bool,
 }
 
 impl<'t, T, KC> FacetIter<'t, T, KC>
@@ -155,7 +156,10 @@ where
     KC: for<'a> BytesEncode<'a, EItem = (FieldId, u8, T, T)>,
     T: PartialOrd + Copy + Bounded,
 {
-    pub fn new(
+    /// Create a `FacetIter` that will iterate on the different facet entries
+    /// (facet value + documents ids) and that will reduce the given documents ids
+    /// while iterating on the different facet levels.
+    pub fn new_reducing(
         rtxn: &'t heed::RoTxn,
         index: &'t Index,
         field_id: FieldId,
@@ -165,10 +169,14 @@ where
         let db = index.facet_field_id_value_docids.remap_key_type::<KC>();
         let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
         let highest_iter = FacetRange::new(rtxn, db, field_id, highest_level, Unbounded, Unbounded)?;
-        Ok(FacetIter { rtxn, db, field_id, level_iters: vec![(documents_ids, Left(highest_iter))] })
+        let level_iters = vec![(documents_ids, Left(highest_iter))];
+        Ok(FacetIter { rtxn, db, field_id, level_iters, must_reduce: true })
     }
 
-    pub fn new_reverse(
+    /// Create a `FacetIter` that will iterate on the different facet entries in reverse
+    /// (facet value + documents ids) and that will reduce the given documents ids
+    /// while iterating on the different facet levels.
+    pub fn new_reverse_reducing(
         rtxn: &'t heed::RoTxn,
         index: &'t Index,
         field_id: FieldId,
@@ -178,7 +186,26 @@ where
         let db = index.facet_field_id_value_docids.remap_key_type::<KC>();
         let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
         let highest_iter = FacetRevRange::new(rtxn, db, field_id, highest_level, Unbounded, Unbounded)?;
-        Ok(FacetIter { rtxn, db, field_id, level_iters: vec![(documents_ids, Right(highest_iter))] })
+        let level_iters = vec![(documents_ids, Right(highest_iter))];
+        Ok(FacetIter { rtxn, db, field_id, level_iters, must_reduce: true })
     }
 
+    /// Create a `FacetIter` that will iterate on the different facet entries
+    /// (facet value + documents ids) and that will not reduce the given documents ids
+    /// while iterating on the different facet levels, possibly returning multiple times
+    /// a document id associated with multiple facet values.
+    pub fn new_non_reducing(
+        rtxn: &'t heed::RoTxn,
+        index: &'t Index,
+        field_id: FieldId,
+        documents_ids: RoaringBitmap,
+    ) -> heed::Result<FacetIter<'t, T, KC>>
+    {
+        let db = index.facet_field_id_value_docids.remap_key_type::<KC>();
+        let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
+        let highest_iter = FacetRange::new(rtxn, db, field_id, highest_level, Unbounded, Unbounded)?;
+        let level_iters = vec![(documents_ids, Left(highest_iter))];
+        Ok(FacetIter { rtxn, db, field_id, level_iters, must_reduce: false })
+    }
+
     fn highest_level<X>(rtxn: &'t heed::RoTxn, db: Database<KC, X>, fid: FieldId) -> heed::Result<Option<u8>> {
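For orientation, here is a hedged usage sketch of the three constructors as they are typed in this diff. The wrapper function facet_iter_usage_sketch is invented for the example, the crate paths are taken from the import hunk above, and rtxn, index, field_id and candidates are assumed to be handed in by the caller.

use heed::RoTxn;
use roaring::RoaringBitmap;

use crate::heed_codec::facet::{FacetLevelValueF64Codec, FacetLevelValueI64Codec};
use crate::search::facet::FacetIter;
use crate::{FieldId, Index};

// Hypothetical helper, only meant to show the call shapes side by side.
fn facet_iter_usage_sketch(
    rtxn: &RoTxn,
    index: &Index,
    field_id: FieldId,
    candidates: &RoaringBitmap,
) -> heed::Result<()> {
    // The ascending/descending criteria keep the old behaviour under new names:
    // a document id is dropped from the remaining set once it has been yielded.
    let _asc = FacetIter::<f64, FacetLevelValueF64Codec>::new_reducing(
        rtxn, index, field_id, candidates.clone(),
    )?;
    let _desc = FacetIter::<f64, FacetLevelValueF64Codec>::new_reverse_reducing(
        rtxn, index, field_id, candidates.clone(),
    )?;

    // The facet distribution uses the non-reducing variant: a document id can be
    // yielded again under every facet value it carries.
    let _dist = FacetIter::<i64, FacetLevelValueI64Codec>::new_non_reducing(
        rtxn, index, field_id, candidates.clone(),
    )?;

    Ok(())
}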
@@ -216,7 +243,9 @@ where
 
                     docids.intersect_with(&documents_ids);
                     if !docids.is_empty() {
-                        documents_ids.difference_with(&docids);
+                        if self.must_reduce {
+                            documents_ids.difference_with(&docids);
+                        }
 
                         if level == 0 {
                             debug!("found {:?} at {:?}", docids, left);
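This hunk is where the must_reduce flag set by the constructors is consumed: when it is true, each batch of matching document ids is subtracted from the remaining set with documents_ids.difference_with(&docids), so every document can be yielded at most once, which is what the ascending/descending criteria need. When it is false, the set is left untouched and a document id can be returned once per facet value it is associated with, which is what the facet distribution relies on.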
@@ -189,9 +189,9 @@ impl<'a> Search<'a> {
                 }
             } else {
                 let facet_fn = if ascending {
-                    FacetIter::<f64, FacetLevelValueF64Codec>::new
+                    FacetIter::<f64, FacetLevelValueF64Codec>::new_reducing
                 } else {
-                    FacetIter::<f64, FacetLevelValueF64Codec>::new_reverse
+                    FacetIter::<f64, FacetLevelValueF64Codec>::new_reverse_reducing
                 };
                 let mut limit_tmp = limit;
                 let mut output = Vec::new();
@@ -226,9 +226,9 @@ impl<'a> Search<'a> {
                 }
             } else {
                 let facet_fn = if ascending {
-                    FacetIter::<i64, FacetLevelValueI64Codec>::new
+                    FacetIter::<i64, FacetLevelValueI64Codec>::new_reducing
                 } else {
-                    FacetIter::<i64, FacetLevelValueI64Codec>::new_reverse
+                    FacetIter::<i64, FacetLevelValueI64Codec>::new_reverse_reducing
                 };
                 let mut limit_tmp = limit;
                 let mut output = Vec::new();
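The two Search hunks are purely mechanical: the ascending and descending facet criteria keep their reducing behaviour and only pick up the new_reducing and new_reverse_reducing names.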