Refactor indexation of the "facet-id-exists-docids" database

The idea is to directly create a sorted and merged list of bitmaps,
in the form of a BTreeMap<FieldId, RoaringBitmap>, instead of creating
a grenad::Reader where the keys are field ids and the values are docids.

Then we send that BTreeMap to the code that handles TypedChunks
(write_typed_chunk_into_index), which inserts its contents into the
database.
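
For concreteness, here is a minimal sketch of the idea. This is not the
extraction code from this commit; the docids_per_field helper and its
input iterator are hypothetical:

    use std::collections::BTreeMap;

    use roaring::RoaringBitmap;

    // In milli, FieldId is a u16 and DocumentId a u32.
    type FieldId = u16;
    type DocumentId = u32;

    // Accumulate, per field id, the set of documents in which that field
    // exists. The BTreeMap keeps the field ids sorted and the bitmaps are
    // merged as we go, so the result is already "sorted and merged" by the
    // time it reaches the TypedChunk handler.
    fn docids_per_field(
        pairs: impl Iterator<Item = (FieldId, DocumentId)>,
    ) -> BTreeMap<FieldId, RoaringBitmap> {
        let mut map = BTreeMap::<FieldId, RoaringBitmap>::new();
        for (field_id, docid) in pairs {
            map.entry(field_id).or_insert_with(RoaringBitmap::new).insert(docid);
        }
        map
    }
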
Loïc Lecrenier · 2022-07-19 09:57:28 +02:00
parent 1eb1e73bb3 · commit aed8c69bcb
3 changed files with 92 additions and 39 deletions

@@ -1,11 +1,12 @@
 use std::borrow::Cow;
+use std::collections::BTreeMap;
 use std::convert::TryInto;
 use std::fs::File;
 use std::io;
 
 use grenad::MergerBuilder;
 use heed::types::ByteSlice;
-use heed::{BytesDecode, RwTxn};
+use heed::{BytesDecode, BytesEncode, RwTxn};
 use roaring::RoaringBitmap;
 
 use super::helpers::{
@@ -16,8 +17,8 @@ use super::{ClonableMmap, MergeFn};
 use crate::heed_codec::facet::{decode_prefix_string, encode_prefix_string};
 use crate::update::index_documents::helpers::as_cloneable_grenad;
 use crate::{
-    lat_lng_to_xyz, BoRoaringBitmapCodec, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index,
-    Result,
+    error, lat_lng_to_xyz, BoRoaringBitmapCodec, CboRoaringBitmapCodec, DocumentId, FieldId,
+    GeoPoint, Index, Result, BEU16,
 };
 
 pub(crate) enum TypedChunk {
@@ -35,7 +36,7 @@ pub(crate) enum TypedChunk {
     WordPairProximityDocids(grenad::Reader<File>),
     FieldIdFacetStringDocids(grenad::Reader<File>),
     FieldIdFacetNumberDocids(grenad::Reader<File>),
-    FieldIdFacetExistsDocids(grenad::Reader<File>),
+    FieldIdFacetExistsDocids(BTreeMap<FieldId, RoaringBitmap>),
     GeoPoints(grenad::Reader<File>),
 }
@@ -147,16 +148,14 @@ pub(crate) fn write_typed_chunk_into_index(
             )?;
             is_merged_database = true;
         }
-        TypedChunk::FieldIdFacetExistsDocids(facet_id_exists_docids_iter) => {
-            append_entries_into_database(
-                facet_id_exists_docids_iter,
+        TypedChunk::FieldIdFacetExistsDocids(facet_id_exists_docids) => {
+            write_sorted_iterator_into_database(
+                facet_id_exists_docids.into_iter().map(|(k, v)| (BEU16::new(k), v)),
                 &index.facet_id_exists_docids,
                 "facet-id-exists-docids",
                 wtxn,
-                index_is_empty,
-                |value, _buffer| Ok(value),
                 merge_cbo_roaring_bitmaps,
-            )
-            .unwrap();
+            )?;
             is_merged_database = true;
         }
         TypedChunk::WordPairProximityDocids(word_pair_proximity_docids_iter) => {
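
A note on the BEU16::new(k) mapping above: BEU16 is milli's big-endian u16
key wrapper. LMDB compares keys byte-wise, so encoding the field id in
big-endian makes the byte order coincide with the numeric order in which
the BTreeMap iterates; that is what lets the function below append into an
empty database instead of doing random-order puts. A self-contained
illustration (plain std, not milli code):

    fn main() {
        let ids: [u16; 3] = [1, 256, 257];

        // Big-endian bytes sort exactly like the integers themselves.
        let be: Vec<[u8; 2]> = ids.iter().map(|id| id.to_be_bytes()).collect();
        assert!(be.windows(2).all(|w| w[0] < w[1]));

        // Little-endian would not: 256 = [0x00, 0x01] sorts before 1 = [0x01, 0x00].
        let le: Vec<[u8; 2]> = ids.iter().map(|id| id.to_le_bytes()).collect();
        assert!(!le.windows(2).all(|w| w[0] < w[1]));
    }
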
@@ -270,6 +269,58 @@ fn merge_cbo_roaring_bitmaps(
     )?)
 }
 
+fn write_sorted_iterator_into_database<Iter, Key, Value, KeyCodec, ValueCodec, Merge>(
+    mut iterator: Iter,
+    database: &heed::Database<KeyCodec, ValueCodec>,
+    database_name: &'static str,
+    wtxn: &mut RwTxn,
+    merge_values: Merge,
+) -> Result<()>
+where
+    for<'a> KeyCodec: BytesEncode<'a, EItem = Key>,
+    for<'a> ValueCodec: BytesEncode<'a, EItem = Value> + BytesDecode<'a, DItem = Value>,
+    Iter: Iterator<Item = (Key, Value)>,
+    Merge: Fn(&[u8], &[u8], &mut Vec<u8>) -> Result<()>,
+{
+    if database.is_empty(wtxn)? {
+        let mut database = database.iter_mut(wtxn)?.remap_types::<ByteSlice, ByteSlice>();
+
+        while let Some((key, value)) = iterator.next() {
+            let key = KeyCodec::bytes_encode(&key)
+                .ok_or(error::SerializationError::Encoding { db_name: Some(database_name) })?;
+            if valid_lmdb_key(&key) {
+                let value = ValueCodec::bytes_encode(&value)
+                    .ok_or(error::SerializationError::Encoding { db_name: Some(database_name) })?;
+                unsafe { database.append(&key, &value)? };
+            }
+        }
+
+        Ok(())
+    } else {
+        let database = database.remap_types::<ByteSlice, ByteSlice>();
+
+        let mut buffer = Vec::new();
+        while let Some((key, value)) = iterator.next() {
+            let key = KeyCodec::bytes_encode(&key)
+                .ok_or(error::SerializationError::Encoding { db_name: Some(database_name) })?;
+            if valid_lmdb_key(&key) {
+                let value = ValueCodec::bytes_encode(&value)
+                    .ok_or(error::SerializationError::Encoding { db_name: Some(database_name) })?;
+                let value = match database.get(wtxn, &key)? {
+                    Some(prev_value) => {
+                        merge_values(&value, &prev_value, &mut buffer)?;
+                        &buffer[..]
+                    }
+                    None => &value,
+                };
+                database.put(wtxn, &key, value)?;
+            }
+        }
+
+        Ok(())
+    }
+}
+
 /// Write provided entries in database using serialize_value function.
 /// merge_values function is used if an entry already exist in the database.
 fn write_entries_into_database<R, K, V, FS, FM>(
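
The new write_sorted_iterator_into_database takes two paths: when the
target database is empty (the typical first indexing), the keys arrive in
strictly increasing order thanks to the sorted BTreeMap and the big-endian
key encoding, so it can use LMDB's append code path; otherwise it falls
back to get/merge/put, where merge_cbo_roaring_bitmaps unions the bitmap
already on disk with the incoming one. Stripped of the CBO serialization
layer, that merge amounts to a bitmap union; a simplified stand-in, not
the milli codec itself:

    use roaring::RoaringBitmap;

    // Union the docids already stored for a field with the new ones;
    // the merged result is what gets written back with put().
    fn merge_docids(db_value: &RoaringBitmap, new_value: &RoaringBitmap) -> RoaringBitmap {
        db_value | new_value
    }

    fn main() {
        let on_disk: RoaringBitmap = (1..=3).collect();
        let incoming: RoaringBitmap = [3, 4].into_iter().collect();
        assert_eq!(merge_docids(&on_disk, &incoming).len(), 4); // {1, 2, 3, 4}
    }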