Change the project to become a workspace with milli as a default-member
This commit is contained in: parent d450b971f9, commit e8639517da
56 changed files with 1053 additions and 2617 deletions
65  milli/src/criterion.rs  Normal file
@@ -0,0 +1,65 @@
use std::collections::HashMap;

use anyhow::{Context, bail};
use regex::Regex;
use serde::{Serialize, Deserialize};

use crate::facet::FacetType;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub enum Criterion {
    /// Sorted by increasing number of typos.
    Typo,
    /// Sorted by decreasing number of matched query terms.
    Words,
    /// Sorted by increasing distance between matched query terms.
    Proximity,
    /// Documents with query words contained in more important
    /// attributes are considered better.
    Attribute,
    /// Documents with query words at the front of an attribute are
    /// considered better than documents with them at the back.
    WordsPosition,
    /// Sorted by the similarity of the matched words with the query words.
    Exactness,
    /// Sorted by the increasing value of the field specified.
    Asc(String),
    /// Sorted by the decreasing value of the field specified.
    Desc(String),
}

impl Criterion {
    pub fn from_str(faceted_attributes: &HashMap<String, FacetType>, txt: &str) -> anyhow::Result<Criterion> {
        match txt {
            "typo" => Ok(Criterion::Typo),
            "words" => Ok(Criterion::Words),
            "proximity" => Ok(Criterion::Proximity),
            "attribute" => Ok(Criterion::Attribute),
            "wordsposition" => Ok(Criterion::WordsPosition),
            "exactness" => Ok(Criterion::Exactness),
            text => {
                let re = Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#)?;
                let caps = re.captures(text).with_context(|| format!("unknown criterion name: {}", text))?;
                let order = caps.get(1).unwrap().as_str();
                let field_name = caps.get(2).unwrap().as_str();
                faceted_attributes.get(field_name).with_context(|| format!("Can't use {:?} as a criterion as it isn't a faceted field.", field_name))?;
                match order {
                    "asc" => Ok(Criterion::Asc(field_name.to_string())),
                    "desc" => Ok(Criterion::Desc(field_name.to_string())),
                    otherwise => bail!("unknown criterion name: {}", otherwise),
                }
            },
        }
    }
}

pub fn default_criteria() -> Vec<Criterion> {
    vec![
        Criterion::Typo,
        Criterion::Words,
        Criterion::Proximity,
        Criterion::Attribute,
        Criterion::WordsPosition,
        Criterion::Exactness,
    ]
}
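A hypothetical usage sketch of `Criterion::from_str` (the `price` field and the wrapping function are illustrative, not part of the diff):

use std::collections::HashMap;
use milli::{Criterion, facet::FacetType};

fn example() -> anyhow::Result<()> {
    let mut faceted = HashMap::new();
    faceted.insert("price".to_string(), FacetType::Float);

    // Plain names map to the built-in criteria...
    assert_eq!(Criterion::from_str(&faceted, "typo")?, Criterion::Typo);
    // ...while asc(...)/desc(...) go through the regex and require a faceted field.
    assert_eq!(Criterion::from_str(&faceted, "asc(price)")?, Criterion::Asc("price".to_string()));
    assert!(Criterion::from_str(&faceted, "asc(title)").is_err()); // "title" isn't faceted
    Ok(())
}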
156  milli/src/external_documents_ids.rs  Normal file
@@ -0,0 +1,156 @@
use std::borrow::Cow;
use std::convert::TryInto;
use fst::{Streamer, IntoStreamer};

pub struct ExternalDocumentsIds<'a> {
    pub(crate) hard: fst::Map<Cow<'a, [u8]>>,
    pub(crate) soft: fst::Map<Cow<'a, [u8]>>,
}

impl<'a> ExternalDocumentsIds<'a> {
    pub fn new(hard: fst::Map<Cow<'a, [u8]>>, soft: fst::Map<Cow<'a, [u8]>>) -> ExternalDocumentsIds<'a> {
        ExternalDocumentsIds { hard, soft }
    }

    pub fn into_static(self) -> ExternalDocumentsIds<'static> {
        ExternalDocumentsIds {
            hard: self.hard.map_data(|c| Cow::Owned(c.into_owned())).unwrap(),
            soft: self.soft.map_data(|c| Cow::Owned(c.into_owned())).unwrap(),
        }
    }

    pub fn get<A: AsRef<[u8]>>(&self, external_id: A) -> Option<u32> {
        let external_id = external_id.as_ref();
        match self.soft.get(external_id).or_else(|| self.hard.get(external_id)) {
            // u64::MAX means deleted in the soft fst map.
            Some(id) if id != u64::MAX => Some(id.try_into().unwrap()),
            _otherwise => None,
        }
    }

    pub fn delete_ids<A: AsRef<[u8]>>(&mut self, other: fst::Set<A>) -> fst::Result<()> {
        let other = fst::Map::from(other.into_fst());
        let union_op = self.soft.op().add(&other).r#union();

        let mut iter = union_op.into_stream();
        let mut new_soft_builder = fst::MapBuilder::memory();
        while let Some((external_id, docids)) = iter.next() {
            if docids.iter().any(|v| v.index == 1) {
                // If the `other` set returns a value here it means
                // that it must be marked as deleted.
                new_soft_builder.insert(external_id, u64::MAX)?;
            } else {
                new_soft_builder.insert(external_id, docids[0].value)?;
            }
        }

        drop(iter);

        // We save this new map as the new soft map.
        self.soft = new_soft_builder.into_map().map_data(Cow::Owned)?;
        self.merge_soft_into_hard()
    }

    pub fn insert_ids<A: AsRef<[u8]>>(&mut self, other: &fst::Map<A>) -> fst::Result<()> {
        let union_op = self.soft.op().add(other).r#union();

        let mut new_soft_builder = fst::MapBuilder::memory();
        let mut iter = union_op.into_stream();
        while let Some((external_id, docids)) = iter.next() {
            let id = docids.last().unwrap().value;
            new_soft_builder.insert(external_id, id)?;
        }

        drop(iter);

        // We save the new map as the new soft map.
        self.soft = new_soft_builder.into_map().map_data(Cow::Owned)?;
        self.merge_soft_into_hard()
    }

    fn merge_soft_into_hard(&mut self) -> fst::Result<()> {
        if self.soft.len() >= self.hard.len() / 2 {
            let union_op = self.hard.op().add(&self.soft).r#union();

            let mut iter = union_op.into_stream();
            let mut new_hard_builder = fst::MapBuilder::memory();
            while let Some((external_id, docids)) = iter.next() {
                if docids.len() == 2 {
                    if docids[1].value != u64::MAX {
                        new_hard_builder.insert(external_id, docids[1].value)?;
                    }
                } else {
                    new_hard_builder.insert(external_id, docids[0].value)?;
                }
            }

            drop(iter);

            self.hard = new_hard_builder.into_map().map_data(Cow::Owned)?;
            self.soft = fst::Map::default().map_data(Cow::Owned)?;
        }

        Ok(())
    }
}

impl Default for ExternalDocumentsIds<'static> {
    fn default() -> Self {
        ExternalDocumentsIds {
            hard: fst::Map::default().map_data(Cow::Owned).unwrap(),
            soft: fst::Map::default().map_data(Cow::Owned).unwrap(),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn simple_insert_delete_ids() {
        let mut external_documents_ids = ExternalDocumentsIds::default();

        let new_ids = fst::Map::from_iter(vec![("a", 1), ("b", 2), ("c", 3), ("d", 4)]).unwrap();
        external_documents_ids.insert_ids(&new_ids).unwrap();

        assert_eq!(external_documents_ids.get("a"), Some(1));
        assert_eq!(external_documents_ids.get("b"), Some(2));
        assert_eq!(external_documents_ids.get("c"), Some(3));
        assert_eq!(external_documents_ids.get("d"), Some(4));

        let new_ids = fst::Map::from_iter(vec![("e", 5), ("f", 6), ("g", 7)]).unwrap();
        external_documents_ids.insert_ids(&new_ids).unwrap();

        assert_eq!(external_documents_ids.get("a"), Some(1));
        assert_eq!(external_documents_ids.get("b"), Some(2));
        assert_eq!(external_documents_ids.get("c"), Some(3));
        assert_eq!(external_documents_ids.get("d"), Some(4));
        assert_eq!(external_documents_ids.get("e"), Some(5));
        assert_eq!(external_documents_ids.get("f"), Some(6));
        assert_eq!(external_documents_ids.get("g"), Some(7));

        let del_ids = fst::Set::from_iter(vec!["a", "c", "f"]).unwrap();
        external_documents_ids.delete_ids(del_ids).unwrap();

        assert_eq!(external_documents_ids.get("a"), None);
        assert_eq!(external_documents_ids.get("b"), Some(2));
        assert_eq!(external_documents_ids.get("c"), None);
        assert_eq!(external_documents_ids.get("d"), Some(4));
        assert_eq!(external_documents_ids.get("e"), Some(5));
        assert_eq!(external_documents_ids.get("f"), None);
        assert_eq!(external_documents_ids.get("g"), Some(7));

        let new_ids = fst::Map::from_iter(vec![("a", 5), ("b", 6), ("h", 8)]).unwrap();
        external_documents_ids.insert_ids(&new_ids).unwrap();

        assert_eq!(external_documents_ids.get("a"), Some(5));
        assert_eq!(external_documents_ids.get("b"), Some(6));
        assert_eq!(external_documents_ids.get("c"), None);
        assert_eq!(external_documents_ids.get("d"), Some(4));
        assert_eq!(external_documents_ids.get("e"), Some(5));
        assert_eq!(external_documents_ids.get("f"), None);
        assert_eq!(external_documents_ids.get("g"), Some(7));
        assert_eq!(external_documents_ids.get("h"), Some(8));
    }
}
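The deletion scheme above leans on fst union streams: each streamed value carries the index of the input map it came from, and `u64::MAX` in the soft map acts as a deletion tombstone until `merge_soft_into_hard` compacts it away. A minimal standalone sketch of that union mechanism (illustrative, not part of the diff):

use fst::{IntoStreamer, Streamer};

let hard = fst::Map::from_iter(vec![("a", 1u64), ("c", 3)]).unwrap();
let soft = fst::Map::from_iter(vec![("a", u64::MAX), ("b", 2)]).unwrap(); // "a" is a tombstone

let mut stream = hard.op().add(&soft).union().into_stream();
while let Some((key, values)) = stream.next() {
    for v in values {
        // v.index is 0 for `hard` and 1 for `soft`; when both maps contain
        // the key, the soft value wins, unless it is the u64::MAX tombstone.
        println!("{:?} -> ({}, {})", key, v.index, v.value);
    }
}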
50  milli/src/facet/facet_type.rs  Normal file
@@ -0,0 +1,50 @@
use std::error::Error;
use std::fmt;
use std::str::FromStr;

use serde::{Serialize, Deserialize};

#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
#[derive(Serialize, Deserialize)]
pub enum FacetType {
    String,
    Float,
    Integer,
}

impl fmt::Display for FacetType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            FacetType::String => f.write_str("string"),
            FacetType::Float => f.write_str("float"),
            FacetType::Integer => f.write_str("integer"),
        }
    }
}

impl FromStr for FacetType {
    type Err = InvalidFacetType;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.eq_ignore_ascii_case("string") {
            Ok(FacetType::String)
        } else if s.eq_ignore_ascii_case("float") {
            Ok(FacetType::Float)
        } else if s.eq_ignore_ascii_case("integer") {
            Ok(FacetType::Integer)
        } else {
            Err(InvalidFacetType)
        }
    }
}

#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct InvalidFacetType;

impl fmt::Display for InvalidFacetType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(r#"Invalid facet type, must be "string", "float" or "integer""#)
    }
}

impl Error for InvalidFacetType { }
60  milli/src/facet/facet_value.rs  Normal file
@@ -0,0 +1,60 @@
use ordered_float::OrderedFloat;
use serde::{Serialize, Serializer};

#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub enum FacetValue {
    String(String),
    Float(OrderedFloat<f64>),
    Integer(i64),
}

impl From<String> for FacetValue {
    fn from(string: String) -> FacetValue {
        FacetValue::String(string)
    }
}

impl From<&str> for FacetValue {
    fn from(string: &str) -> FacetValue {
        FacetValue::String(string.to_owned())
    }
}

impl From<f64> for FacetValue {
    fn from(float: f64) -> FacetValue {
        FacetValue::Float(OrderedFloat(float))
    }
}

impl From<OrderedFloat<f64>> for FacetValue {
    fn from(float: OrderedFloat<f64>) -> FacetValue {
        FacetValue::Float(float)
    }
}

impl From<i64> for FacetValue {
    fn from(integer: i64) -> FacetValue {
        FacetValue::Integer(integer)
    }
}

/// We implement Serialize ourselves because we need to always serialize it
/// as a string, as JSON object keys must be strings, not numbers.
impl Serialize for FacetValue {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            FacetValue::String(string) => serializer.serialize_str(string),
            FacetValue::Float(float) => {
                let string = float.to_string();
                serializer.serialize_str(&string)
            },
            FacetValue::Integer(integer) => {
                let string = integer.to_string();
                serializer.serialize_str(&string)
            },
        }
    }
}
6  milli/src/facet/mod.rs  Normal file
@@ -0,0 +1,6 @@
mod facet_type;
mod facet_value;
pub mod value_encoding;

pub use self::facet_type::FacetType;
pub use self::facet_value::FacetValue;
69  milli/src/facet/value_encoding.rs  Normal file
@@ -0,0 +1,69 @@
// https://stackoverflow.com/a/43305015/1941280
#[inline]
pub fn f64_into_bytes(float: f64) -> Option<[u8; 8]> {
    if float.is_finite() {
        if float == 0.0 || float == -0.0 {
            return Some(xor_first_bit(0.0_f64.to_be_bytes()));
        } else if float.is_sign_negative() {
            return Some(xor_all_bits(float.to_be_bytes()));
        } else if float.is_sign_positive() {
            return Some(xor_first_bit(float.to_be_bytes()));
        }
    }
    None
}

#[inline]
pub fn i64_into_bytes(int: i64) -> [u8; 8] {
    xor_first_bit(int.to_be_bytes())
}

#[inline]
pub fn i64_from_bytes(bytes: [u8; 8]) -> i64 {
    i64::from_be_bytes(xor_first_bit(bytes))
}

#[inline]
fn xor_first_bit(mut x: [u8; 8]) -> [u8; 8] {
    x[0] ^= 0x80;
    x
}

#[inline]
fn xor_all_bits(mut x: [u8; 8]) -> [u8; 8] {
    x.iter_mut().for_each(|b| *b ^= 0xff);
    x
}

#[cfg(test)]
mod tests {
    use std::cmp::Ordering::Less;
    use super::*;

    fn is_sorted<T: Ord>(x: &[T]) -> bool {
        x.windows(2).map(|x| x[0].cmp(&x[1])).all(|o| o == Less)
    }

    #[test]
    fn ordered_f64_bytes() {
        let a = -13_f64;
        let b = -10.0;
        let c = -0.0;
        let d = 1.0;
        let e = 43.0;

        let vec: Vec<_> = [a, b, c, d, e].iter().cloned().map(f64_into_bytes).collect();
        assert!(is_sorted(&vec), "{:?}", vec);
    }

    #[test]
    fn ordered_i64_bytes() {
        let a = -10_i64;
        let b = -0_i64;
        let c = 1_i64;
        let d = 43_i64;

        let vec: Vec<_> = [a, b, c, d].iter().cloned().map(i64_into_bytes).collect();
        assert!(is_sorted(&vec), "{:?}", vec);
    }
}
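For intuition: the big-endian bytes of IEEE-754 floats already compare like the numbers themselves for positives, so XOR-ing the sign bit lifts all positives above all negatives, and XOR-ing every bit of a negative float reverses its inverted magnitude order. An illustrative check reusing the functions above (not part of the diff):

let neg = f64_into_bytes(-1.5).unwrap();
let pos = f64_into_bytes(1.5).unwrap();
assert!(neg < pos); // the encodings compare lexicographically like the floats
assert!(i64_into_bytes(-42) < i64_into_bytes(7)); // same property for i64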
115  milli/src/fields_ids_map.rs  Normal file
@@ -0,0 +1,115 @@
use std::collections::BTreeMap;
use serde::{Serialize, Deserialize};
use crate::FieldId;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldsIdsMap {
    names_ids: BTreeMap<String, FieldId>,
    ids_names: BTreeMap<FieldId, String>,
    next_id: Option<FieldId>,
}

impl FieldsIdsMap {
    pub fn new() -> FieldsIdsMap {
        FieldsIdsMap {
            names_ids: BTreeMap::new(),
            ids_names: BTreeMap::new(),
            next_id: Some(0),
        }
    }

    /// Returns the number of fields ids in the map.
    pub fn len(&self) -> usize {
        self.names_ids.len()
    }

    /// Returns `true` if the map is empty.
    pub fn is_empty(&self) -> bool {
        self.names_ids.is_empty()
    }

    /// Returns the field id related to a field name; it will create a new field id if the
    /// name is not already known. Returns `None` if the maximum field id has been reached.
    pub fn insert(&mut self, name: &str) -> Option<FieldId> {
        match self.names_ids.get(name) {
            Some(id) => Some(*id),
            None => {
                let id = self.next_id?;
                self.next_id = id.checked_add(1);
                self.names_ids.insert(name.to_owned(), id);
                self.ids_names.insert(id, name.to_owned());
                Some(id)
            }
        }
    }

    /// Get the id of a field based on its name.
    pub fn id(&self, name: &str) -> Option<FieldId> {
        self.names_ids.get(name).copied()
    }

    /// Get the name of a field based on its id.
    pub fn name(&self, id: FieldId) -> Option<&str> {
        self.ids_names.get(&id).map(String::as_str)
    }

    /// Remove a field name and id based on its name.
    pub fn remove(&mut self, name: &str) -> Option<FieldId> {
        match self.names_ids.remove(name) {
            Some(id) => self.ids_names.remove_entry(&id).map(|(id, _)| id),
            None => None,
        }
    }

    /// Iterate over the ids and names in the ids order.
    pub fn iter(&self) -> impl Iterator<Item=(FieldId, &str)> {
        self.ids_names.iter().map(|(id, name)| (*id, name.as_str()))
    }
}

impl Default for FieldsIdsMap {
    fn default() -> FieldsIdsMap {
        FieldsIdsMap::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn fields_ids_map() {
        let mut map = FieldsIdsMap::new();

        assert_eq!(map.insert("id"), Some(0));
        assert_eq!(map.insert("title"), Some(1));
        assert_eq!(map.insert("description"), Some(2));
        assert_eq!(map.insert("id"), Some(0));
        assert_eq!(map.insert("title"), Some(1));
        assert_eq!(map.insert("description"), Some(2));

        assert_eq!(map.id("id"), Some(0));
        assert_eq!(map.id("title"), Some(1));
        assert_eq!(map.id("description"), Some(2));
        assert_eq!(map.id("date"), None);

        assert_eq!(map.len(), 3);

        assert_eq!(map.name(0), Some("id"));
        assert_eq!(map.name(1), Some("title"));
        assert_eq!(map.name(2), Some("description"));
        assert_eq!(map.name(4), None);

        assert_eq!(map.remove("title"), Some(1));

        assert_eq!(map.id("title"), None);
        assert_eq!(map.insert("title"), Some(3));
        assert_eq!(map.len(), 3);

        let mut iter = map.iter();
        assert_eq!(iter.next(), Some((0, "id")));
        assert_eq!(iter.next(), Some((2, "description")));
        assert_eq!(iter.next(), Some((3, "title")));
        assert_eq!(iter.next(), None);
    }
}
27  milli/src/heed_codec/beu32_str_codec.rs  Normal file
@@ -0,0 +1,27 @@
use std::borrow::Cow;
use std::convert::TryInto;
use std::str;

pub struct BEU32StrCodec;

impl<'a> heed::BytesDecode<'a> for BEU32StrCodec {
    type DItem = (u32, &'a str);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (n_bytes, str_bytes) = bytes.split_at(4);
        let n = n_bytes.try_into().map(u32::from_be_bytes).ok()?;
        let s = str::from_utf8(str_bytes).ok()?;
        Some((n, s))
    }
}

impl<'a> heed::BytesEncode<'a> for BEU32StrCodec {
    type EItem = (u32, &'a str);

    fn bytes_encode((n, s): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(s.len() + 4);
        bytes.extend_from_slice(&n.to_be_bytes());
        bytes.extend_from_slice(s.as_bytes());
        Some(Cow::Owned(bytes))
    }
}
29  milli/src/heed_codec/bo_roaring_bitmap_codec.rs  Normal file
@@ -0,0 +1,29 @@
use std::borrow::Cow;
use byteorder::{NativeEndian, ReadBytesExt, WriteBytesExt};
use roaring::RoaringBitmap;

pub struct BoRoaringBitmapCodec;

impl heed::BytesDecode<'_> for BoRoaringBitmapCodec {
    type DItem = RoaringBitmap;

    fn bytes_decode(mut bytes: &[u8]) -> Option<Self::DItem> {
        let mut bitmap = RoaringBitmap::new();
        while let Ok(integer) = bytes.read_u32::<NativeEndian>() {
            bitmap.insert(integer);
        }
        Some(bitmap)
    }
}

impl heed::BytesEncode<'_> for BoRoaringBitmapCodec {
    type EItem = RoaringBitmap;

    fn bytes_encode(item: &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(item.len() as usize * 4);
        for integer in item.iter() {
            bytes.write_u32::<NativeEndian>(integer).ok()?;
        }
        Some(Cow::Owned(bytes))
    }
}
108  milli/src/heed_codec/cbo_roaring_bitmap_codec.rs  Normal file
@@ -0,0 +1,108 @@
use std::borrow::Cow;
use std::io;
use std::mem::size_of;

use byteorder::{NativeEndian, ReadBytesExt, WriteBytesExt};
use roaring::RoaringBitmap;

/// This is the limit where using a byteorder encoding becomes less size-efficient
/// than using a direct roaring encoding; it is also the point where we are able
/// to tell which encoding was used from the length of the byte array alone.
const THRESHOLD: usize = 7;

/// A conditional codec that uses either the RoaringBitmap serialization
/// or a lighter ByteOrder en/decoding method.
pub struct CboRoaringBitmapCodec;

impl CboRoaringBitmapCodec {
    pub fn serialized_size(roaring: &RoaringBitmap) -> usize {
        if roaring.len() <= THRESHOLD as u64 {
            roaring.len() as usize * size_of::<u32>()
        } else {
            roaring.serialized_size()
        }
    }

    pub fn serialize_into(roaring: &RoaringBitmap, vec: &mut Vec<u8>) -> io::Result<()> {
        if roaring.len() <= THRESHOLD as u64 {
            // If the number of items (u32s) to encode is less than or equal to the
            // threshold, it means that it would weigh the same as or less than the
            // RoaringBitmap header, so we directly encode them using ByteOrder instead.
            for integer in roaring {
                vec.write_u32::<NativeEndian>(integer)?;
            }
            Ok(())
        } else {
            // Otherwise, we use the classic RoaringBitmapCodec that writes a header.
            roaring.serialize_into(vec)
        }
    }

    pub fn deserialize_from(mut bytes: &[u8]) -> io::Result<RoaringBitmap> {
        if bytes.len() <= THRESHOLD * size_of::<u32>() {
            // If at most `THRESHOLD` integers fit into this array of bytes,
            // it means that we used the ByteOrder codec serializer.
            let mut bitmap = RoaringBitmap::new();
            while let Ok(integer) = bytes.read_u32::<NativeEndian>() {
                bitmap.insert(integer);
            }
            Ok(bitmap)
        } else {
            // Otherwise, it means we used the classic RoaringBitmapCodec and that
            // the serialized data is longer than `THRESHOLD` integers' worth of bytes.
            RoaringBitmap::deserialize_from(bytes)
        }
    }
}

impl heed::BytesDecode<'_> for CboRoaringBitmapCodec {
    type DItem = RoaringBitmap;

    fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
        Self::deserialize_from(bytes).ok()
    }
}

impl heed::BytesEncode<'_> for CboRoaringBitmapCodec {
    type EItem = RoaringBitmap;

    fn bytes_encode(item: &Self::EItem) -> Option<Cow<[u8]>> {
        let mut vec = Vec::with_capacity(Self::serialized_size(item));
        Self::serialize_into(item, &mut vec).ok()?;
        Some(Cow::Owned(vec))
    }
}

#[cfg(test)]
mod tests {
    use std::iter::FromIterator;
    use heed::{BytesEncode, BytesDecode};
    use super::*;

    #[test]
    fn verify_encoding_decoding() {
        let input = RoaringBitmap::from_iter(0..THRESHOLD as u32);
        let bytes = CboRoaringBitmapCodec::bytes_encode(&input).unwrap();
        let output = CboRoaringBitmapCodec::bytes_decode(&bytes).unwrap();
        assert_eq!(input, output);
    }

    #[test]
    fn verify_threshold() {
        let input = RoaringBitmap::from_iter(0..THRESHOLD as u32);

        // use roaring bitmap
        let mut bytes = Vec::new();
        input.serialize_into(&mut bytes).unwrap();
        let roaring_size = bytes.len();

        // use byteorder directly
        let mut bytes = Vec::new();
        for integer in input {
            bytes.write_u32::<NativeEndian>(integer).unwrap();
        }
        let bo_size = bytes.len();

        assert!(roaring_size > bo_size);
    }
}
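A worked reading of the `THRESHOLD = 7` boundary, derived from the code above (the exact roaring header size depends on its serialization format, so treat the header claim as hedged): the ByteOrder encoding costs `len * size_of::<u32>()` bytes, i.e. at most 7 × 4 = 28 bytes. `deserialize_from` branches on exactly that boundary: any buffer of 28 bytes or fewer must be ByteOrder-encoded, while a serialized `RoaringBitmap` is always longer because of its header, which is what the `verify_threshold` test asserts.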
87  milli/src/heed_codec/facet/facet_level_value_f64_codec.rs  Normal file
@@ -0,0 +1,87 @@
use std::borrow::Cow;
use std::convert::TryInto;

use crate::facet::value_encoding::f64_into_bytes;
use crate::FieldId;

// TODO do not de/serialize right bound when level = 0
pub struct FacetLevelValueF64Codec;

impl<'a> heed::BytesDecode<'a> for FacetLevelValueF64Codec {
    type DItem = (FieldId, u8, f64, f64);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (field_id, bytes) = bytes.split_first()?;
        let (level, bytes) = bytes.split_first()?;

        let (left, right) = if *level != 0 {
            let left = bytes[16..24].try_into().ok().map(f64::from_be_bytes)?;
            let right = bytes[24..].try_into().ok().map(f64::from_be_bytes)?;
            (left, right)
        } else {
            let left = bytes[8..].try_into().ok().map(f64::from_be_bytes)?;
            (left, left)
        };

        Some((*field_id, *level, left, right))
    }
}

impl heed::BytesEncode<'_> for FacetLevelValueF64Codec {
    type EItem = (FieldId, u8, f64, f64);

    fn bytes_encode((field_id, level, left, right): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut buffer = [0u8; 32];

        let len = if *level != 0 {
            // Write the globally ordered floats.
            let bytes = f64_into_bytes(*left)?;
            buffer[..8].copy_from_slice(&bytes[..]);

            let bytes = f64_into_bytes(*right)?;
            buffer[8..16].copy_from_slice(&bytes[..]);

            // Then the f64 values just to be able to read them back.
            let bytes = left.to_be_bytes();
            buffer[16..24].copy_from_slice(&bytes[..]);

            let bytes = right.to_be_bytes();
            buffer[24..].copy_from_slice(&bytes[..]);

            32 // length
        } else {
            // Write the globally ordered floats.
            let bytes = f64_into_bytes(*left)?;
            buffer[..8].copy_from_slice(&bytes[..]);

            // Then the f64 values just to be able to read them back.
            let bytes = left.to_be_bytes();
            buffer[8..16].copy_from_slice(&bytes[..]);

            16 // length
        };

        let mut bytes = Vec::with_capacity(len + 2);
        bytes.push(*field_id);
        bytes.push(*level);
        bytes.extend_from_slice(&buffer[..len]);
        Some(Cow::Owned(bytes))
    }
}

#[cfg(test)]
mod tests {
    use heed::{BytesEncode, BytesDecode};
    use super::*;

    #[test]
    fn globally_ordered_f64() {
        let bytes = FacetLevelValueF64Codec::bytes_encode(&(3, 0, 32.0, 0.0)).unwrap();
        let (name, level, left, right) = FacetLevelValueF64Codec::bytes_decode(&bytes).unwrap();
        assert_eq!((name, level, left, right), (3, 0, 32.0, 32.0));

        let bytes = FacetLevelValueF64Codec::bytes_encode(&(3, 1, -32.0, 32.0)).unwrap();
        let (name, level, left, right) = FacetLevelValueF64Codec::bytes_decode(&bytes).unwrap();
        assert_eq!((name, level, left, right), (3, 1, -32.0, 32.0));
    }
}
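For reference, the key layouts produced by the encode path above (sizes in bytes, derived from the code): the globally ordered bytes make LMDB's lexicographic key order match numeric order, while the raw f64s are kept only so the values can be read back.

level != 0: field_id (1) | level (1) | ordered left f64 (8) | ordered right f64 (8) | raw left f64 (8) | raw right f64 (8) = 34 bytes
level == 0: field_id (1) | level (1) | ordered left f64 (8) | raw left f64 (8) = 18 bytes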
44  milli/src/heed_codec/facet/facet_level_value_i64_codec.rs  Normal file
@@ -0,0 +1,44 @@
use std::borrow::Cow;
use std::convert::TryInto;

use crate::facet::value_encoding::{i64_from_bytes, i64_into_bytes};
use crate::FieldId;

pub struct FacetLevelValueI64Codec;

impl<'a> heed::BytesDecode<'a> for FacetLevelValueI64Codec {
    type DItem = (FieldId, u8, i64, i64);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (field_id, bytes) = bytes.split_first()?;
        let (level, bytes) = bytes.split_first()?;

        let left = bytes[..8].try_into().map(i64_from_bytes).ok()?;
        let right = if *level != 0 {
            bytes[8..].try_into().map(i64_from_bytes).ok()?
        } else {
            left
        };

        Some((*field_id, *level, left, right))
    }
}

impl heed::BytesEncode<'_> for FacetLevelValueI64Codec {
    type EItem = (FieldId, u8, i64, i64);

    fn bytes_encode((field_id, level, left, right): &Self::EItem) -> Option<Cow<[u8]>> {
        let left = i64_into_bytes(*left);
        let right = i64_into_bytes(*right);

        let mut bytes = Vec::with_capacity(2 + left.len() + right.len());
        bytes.push(*field_id);
        bytes.push(*level);
        bytes.extend_from_slice(&left[..]);
        if *level != 0 {
            bytes.extend_from_slice(&right[..]);
        }

        Some(Cow::Owned(bytes))
    }
}
27  milli/src/heed_codec/facet/facet_value_string_codec.rs  Normal file
@@ -0,0 +1,27 @@
use std::borrow::Cow;
use std::str;

use crate::FieldId;

pub struct FacetValueStringCodec;

impl<'a> heed::BytesDecode<'a> for FacetValueStringCodec {
    type DItem = (FieldId, &'a str);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (field_id, bytes) = bytes.split_first()?;
        let value = str::from_utf8(bytes).ok()?;
        Some((*field_id, value))
    }
}

impl<'a> heed::BytesEncode<'a> for FacetValueStringCodec {
    type EItem = (FieldId, &'a str);

    fn bytes_encode((field_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(value.len() + 1);
        bytes.push(*field_id);
        bytes.extend_from_slice(value.as_bytes());
        Some(Cow::Owned(bytes))
    }
}
36  milli/src/heed_codec/facet/field_doc_id_facet_f64_codec.rs  Normal file
@@ -0,0 +1,36 @@
use std::borrow::Cow;
use std::convert::TryInto;

use crate::{FieldId, DocumentId};
use crate::facet::value_encoding::f64_into_bytes;

pub struct FieldDocIdFacetF64Codec;

impl<'a> heed::BytesDecode<'a> for FieldDocIdFacetF64Codec {
    type DItem = (FieldId, DocumentId, f64);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (field_id, bytes) = bytes.split_first()?;

        let (document_id_bytes, bytes) = bytes.split_at(4);
        let document_id = document_id_bytes.try_into().map(u32::from_be_bytes).ok()?;

        let value = bytes[8..16].try_into().map(f64::from_be_bytes).ok()?;

        Some((*field_id, document_id, value))
    }
}

impl<'a> heed::BytesEncode<'a> for FieldDocIdFacetF64Codec {
    type EItem = (FieldId, DocumentId, f64);

    fn bytes_encode((field_id, document_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(1 + 4 + 8 + 8);
        bytes.push(*field_id);
        bytes.extend_from_slice(&document_id.to_be_bytes());
        let value_bytes = f64_into_bytes(*value)?;
        bytes.extend_from_slice(&value_bytes);
        bytes.extend_from_slice(&value.to_be_bytes());
        Some(Cow::Owned(bytes))
    }
}
34  milli/src/heed_codec/facet/field_doc_id_facet_i64_codec.rs  Normal file
@@ -0,0 +1,34 @@
use std::borrow::Cow;
use std::convert::TryInto;

use crate::facet::value_encoding::{i64_into_bytes, i64_from_bytes};
use crate::{FieldId, DocumentId};

pub struct FieldDocIdFacetI64Codec;

impl<'a> heed::BytesDecode<'a> for FieldDocIdFacetI64Codec {
    type DItem = (FieldId, DocumentId, i64);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (field_id, bytes) = bytes.split_first()?;

        let (document_id_bytes, bytes) = bytes.split_at(4);
        let document_id = document_id_bytes.try_into().map(u32::from_be_bytes).ok()?;

        let value = bytes[..8].try_into().map(i64_from_bytes).ok()?;

        Some((*field_id, document_id, value))
    }
}

impl<'a> heed::BytesEncode<'a> for FieldDocIdFacetI64Codec {
    type EItem = (FieldId, DocumentId, i64);

    fn bytes_encode((field_id, document_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(1 + 4 + 8);
        bytes.push(*field_id);
        bytes.extend_from_slice(&document_id.to_be_bytes());
        bytes.extend_from_slice(&i64_into_bytes(*value));
        Some(Cow::Owned(bytes))
    }
}
31  milli/src/heed_codec/facet/field_doc_id_facet_string_codec.rs  Normal file
@@ -0,0 +1,31 @@
use std::borrow::Cow;
use std::convert::TryInto;
use std::str;

use crate::{FieldId, DocumentId};

pub struct FieldDocIdFacetStringCodec;

impl<'a> heed::BytesDecode<'a> for FieldDocIdFacetStringCodec {
    type DItem = (FieldId, DocumentId, &'a str);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (field_id, bytes) = bytes.split_first()?;
        let (document_id_bytes, bytes) = bytes.split_at(4);
        let document_id = document_id_bytes.try_into().map(u32::from_be_bytes).ok()?;
        let value = str::from_utf8(bytes).ok()?;
        Some((*field_id, document_id, value))
    }
}

impl<'a> heed::BytesEncode<'a> for FieldDocIdFacetStringCodec {
    type EItem = (FieldId, DocumentId, &'a str);

    fn bytes_encode((field_id, document_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(1 + 4 + value.len());
        bytes.push(*field_id);
        bytes.extend_from_slice(&document_id.to_be_bytes());
        bytes.extend_from_slice(value.as_bytes());
        Some(Cow::Owned(bytes))
    }
}
13  milli/src/heed_codec/facet/mod.rs  Normal file
@@ -0,0 +1,13 @@
mod facet_level_value_f64_codec;
mod facet_level_value_i64_codec;
mod facet_value_string_codec;
mod field_doc_id_facet_f64_codec;
mod field_doc_id_facet_i64_codec;
mod field_doc_id_facet_string_codec;

pub use self::facet_level_value_f64_codec::FacetLevelValueF64Codec;
pub use self::facet_level_value_i64_codec::FacetLevelValueI64Codec;
pub use self::facet_value_string_codec::FacetValueStringCodec;
pub use self::field_doc_id_facet_f64_codec::FieldDocIdFacetF64Codec;
pub use self::field_doc_id_facet_i64_codec::FieldDocIdFacetI64Codec;
pub use self::field_doc_id_facet_string_codec::FieldDocIdFacetStringCodec;
14  milli/src/heed_codec/mod.rs  Normal file
@@ -0,0 +1,14 @@
mod beu32_str_codec;
mod bo_roaring_bitmap_codec;
mod cbo_roaring_bitmap_codec;
mod obkv_codec;
mod roaring_bitmap_codec;
mod str_str_u8_codec;
pub mod facet;

pub use self::beu32_str_codec::BEU32StrCodec;
pub use self::bo_roaring_bitmap_codec::BoRoaringBitmapCodec;
pub use self::cbo_roaring_bitmap_codec::CboRoaringBitmapCodec;
pub use self::obkv_codec::ObkvCodec;
pub use self::roaring_bitmap_codec::RoaringBitmapCodec;
pub use self::str_str_u8_codec::StrStrU8Codec;
20  milli/src/heed_codec/obkv_codec.rs  Normal file
@@ -0,0 +1,20 @@
use std::borrow::Cow;
use obkv::{KvReader, KvWriter};

pub struct ObkvCodec;

impl<'a> heed::BytesDecode<'a> for ObkvCodec {
    type DItem = KvReader<'a>;

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        Some(KvReader::new(bytes))
    }
}

impl heed::BytesEncode<'_> for ObkvCodec {
    type EItem = KvWriter<Vec<u8>>;

    fn bytes_encode(item: &Self::EItem) -> Option<Cow<[u8]>> {
        item.clone().into_inner().map(Cow::Owned).ok()
    }
}
22  milli/src/heed_codec/roaring_bitmap_codec.rs  Normal file
@@ -0,0 +1,22 @@
use std::borrow::Cow;
use roaring::RoaringBitmap;

pub struct RoaringBitmapCodec;

impl heed::BytesDecode<'_> for RoaringBitmapCodec {
    type DItem = RoaringBitmap;

    fn bytes_decode(bytes: &[u8]) -> Option<Self::DItem> {
        RoaringBitmap::deserialize_from(bytes).ok()
    }
}

impl heed::BytesEncode<'_> for RoaringBitmapCodec {
    type EItem = RoaringBitmap;

    fn bytes_encode(item: &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(item.serialized_size());
        item.serialize_into(&mut bytes).ok()?;
        Some(Cow::Owned(bytes))
    }
}
30  milli/src/heed_codec/str_str_u8_codec.rs  Normal file
@@ -0,0 +1,30 @@
use std::borrow::Cow;
use std::str;

pub struct StrStrU8Codec;

impl<'a> heed::BytesDecode<'a> for StrStrU8Codec {
    type DItem = (&'a str, &'a str, u8);

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (n, bytes) = bytes.split_last()?;
        let s1_end = bytes.iter().position(|b| *b == 0)?;
        let (s1_bytes, s2_bytes) = bytes.split_at(s1_end);
        let s1 = str::from_utf8(s1_bytes).ok()?;
        let s2 = str::from_utf8(&s2_bytes[1..]).ok()?;
        Some((s1, s2, *n))
    }
}

impl<'a> heed::BytesEncode<'a> for StrStrU8Codec {
    type EItem = (&'a str, &'a str, u8);

    fn bytes_encode((s1, s2, n): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::with_capacity(s1.len() + s2.len() + 1 + 1);
        bytes.extend_from_slice(s1.as_bytes());
        bytes.push(0);
        bytes.extend_from_slice(s2.as_bytes());
        bytes.push(*n);
        Some(Cow::Owned(bytes))
    }
}
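The key layout produced by the encode path above is `s1 | 0x00 | s2 | n`: decoding splits on the first 0x00 byte and pops the trailing u8, so this codec assumes `s1` never contains a NUL byte (`s2` may, since it is delimited positionally from the end).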
361  milli/src/index.rs  Normal file
@@ -0,0 +1,361 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::Path;

use anyhow::Context;
use heed::types::*;
use heed::{PolyDatabase, Database, RwTxn, RoTxn};
use roaring::RoaringBitmap;

use crate::facet::FacetType;
use crate::fields_ids_map::FieldsIdsMap;
use crate::{default_criteria, Criterion, Search, FacetDistribution};
use crate::{BEU32, DocumentId, FieldId, ExternalDocumentsIds};
use crate::{
    RoaringBitmapCodec, BEU32StrCodec, StrStrU8Codec, ObkvCodec,
    BoRoaringBitmapCodec, CboRoaringBitmapCodec,
};

pub const CRITERIA_KEY: &str = "criteria";
pub const DISPLAYED_FIELDS_KEY: &str = "displayed-fields";
pub const DOCUMENTS_IDS_KEY: &str = "documents-ids";
pub const FACETED_DOCUMENTS_IDS_PREFIX: &str = "faceted-documents-ids";
pub const FACETED_FIELDS_KEY: &str = "faceted-fields";
pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
pub const PRIMARY_KEY_KEY: &str = "primary-key";
pub const SEARCHABLE_FIELDS_KEY: &str = "searchable-fields";
pub const HARD_EXTERNAL_DOCUMENTS_IDS_KEY: &str = "hard-external-documents-ids";
pub const SOFT_EXTERNAL_DOCUMENTS_IDS_KEY: &str = "soft-external-documents-ids";
pub const WORDS_FST_KEY: &str = "words-fst";

#[derive(Clone)]
pub struct Index {
    /// The LMDB environment which this index is associated with.
    pub env: heed::Env,
    /// Contains many different types (e.g. the fields ids map).
    pub main: PolyDatabase,
    /// A word and all the documents ids containing the word.
    pub word_docids: Database<Str, RoaringBitmapCodec>,
    /// Maps a word and a document id (u32) to all the positions where the given word appears.
    pub docid_word_positions: Database<BEU32StrCodec, BoRoaringBitmapCodec>,
    /// Maps the proximity between a pair of words with all the docids where this relation appears.
    pub word_pair_proximity_docids: Database<StrStrU8Codec, CboRoaringBitmapCodec>,
    /// Maps the facet field id and the globally ordered value with the docids that correspond to it.
    pub facet_field_id_value_docids: Database<ByteSlice, CboRoaringBitmapCodec>,
    /// Maps the document id, the facet field id and the globally ordered value.
    pub field_id_docid_facet_values: Database<ByteSlice, Unit>,
    /// Maps the document id to the document as an obkv store.
    pub documents: Database<OwnedType<BEU32>, ObkvCodec>,
}

impl Index {
    pub fn new<P: AsRef<Path>>(mut options: heed::EnvOpenOptions, path: P) -> anyhow::Result<Index> {
        options.max_dbs(7);

        let env = options.open(path)?;
        let main = env.create_poly_database(Some("main"))?;
        let word_docids = env.create_database(Some("word-docids"))?;
        let docid_word_positions = env.create_database(Some("docid-word-positions"))?;
        let word_pair_proximity_docids = env.create_database(Some("word-pair-proximity-docids"))?;
        let facet_field_id_value_docids = env.create_database(Some("facet-field-id-value-docids"))?;
        let field_id_docid_facet_values = env.create_database(Some("field-id-docid-facet-values"))?;
        let documents = env.create_database(Some("documents"))?;

        Ok(Index {
            env,
            main,
            word_docids,
            docid_word_positions,
            word_pair_proximity_docids,
            facet_field_id_value_docids,
            field_id_docid_facet_values,
            documents,
        })
    }

    /// Create a write transaction to be able to write into the index.
    pub fn write_txn(&self) -> heed::Result<RwTxn> {
        self.env.write_txn()
    }

    /// Create a read transaction to be able to read the index.
    pub fn read_txn(&self) -> heed::Result<RoTxn> {
        self.env.read_txn()
    }

    /// Returns the canonicalized path where the heed `Env` of this `Index` lives.
    pub fn path(&self) -> &Path {
        self.env.path()
    }

    /// Returns an `EnvClosingEvent` that can be used to wait for the closing event;
    /// multiple threads can wait on it.
    ///
    /// Make sure that you drop all the copies of this `Index` you have: env closing
    /// is triggered when all references are dropped, and the last one will eventually
    /// close the environment.
    pub fn prepare_for_closing(self) -> heed::EnvClosingEvent {
        self.env.prepare_for_closing()
    }

    /* documents ids */

    /// Writes the documents ids that correspond to the user-ids-documents-ids FST.
    pub fn put_documents_ids(&self, wtxn: &mut RwTxn, docids: &RoaringBitmap) -> heed::Result<()> {
        self.main.put::<_, Str, RoaringBitmapCodec>(wtxn, DOCUMENTS_IDS_KEY, docids)
    }

    /// Returns the internal documents ids.
    pub fn documents_ids(&self, rtxn: &RoTxn) -> heed::Result<RoaringBitmap> {
        Ok(self.main.get::<_, Str, RoaringBitmapCodec>(rtxn, DOCUMENTS_IDS_KEY)?.unwrap_or_default())
    }

    /* primary key */

    /// Writes the documents primary key; this is the field name that is used to store the id.
    pub fn put_primary_key(&self, wtxn: &mut RwTxn, primary_key: &str) -> heed::Result<()> {
        self.main.put::<_, Str, Str>(wtxn, PRIMARY_KEY_KEY, &primary_key)
    }

    /// Deletes the primary key of the documents; this can be done to reset index settings.
    pub fn delete_primary_key(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, PRIMARY_KEY_KEY)
    }

    /// Returns the documents primary key, `None` if it hasn't been defined.
    pub fn primary_key<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<&'t str>> {
        self.main.get::<_, Str, Str>(rtxn, PRIMARY_KEY_KEY)
    }

    /* external documents ids */

    /// Writes the external documents ids and internal ids (i.e. `u32`).
    pub fn put_external_documents_ids<'a>(
        &self,
        wtxn: &mut RwTxn,
        external_documents_ids: &ExternalDocumentsIds<'a>,
    ) -> heed::Result<()>
    {
        let ExternalDocumentsIds { hard, soft } = external_documents_ids;
        let hard = hard.as_fst().as_bytes();
        let soft = soft.as_fst().as_bytes();
        self.main.put::<_, Str, ByteSlice>(wtxn, HARD_EXTERNAL_DOCUMENTS_IDS_KEY, hard)?;
        self.main.put::<_, Str, ByteSlice>(wtxn, SOFT_EXTERNAL_DOCUMENTS_IDS_KEY, soft)?;
        Ok(())
    }

    /// Returns the external documents ids map which associates the external ids
    /// with the internal ids (i.e. `u32`).
    pub fn external_documents_ids<'t>(&self, rtxn: &'t RoTxn) -> anyhow::Result<ExternalDocumentsIds<'t>> {
        let hard = self.main.get::<_, Str, ByteSlice>(rtxn, HARD_EXTERNAL_DOCUMENTS_IDS_KEY)?;
        let soft = self.main.get::<_, Str, ByteSlice>(rtxn, SOFT_EXTERNAL_DOCUMENTS_IDS_KEY)?;
        let hard = match hard {
            Some(hard) => fst::Map::new(hard)?.map_data(Cow::Borrowed)?,
            None => fst::Map::default().map_data(Cow::Owned)?,
        };
        let soft = match soft {
            Some(soft) => fst::Map::new(soft)?.map_data(Cow::Borrowed)?,
            None => fst::Map::default().map_data(Cow::Owned)?,
        };
        Ok(ExternalDocumentsIds::new(hard, soft))
    }

    /* fields ids map */

    /// Writes the fields ids map which associates the document keys with an internal field id
    /// (i.e. `u8`); this field id is used to identify fields in the obkv documents.
    pub fn put_fields_ids_map(&self, wtxn: &mut RwTxn, map: &FieldsIdsMap) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<FieldsIdsMap>>(wtxn, FIELDS_IDS_MAP_KEY, map)
    }

    /// Returns the fields ids map which associates the document keys with an internal field id
    /// (i.e. `u8`); this field id is used to identify fields in the obkv documents.
    pub fn fields_ids_map(&self, rtxn: &RoTxn) -> heed::Result<FieldsIdsMap> {
        Ok(self.main.get::<_, Str, SerdeJson<FieldsIdsMap>>(rtxn, FIELDS_IDS_MAP_KEY)?.unwrap_or_default())
    }

    /* displayed fields */

    /// Writes the fields that must be displayed in the defined order.
    /// There must not be any duplicate field id.
    pub fn put_displayed_fields(&self, wtxn: &mut RwTxn, fields: &[&str]) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<&[&str]>>(wtxn, DISPLAYED_FIELDS_KEY, &fields)
    }

    /// Deletes the displayed fields ids; this will make the engine display
    /// all the document attributes in the order of the `FieldsIdsMap`.
    pub fn delete_displayed_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, DISPLAYED_FIELDS_KEY)
    }

    /// Returns the displayed fields in the order they were set by the user. If it returns
    /// `None` it means that all the attributes are set as displayed in the order of the `FieldsIdsMap`.
    pub fn displayed_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<Vec<&'t str>>> {
        self.main.get::<_, Str, SerdeBincode<Vec<&'t str>>>(rtxn, DISPLAYED_FIELDS_KEY)
    }

    pub fn displayed_fields_ids(&self, rtxn: &RoTxn) -> heed::Result<Option<Vec<FieldId>>> {
        let fields_ids_map = self.fields_ids_map(rtxn)?;
        let ids = self.displayed_fields(rtxn)?
            .map(|fields| fields
                .into_iter()
                .map(|name| fields_ids_map.id(name).expect("Field not found"))
                .collect::<Vec<_>>());
        Ok(ids)
    }

    /* searchable fields */

    /// Writes the searchable fields; when this list is specified, only these fields are indexed.
    pub fn put_searchable_fields(&self, wtxn: &mut RwTxn, fields: &[&str]) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<&[&str]>>(wtxn, SEARCHABLE_FIELDS_KEY, &fields)
    }

    /// Deletes the searchable fields; when no fields are specified, all fields are indexed.
    pub fn delete_searchable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, SEARCHABLE_FIELDS_KEY)
    }

    /// Returns the searchable fields, those are the fields that are indexed;
    /// if the searchable fields aren't there it means that **all** the fields are indexed.
    pub fn searchable_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<Vec<&'t str>>> {
        self.main.get::<_, Str, SerdeBincode<Vec<&'t str>>>(rtxn, SEARCHABLE_FIELDS_KEY)
    }

    /// Identical to `searchable_fields`, but returns the ids instead.
    pub fn searchable_fields_ids(&self, rtxn: &RoTxn) -> heed::Result<Option<Vec<FieldId>>> {
        match self.searchable_fields(rtxn)? {
            Some(names) => {
                let fields_map = self.fields_ids_map(rtxn)?;
                let mut ids = Vec::new();
                for name in names {
                    let id = fields_map
                        .id(name)
                        .ok_or_else(|| format!("field id map must contain {:?}", name))
                        .expect("corrupted data: ");
                    ids.push(id);
                }
                Ok(Some(ids))
            }
            None => Ok(None),
        }
    }

    /* faceted fields */

    /// Writes the facet fields associated with their facet type or `None` if
    /// the facet type is currently unknown.
    pub fn put_faceted_fields(&self, wtxn: &mut RwTxn, fields_types: &HashMap<String, FacetType>) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<_>>(wtxn, FACETED_FIELDS_KEY, fields_types)
    }

    /// Deletes the facet fields ids associated with their facet type.
    pub fn delete_faceted_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, FACETED_FIELDS_KEY)
    }

    /// Returns the facet fields names associated with their facet type.
    pub fn faceted_fields(&self, rtxn: &RoTxn) -> heed::Result<HashMap<String, FacetType>> {
        Ok(self.main.get::<_, Str, SerdeJson<_>>(rtxn, FACETED_FIELDS_KEY)?.unwrap_or_default())
    }

    /// Same as `faceted_fields`, but returns ids instead.
    pub fn faceted_fields_ids(&self, rtxn: &RoTxn) -> heed::Result<HashMap<FieldId, FacetType>> {
        let faceted_fields = self.faceted_fields(rtxn)?;
        let fields_ids_map = self.fields_ids_map(rtxn)?;
        let faceted_fields = faceted_fields
            .iter()
            .map(|(k, v)| {
                let kid = fields_ids_map
                    .id(k)
                    .ok_or_else(|| format!("{:?} should be present in the field id map", k))
                    .expect("corrupted data: ");
                (kid, *v)
            })
            .collect();
        Ok(faceted_fields)
    }

    /* faceted documents ids */

    /// Writes the documents ids that are faceted under this field id.
    pub fn put_faceted_documents_ids(&self, wtxn: &mut RwTxn, field_id: FieldId, docids: &RoaringBitmap) -> heed::Result<()> {
        let mut buffer = [0u8; FACETED_DOCUMENTS_IDS_PREFIX.len() + 1];
        buffer[..FACETED_DOCUMENTS_IDS_PREFIX.len()].clone_from_slice(FACETED_DOCUMENTS_IDS_PREFIX.as_bytes());
        *buffer.last_mut().unwrap() = field_id;
        self.main.put::<_, ByteSlice, RoaringBitmapCodec>(wtxn, &buffer, docids)
    }

    /// Retrieves all the documents ids that are faceted under this field id.
    pub fn faceted_documents_ids(&self, rtxn: &RoTxn, field_id: FieldId) -> heed::Result<RoaringBitmap> {
        let mut buffer = [0u8; FACETED_DOCUMENTS_IDS_PREFIX.len() + 1];
        buffer[..FACETED_DOCUMENTS_IDS_PREFIX.len()].clone_from_slice(FACETED_DOCUMENTS_IDS_PREFIX.as_bytes());
        *buffer.last_mut().unwrap() = field_id;
        match self.main.get::<_, ByteSlice, RoaringBitmapCodec>(rtxn, &buffer)? {
            Some(docids) => Ok(docids),
            None => Ok(RoaringBitmap::new()),
        }
    }

    /* criteria */

    pub fn put_criteria(&self, wtxn: &mut RwTxn, criteria: &[Criterion]) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<&[Criterion]>>(wtxn, CRITERIA_KEY, &criteria)
    }

    pub fn delete_criteria(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, CRITERIA_KEY)
    }

    pub fn criteria(&self, rtxn: &RoTxn) -> heed::Result<Vec<Criterion>> {
        match self.main.get::<_, Str, SerdeJson<Vec<Criterion>>>(rtxn, CRITERIA_KEY)? {
            Some(criteria) => Ok(criteria),
            None => Ok(default_criteria()),
        }
    }

    /* words fst */

    /// Writes the FST which is the words dictionary of the engine.
    pub fn put_words_fst<A: AsRef<[u8]>>(&self, wtxn: &mut RwTxn, fst: &fst::Set<A>) -> heed::Result<()> {
        self.main.put::<_, Str, ByteSlice>(wtxn, WORDS_FST_KEY, fst.as_fst().as_bytes())
    }

    /// Returns the FST which is the words dictionary of the engine.
    pub fn words_fst<'t>(&self, rtxn: &'t RoTxn) -> anyhow::Result<fst::Set<Cow<'t, [u8]>>> {
        match self.main.get::<_, Str, ByteSlice>(rtxn, WORDS_FST_KEY)? {
            Some(bytes) => Ok(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?),
            None => Ok(fst::Set::default().map_data(Cow::Owned)?),
        }
    }

    /// Returns a [`Vec`] of the requested documents. Returns an error if a document is missing.
    pub fn documents<'t>(
        &self,
        rtxn: &'t RoTxn,
        ids: impl IntoIterator<Item=DocumentId>,
    ) -> anyhow::Result<Vec<(DocumentId, obkv::KvReader<'t>)>>
    {
        let mut documents = Vec::new();

        for id in ids {
            let kv = self.documents.get(rtxn, &BEU32::new(id))?
                .with_context(|| format!("Could not find document {}", id))?;
            documents.push((id, kv));
        }

        Ok(documents)
    }

    /// Returns the number of documents indexed in the database.
    pub fn number_of_documents(&self, rtxn: &RoTxn) -> anyhow::Result<usize> {
        Ok(self.documents_ids(rtxn).map(|docids| docids.len() as usize)?)
    }

    pub fn facets_distribution<'a>(&'a self, rtxn: &'a RoTxn) -> FacetDistribution<'a> {
        FacetDistribution::new(rtxn, self)
    }

    pub fn search<'a>(&'a self, rtxn: &'a RoTxn) -> Search<'a> {
        Search::new(rtxn, self)
    }
}
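A hypothetical usage sketch of `Index` (the path and map size are illustrative, not part of the diff; heed expects the environment directory to already exist):

use milli::Index;

fn main() -> anyhow::Result<()> {
    let mut options = heed::EnvOpenOptions::new();
    options.map_size(100 * 1024 * 1024); // 100 MiB LMDB map, illustrative
    let index = Index::new(options, "my-index.mdb")?; // assumed pre-existing directory

    let rtxn = index.read_txn()?;
    println!("{} documents indexed", index.number_of_documents(&rtxn)?);
    Ok(())
}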
148
milli/src/lib.rs
Normal file
148
milli/src/lib.rs
Normal file
|
@ -0,0 +1,148 @@
|
|||
#[macro_use] extern crate pest_derive;

mod criterion;
mod external_documents_ids;
mod fields_ids_map;
mod index;
mod mdfs;
mod query_tokens;
mod search;
mod update_store;
pub mod facet;
pub mod heed_codec;
pub mod proximity;
pub mod subcommand;
pub mod update;

use std::borrow::Cow;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;

use anyhow::Context;
use fxhash::{FxHasher32, FxHasher64};
use serde_json::{Map, Value};

pub use self::criterion::{Criterion, default_criteria};
pub use self::external_documents_ids::ExternalDocumentsIds;
pub use self::fields_ids_map::FieldsIdsMap;
pub use self::heed_codec::{BEU32StrCodec, StrStrU8Codec, ObkvCodec};
pub use self::heed_codec::{RoaringBitmapCodec, BoRoaringBitmapCodec, CboRoaringBitmapCodec};
pub use self::index::Index;
pub use self::search::{Search, FacetDistribution, FacetCondition, SearchResult};
pub use self::update_store::UpdateStore;

pub type FastMap4<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher32>>;
pub type FastMap8<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher64>>;
pub type SmallString32 = smallstr::SmallString<[u8; 32]>;
pub type SmallVec32<T> = smallvec::SmallVec<[T; 32]>;
pub type SmallVec16<T> = smallvec::SmallVec<[T; 16]>;
pub type SmallVec8<T> = smallvec::SmallVec<[T; 8]>;
pub type BEU32 = heed::zerocopy::U32<heed::byteorder::BE>;
pub type BEU64 = heed::zerocopy::U64<heed::byteorder::BE>;
pub type Attribute = u32;
pub type DocumentId = u32;
pub type FieldId = u8;
pub type Position = u32;

type MergeFn = for<'a> fn(&[u8], &[Cow<'a, [u8]>]) -> anyhow::Result<Vec<u8>>;

/// Transform a raw obkv store into a JSON Object.
pub fn obkv_to_json(
    displayed_fields: &[FieldId],
    fields_ids_map: &FieldsIdsMap,
    obkv: obkv::KvReader,
) -> anyhow::Result<Map<String, Value>>
{
    displayed_fields.iter()
        .copied()
        .flat_map(|id| obkv.get(id).map(|value| (id, value)))
        .map(|(id, value)| {
            let name = fields_ids_map.name(id).context("unknown obkv field id")?;
            let value = serde_json::from_slice(value)?;
            Ok((name.to_owned(), value))
        })
        .collect()
}

/// Transform a JSON value into a string that can be indexed.
pub fn json_to_string(value: &Value) -> Option<String> {

    fn inner(value: &Value, output: &mut String) -> bool {
        use std::fmt::Write;
        match value {
            Value::Null => false,
            Value::Bool(boolean) => write!(output, "{}", boolean).is_ok(),
            Value::Number(number) => write!(output, "{}", number).is_ok(),
            Value::String(string) => write!(output, "{}", string).is_ok(),
            Value::Array(array) => {
                let mut count = 0;
                for value in array {
                    if inner(value, output) {
                        output.push_str(". ");
                        count += 1;
                    }
                }
                // check that at least one value was written
                count != 0
            },
            Value::Object(object) => {
                let mut buffer = String::new();
                let mut count = 0;
                for (key, value) in object {
                    buffer.clear();
                    let _ = write!(&mut buffer, "{}: ", key);
                    if inner(value, &mut buffer) {
                        buffer.push_str(". ");
                        // We write the "key: value. " pair only when
                        // we are sure that the value can be written.
                        output.push_str(&buffer);
                        count += 1;
                    }
                }
                // check that at least one value was written
                count != 0
            },
        }
    }

    let mut string = String::new();
    if inner(value, &mut string) {
        Some(string)
    } else {
        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn json_to_string_object() {
        let value = json!({
            "name": "John Doe",
            "age": 43,
            "not_there": null,
        });

        let string = json_to_string(&value).unwrap();
        assert_eq!(string, "name: John Doe. age: 43. ");
    }

    #[test]
    fn json_to_string_array() {
        let value = json!([
            { "name": "John Doe" },
            43,
            "hello",
            [ "I", "am", "fine" ],
            null,
        ]);

        let string = json_to_string(&value).unwrap();
        // We don't care about having two dots (.) right after one another, as
        // the distance of hard separators is clamped to 8 anyway.
        assert_eq!(string, "name: John Doe. . 43. hello. I. am. fine. . ");
    }
}
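A hedged sketch of round-tripping a single-field document through `obkv_to_json`; the `KvWriter::new`/`insert`/`finish` calls reflect my understanding of the obkv builder API and should be treated as assumptions, as is the hypothetical `title` field:

    let mut fields_ids_map = FieldsIdsMap::new();
    let title_id = fields_ids_map.insert("title").unwrap(); // hypothetical field

    let mut buffer = Vec::new();
    let mut writer = obkv::KvWriter::new(&mut buffer); // assumed obkv builder API
    writer.insert(title_id, br#""Hello""#)?; // values are stored as raw JSON bytes
    writer.finish()?;

    let obkv = obkv::KvReader::new(&buffer);
    let json = obkv_to_json(&[title_id], &fields_ids_map, obkv)?;
    assert_eq!(json["title"], serde_json::json!("Hello"));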
22
milli/src/main.rs
Normal file
@ -0,0 +1,22 @@
use structopt::StructOpt;

use milli::subcommand::infos::{self, Opt as InfosOpt};
use milli::subcommand::search::{self, Opt as SearchOpt};

#[cfg(target_os = "linux")]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;

#[derive(Debug, StructOpt)]
#[structopt(name = "milli", about = "The milli project.")]
enum Command {
    Infos(InfosOpt),
    Search(SearchOpt),
}

fn main() -> anyhow::Result<()> {
    match Command::from_args() {
        Command::Infos(opt) => infos::run(opt),
        Command::Search(opt) => search::run(opt),
    }
}
163
milli/src/mdfs.rs
Normal file
@ -0,0 +1,163 @@
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::HashMap;
use std::mem;

use roaring::RoaringBitmap;
use crate::Index;

/// A mana depth first search implementation.
pub struct Mdfs<'a> {
    index: &'a Index,
    rtxn: &'a heed::RoTxn<'a>,
    words: &'a [(HashMap<String, (u8, RoaringBitmap)>, RoaringBitmap)],
    union_cache: HashMap<(usize, u8), RoaringBitmap>,
    candidates: RoaringBitmap,
    mana: u32,
    max_mana: u32,
}

impl<'a> Mdfs<'a> {
    pub fn new(
        index: &'a Index,
        rtxn: &'a heed::RoTxn,
        words: &'a [(HashMap<String, (u8, RoaringBitmap)>, RoaringBitmap)],
        candidates: RoaringBitmap,
    ) -> Mdfs<'a>
    {
        // Compute the number of pairs (windows) we have for this list of words.
        let mana = words.len().saturating_sub(1) as u32;
        let max_mana = mana * 8;
        Mdfs { index, rtxn, words, union_cache: HashMap::new(), candidates, mana, max_mana }
    }
}

impl<'a> Iterator for Mdfs<'a> {
    type Item = anyhow::Result<(u32, RoaringBitmap)>;

    fn next(&mut self) -> Option<Self::Item> {
        // If there is at most one word, the only possible
        // documents that we can return are the candidates.
        if self.words.len() <= 1 {
            if self.candidates.is_empty() { return None }
            return Some(Ok((0, mem::take(&mut self.candidates))));
        }

        while self.mana <= self.max_mana {
            let mut answer = RoaringBitmap::new();
            let result = mdfs_step(
                &self.index,
                &self.rtxn,
                self.mana,
                self.words,
                &self.candidates,
                &self.candidates,
                &mut self.union_cache,
                &mut answer,
            );

            match result {
                Ok(()) => {
                    // We always increase the mana for the next loop.
                    let proximity = self.mana;
                    self.mana += 1;

                    // If no documents were found we must not return and continue
                    // the search with more mana.
                    if !answer.is_empty() {

                        // We remove the answered documents from the list of
                        // candidates to be sure we don't search for them again.
                        self.candidates.difference_with(&answer);

                        // We return the answer.
                        return Some(Ok((proximity, answer)));
                    }
                },
                Err(e) => return Some(Err(e)),
            }
        }

        None
    }
}

fn mdfs_step(
    index: &Index,
    rtxn: &heed::RoTxn,
    mana: u32,
    words: &[(HashMap<String, (u8, RoaringBitmap)>, RoaringBitmap)],
    candidates: &RoaringBitmap,
    parent_docids: &RoaringBitmap,
    union_cache: &mut HashMap<(usize, u8), RoaringBitmap>,
    answer: &mut RoaringBitmap,
) -> anyhow::Result<()>
{
    use std::cmp::{min, max};

    let (words1, words2) = (&words[0].0, &words[1].0);
    let pairs = words_pair_combinations(words1, words2);
    let tail = &words[1..];
    let nb_children = tail.len() as u32 - 1;

    // The minimum amount of mana that you must consume is at least 1, plus
    // whatever mana your children cannot consume themselves. Because the last
    // child must consume the remaining mana, there must not be too much left
    // at the end.
    let min_proximity = max(1, mana.saturating_sub(nb_children * 8)) as u8;

    // The maximum amount of mana that you can use is 8, or the remaining amount
    // of mana minus one per child: you can't just consume all the mana, as each
    // of your children must keep at least 1 mana.
    let max_proximity = min(8, mana - nb_children) as u8;

    for proximity in min_proximity..=max_proximity {
        let mut docids = match union_cache.entry((words.len(), proximity)) {
            Occupied(entry) => entry.get().clone(),
            Vacant(entry) => {
                let mut docids = RoaringBitmap::new();
                if proximity == 8 {
                    docids = candidates.clone();
                } else {
                    for (w1, w2) in pairs.iter().cloned() {
                        let key = (w1, w2, proximity);
                        if let Some(di) = index.word_pair_proximity_docids.get(rtxn, &key)? {
                            docids.union_with(&di);
                        }
                    }
                }
                entry.insert(docids).clone()
            }
        };

        // We must be sure that we only return docids that are present in the candidates.
        docids.intersect_with(parent_docids);

        if !docids.is_empty() {
            let mana = mana.checked_sub(proximity as u32).unwrap();
            if tail.len() < 2 {
                // We are the last pair, we return without recursing as we don't have any child.
                answer.union_with(&docids);
                return Ok(());
            } else {
                return mdfs_step(index, rtxn, mana, tail, candidates, &docids, union_cache, answer);
            }
        }
    }

    Ok(())
}

fn words_pair_combinations<'h>(
    w1: &'h HashMap<String, (u8, RoaringBitmap)>,
    w2: &'h HashMap<String, (u8, RoaringBitmap)>,
) -> Vec<(&'h str, &'h str)>
{
    let mut pairs = Vec::new();
    for (w1, (_typos, docids1)) in w1 {
        for (w2, (_typos, docids2)) in w2 {
            if !docids1.is_disjoint(&docids2) {
                pairs.push((w1.as_str(), w2.as_str()));
            }
        }
    }
    pairs
}
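The two mana bounds computed in `mdfs_step` are worth a worked example. A minimal standalone sketch, where the `proximity_bounds` helper is hypothetical and simply restates the `min_proximity`/`max_proximity` arithmetic above:

    use std::cmp::{max, min};

    fn proximity_bounds(mana: u32, nb_children: u32) -> (u8, u8) {
        let min_p = max(1, mana.saturating_sub(nb_children * 8)) as u8;
        let max_p = min(8, mana - nb_children) as u8;
        (min_p, max_p)
    }

    fn main() {
        // Three query words form two pairs, so mana grows from 2 up to 16.
        // At mana 10, the first pair (one child behind it) may take 2 to 8.
        assert_eq!(proximity_bounds(10, 1), (2, 8));
        // At mana 2 it must take exactly 1, leaving 1 for the last pair.
        assert_eq!(proximity_bounds(2, 1), (1, 1));
    }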
28
milli/src/proximity.rs
Normal file
@ -0,0 +1,28 @@
use std::cmp;
use crate::{Attribute, Position};

const ONE_ATTRIBUTE: u32 = 1000;
const MAX_DISTANCE: u32 = 8;

pub fn index_proximity(lhs: u32, rhs: u32) -> u32 {
    if lhs <= rhs {
        cmp::min(rhs - lhs, MAX_DISTANCE)
    } else {
        cmp::min((lhs - rhs) + 1, MAX_DISTANCE)
    }
}

pub fn positions_proximity(lhs: Position, rhs: Position) -> u32 {
    let (lhs_attr, lhs_index) = extract_position(lhs);
    let (rhs_attr, rhs_index) = extract_position(rhs);
    if lhs_attr != rhs_attr { MAX_DISTANCE }
    else { index_proximity(lhs_index, rhs_index) }
}

pub fn extract_position(position: Position) -> (Attribute, Position) {
    (position / ONE_ATTRIBUTE, position % ONE_ATTRIBUTE)
}

pub fn path_proximity(path: &[Position]) -> u32 {
    path.windows(2).map(|w| positions_proximity(w[0], w[1])).sum::<u32>()
}
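A worked example of the encoding above: a position packs the attribute in the thousands and the word index in the remainder, so position 2005 means attribute 2, word index 5. A small sketch, assuming the functions above are in scope:

    assert_eq!(extract_position(2005), (2, 5));
    // Same attribute, reading order (index 5 then 7): distance 2.
    assert_eq!(positions_proximity(2005, 2007), 2);
    // Reversed order pays a one-step penalty: (5 - 3) + 1 = 3.
    assert_eq!(positions_proximity(2005, 2003), 3);
    // Positions in different attributes are clamped to MAX_DISTANCE.
    assert_eq!(positions_proximity(1001, 2001), 8);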
217
milli/src/query_tokens.rs
Normal file
@ -0,0 +1,217 @@
use meilisearch_tokenizer::{Token, TokenKind};

#[derive(Debug)]
enum State {
    Free,
    Quoted,
}

impl State {
    fn swap(&mut self) {
        match self {
            State::Quoted => *self = State::Free,
            State::Free => *self = State::Quoted,
        }
    }
}

#[derive(Debug, PartialEq, Eq)]
pub enum QueryToken<'a> {
    Free(Token<'a>),
    Quoted(Token<'a>),
}

pub fn query_tokens<'a>(mut tokens: impl Iterator<Item = Token<'a>>) -> impl Iterator<Item = QueryToken<'a>> {
    let mut state = State::Free;
    let f = move || {
        loop {
            let token = tokens.next()?;
            match token.kind() {
                _ if token.text().trim() == "\"" => state.swap(),
                TokenKind::Word => {
                    let token = match state {
                        State::Quoted => QueryToken::Quoted(token),
                        State::Free => QueryToken::Free(token),
                    };
                    return Some(token);
                },
                _ => (),
            }
        }
    };
    std::iter::from_fn(f)
}

#[cfg(test)]
mod tests {
    use super::*;
    use QueryToken::{Quoted, Free};
    use meilisearch_tokenizer::{Analyzer, AnalyzerConfig};
    use fst::Set;

    macro_rules! assert_eq_query_token {
        ($test:expr, Quoted($val:literal)) => {
            match $test {
                Quoted(val) => assert_eq!(val.text(), $val),
                Free(val) => panic!("expected Quoted(\"{}\"), found Free(\"{}\")", $val, val.text()),
            }
        };

        ($test:expr, Free($val:literal)) => {
            match $test {
                Quoted(val) => panic!("expected Free(\"{}\"), found Quoted(\"{}\")", $val, val.text()),
                Free(val) => assert_eq!(val.text(), $val),
            }
        };
    }

    #[test]
    fn empty() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert!(iter.next().is_none());

        let query = " ";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert!(iter.next().is_none());
    }

    #[test]
    fn one_quoted_string() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "\"hello\"";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Quoted("hello"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn one_pending_quoted_string() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "\"hello";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Quoted("hello"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn one_non_quoted_string() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "hello";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Free("hello"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn quoted_directly_followed_by_free_strings() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "\"hello\"world";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Quoted("hello"));
        assert_eq_query_token!(iter.next().unwrap(), Free("world"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn free_directly_followed_by_quoted_strings() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "hello\"world\"";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Free("hello"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("world"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn free_followed_by_quoted_strings() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "hello \"world\"";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Free("hello"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("world"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn multiple_spaces_separated_strings() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "hello    world   ";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Free("hello"));
        assert_eq_query_token!(iter.next().unwrap(), Free("world"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn multi_interleaved_quoted_free_strings() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "hello \"world\" coucou \"monde\"";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Free("hello"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("world"));
        assert_eq_query_token!(iter.next().unwrap(), Free("coucou"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("monde"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn multi_quoted_strings() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "\"hello world\" coucou \"monde est beau\"";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Quoted("hello"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("world"));
        assert_eq_query_token!(iter.next().unwrap(), Free("coucou"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("monde"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("est"));
        assert_eq_query_token!(iter.next().unwrap(), Quoted("beau"));
        assert!(iter.next().is_none());
    }

    #[test]
    fn chinese() {
        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let query = "汽车男生";
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let mut iter = query_tokens(tokens);
        assert_eq_query_token!(iter.next().unwrap(), Free("汽车"));
        assert_eq_query_token!(iter.next().unwrap(), Free("男生"));
        assert!(iter.next().is_none());
    }
}
756
milli/src/search/facet/facet_condition.rs
Normal file
@ -0,0 +1,756 @@
use std::collections::HashMap;
use std::fmt::Debug;
use std::ops::Bound::{self, Included, Excluded};
use std::str::FromStr;

use anyhow::Context;
use either::Either;
use heed::types::{ByteSlice, DecodeIgnore};
use log::debug;
use num_traits::Bounded;
use pest::error::{Error as PestError, ErrorVariant};
use pest::iterators::{Pair, Pairs};
use pest::Parser;
use roaring::RoaringBitmap;

use crate::facet::FacetType;
use crate::heed_codec::facet::FacetValueStringCodec;
use crate::heed_codec::facet::{FacetLevelValueI64Codec, FacetLevelValueF64Codec};
use crate::{Index, FieldId, FieldsIdsMap, CboRoaringBitmapCodec};

use super::FacetRange;
use super::parser::Rule;
use super::parser::{PREC_CLIMBER, FilterParser};

use self::FacetCondition::*;
use self::FacetNumberOperator::*;

#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FacetNumberOperator<T> {
    GreaterThan(T),
    GreaterThanOrEqual(T),
    Equal(T),
    NotEqual(T),
    LowerThan(T),
    LowerThanOrEqual(T),
    Between(T, T),
}

impl<T> FacetNumberOperator<T> {
    /// This method can return two operations in case it must express
    /// an OR operation for the between case (i.e. `TO`).
    fn negate(self) -> (Self, Option<Self>) {
        match self {
            GreaterThan(x) => (LowerThanOrEqual(x), None),
            GreaterThanOrEqual(x) => (LowerThan(x), None),
            Equal(x) => (NotEqual(x), None),
            NotEqual(x) => (Equal(x), None),
            LowerThan(x) => (GreaterThanOrEqual(x), None),
            LowerThanOrEqual(x) => (GreaterThan(x), None),
            Between(x, y) => (LowerThan(x), Some(GreaterThan(y))),
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
pub enum FacetStringOperator {
    Equal(String),
    NotEqual(String),
}

impl FacetStringOperator {
    fn equal(s: &str) -> Self {
        FacetStringOperator::Equal(s.to_lowercase())
    }

    #[allow(dead_code)]
    fn not_equal(s: &str) -> Self {
        FacetStringOperator::equal(s).negate()
    }

    fn negate(self) -> Self {
        match self {
            FacetStringOperator::Equal(x) => FacetStringOperator::NotEqual(x),
            FacetStringOperator::NotEqual(x) => FacetStringOperator::Equal(x),
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
pub enum FacetCondition {
    OperatorI64(FieldId, FacetNumberOperator<i64>),
    OperatorF64(FieldId, FacetNumberOperator<f64>),
    OperatorString(FieldId, FacetStringOperator),
    Or(Box<Self>, Box<Self>),
    And(Box<Self>, Box<Self>),
}

fn get_field_id_facet_type<'a>(
    fields_ids_map: &FieldsIdsMap,
    faceted_fields: &HashMap<FieldId, FacetType>,
    items: &mut Pairs<'a, Rule>,
) -> Result<(FieldId, FacetType), PestError<Rule>>
{
    // lexing ensures that we at least have a key
    let key = items.next().unwrap();
    let field_id = fields_ids_map
        .id(key.as_str())
        .ok_or_else(|| {
            PestError::new_from_span(
                ErrorVariant::CustomError {
                    message: format!(
                        "attribute `{}` not found, available attributes are: {}",
                        key.as_str(),
                        fields_ids_map.iter().map(|(_, n)| n).collect::<Vec<_>>().join(", ")
                    ),
                },
                key.as_span(),
            )
        })?;

    let facet_type = faceted_fields
        .get(&field_id)
        .copied()
        .ok_or_else(|| {
            PestError::new_from_span(
                ErrorVariant::CustomError {
                    message: format!(
                        "attribute `{}` is not faceted, available faceted attributes are: {}",
                        key.as_str(),
                        faceted_fields.keys().flat_map(|id| fields_ids_map.name(*id)).collect::<Vec<_>>().join(", ")
                    ),
                },
                key.as_span(),
            )
        })?;

    Ok((field_id, facet_type))
}

fn pest_parse<T>(pair: Pair<Rule>) -> Result<T, pest::error::Error<Rule>>
where T: FromStr,
      T::Err: ToString,
{
    match pair.as_str().parse() {
        Ok(value) => Ok(value),
        Err(e) => {
            Err(PestError::<Rule>::new_from_span(
                ErrorVariant::CustomError { message: e.to_string() },
                pair.as_span(),
            ))
        }
    }
}

impl FacetCondition {
    pub fn from_array<I, J, A, B>(
        rtxn: &heed::RoTxn,
        index: &Index,
        array: I,
    ) -> anyhow::Result<Option<FacetCondition>>
    where I: IntoIterator<Item=Either<J, B>>,
          J: IntoIterator<Item=A>,
          A: AsRef<str>,
          B: AsRef<str>,
    {
        fn facet_condition(
            fields_ids_map: &FieldsIdsMap,
            faceted_fields: &HashMap<String, FacetType>,
            key: &str,
            value: &str,
        ) -> anyhow::Result<FacetCondition>
        {
            let fid = fields_ids_map.id(key).with_context(|| {
                format!("{:?} isn't present in the fields ids map", key)
            })?;
            let ftype = faceted_fields.get(key).copied().with_context(|| {
                format!("{:?} isn't a faceted field", key)
            })?;
            let (neg, value) = match value.trim().strip_prefix('-') {
                Some(value) => (true, value.trim()),
                None => (false, value.trim()),
            };

            let operator = match ftype {
                FacetType::String => OperatorString(fid, FacetStringOperator::equal(value)),
                FacetType::Float => OperatorF64(fid, FacetNumberOperator::Equal(value.parse()?)),
                FacetType::Integer => OperatorI64(fid, FacetNumberOperator::Equal(value.parse()?)),
            };

            if neg { Ok(operator.negate()) } else { Ok(operator) }
        }

        let fields_ids_map = index.fields_ids_map(rtxn)?;
        let faceted_fields = index.faceted_fields(rtxn)?;
        let mut ands = None;

        for either in array {
            match either {
                Either::Left(array) => {
                    let mut ors = None;
                    for rule in array {
                        let mut iter = rule.as_ref().splitn(2, ':');
                        let key = iter.next().context("missing facet condition key")?;
                        let value = iter.next().context("missing facet condition value")?;
                        let condition = facet_condition(&fields_ids_map, &faceted_fields, key, value)?;
                        ors = match ors.take() {
                            Some(ors) => Some(Or(Box::new(ors), Box::new(condition))),
                            None => Some(condition),
                        };
                    }

                    if let Some(rule) = ors {
                        ands = match ands.take() {
                            Some(ands) => Some(And(Box::new(ands), Box::new(rule))),
                            None => Some(rule),
                        };
                    }
                },
                Either::Right(rule) => {
                    let mut iter = rule.as_ref().splitn(2, ':');
                    let key = iter.next().context("missing facet condition key")?;
                    let value = iter.next().context("missing facet condition value")?;
                    let condition = facet_condition(&fields_ids_map, &faceted_fields, key, value)?;
                    ands = match ands.take() {
                        Some(ands) => Some(And(Box::new(ands), Box::new(condition))),
                        None => Some(condition),
                    };
                }
            }
        }

        Ok(ands)
    }

    pub fn from_str(
        rtxn: &heed::RoTxn,
        index: &Index,
        expression: &str,
    ) -> anyhow::Result<FacetCondition>
    {
        let fields_ids_map = index.fields_ids_map(rtxn)?;
        let faceted_fields = index.faceted_fields_ids(rtxn)?;
        let lexed = FilterParser::parse(Rule::prgm, expression)?;
        FacetCondition::from_pairs(&fields_ids_map, &faceted_fields, lexed)
    }

    fn from_pairs(
        fim: &FieldsIdsMap,
        ff: &HashMap<FieldId, FacetType>,
        expression: Pairs<Rule>,
    ) -> anyhow::Result<Self>
    {
        PREC_CLIMBER.climb(
            expression,
            |pair: Pair<Rule>| match pair.as_rule() {
                Rule::greater => Ok(Self::greater_than(fim, ff, pair)?),
                Rule::geq => Ok(Self::greater_than_or_equal(fim, ff, pair)?),
                Rule::eq => Ok(Self::equal(fim, ff, pair)?),
                Rule::neq => Ok(Self::equal(fim, ff, pair)?.negate()),
                Rule::leq => Ok(Self::lower_than_or_equal(fim, ff, pair)?),
                Rule::less => Ok(Self::lower_than(fim, ff, pair)?),
                Rule::between => Ok(Self::between(fim, ff, pair)?),
                Rule::not => Ok(Self::from_pairs(fim, ff, pair.into_inner())?.negate()),
                Rule::prgm => Self::from_pairs(fim, ff, pair.into_inner()),
                Rule::term => Self::from_pairs(fim, ff, pair.into_inner()),
                _ => unreachable!(),
            },
            |lhs: anyhow::Result<Self>, op: Pair<Rule>, rhs: anyhow::Result<Self>| {
                match op.as_rule() {
                    Rule::or => Ok(Or(Box::new(lhs?), Box::new(rhs?))),
                    Rule::and => Ok(And(Box::new(lhs?), Box::new(rhs?))),
                    _ => unreachable!(),
                }
            },
        )
    }

    fn negate(self) -> FacetCondition {
        match self {
            OperatorI64(fid, op) => match op.negate() {
                (op, None) => OperatorI64(fid, op),
                (a, Some(b)) => Or(Box::new(OperatorI64(fid, a)), Box::new(OperatorI64(fid, b))),
            },
            OperatorF64(fid, op) => match op.negate() {
                (op, None) => OperatorF64(fid, op),
                (a, Some(b)) => Or(Box::new(OperatorF64(fid, a)), Box::new(OperatorF64(fid, b))),
            },
            OperatorString(fid, op) => OperatorString(fid, op.negate()),
            Or(a, b) => And(Box::new(a.negate()), Box::new(b.negate())),
            And(a, b) => Or(Box::new(a.negate()), Box::new(b.negate())),
        }
    }

    fn between(
        fields_ids_map: &FieldsIdsMap,
        faceted_fields: &HashMap<FieldId, FacetType>,
        item: Pair<Rule>,
    ) -> anyhow::Result<FacetCondition>
    {
        let item_span = item.as_span();
        let mut items = item.into_inner();
        let (fid, ftype) = get_field_id_facet_type(fields_ids_map, faceted_fields, &mut items)?;
        let lvalue = items.next().unwrap();
        let rvalue = items.next().unwrap();
        match ftype {
            FacetType::Integer => {
                let lvalue = pest_parse(lvalue)?;
                let rvalue = pest_parse(rvalue)?;
                Ok(OperatorI64(fid, Between(lvalue, rvalue)))
            },
            FacetType::Float => {
                let lvalue = pest_parse(lvalue)?;
                let rvalue = pest_parse(rvalue)?;
                Ok(OperatorF64(fid, Between(lvalue, rvalue)))
            },
            FacetType::String => {
                Err(PestError::<Rule>::new_from_span(
                    ErrorVariant::CustomError {
                        message: "invalid operator on a faceted string".to_string(),
                    },
                    item_span,
                ).into())
            },
        }
    }

    fn equal(
        fields_ids_map: &FieldsIdsMap,
        faceted_fields: &HashMap<FieldId, FacetType>,
        item: Pair<Rule>,
    ) -> anyhow::Result<FacetCondition>
    {
        let mut items = item.into_inner();
        let (fid, ftype) = get_field_id_facet_type(fields_ids_map, faceted_fields, &mut items)?;
        let value = items.next().unwrap();
        match ftype {
            FacetType::Integer => Ok(OperatorI64(fid, Equal(pest_parse(value)?))),
            FacetType::Float => Ok(OperatorF64(fid, Equal(pest_parse(value)?))),
            FacetType::String => Ok(OperatorString(fid, FacetStringOperator::equal(value.as_str()))),
        }
    }

    fn greater_than(
        fields_ids_map: &FieldsIdsMap,
        faceted_fields: &HashMap<FieldId, FacetType>,
        item: Pair<Rule>,
    ) -> anyhow::Result<FacetCondition>
    {
        let item_span = item.as_span();
        let mut items = item.into_inner();
        let (fid, ftype) = get_field_id_facet_type(fields_ids_map, faceted_fields, &mut items)?;
        let value = items.next().unwrap();
        match ftype {
            FacetType::Integer => Ok(OperatorI64(fid, GreaterThan(pest_parse(value)?))),
            FacetType::Float => Ok(OperatorF64(fid, GreaterThan(pest_parse(value)?))),
            FacetType::String => {
                Err(PestError::<Rule>::new_from_span(
                    ErrorVariant::CustomError {
                        message: "invalid operator on a faceted string".to_string(),
                    },
                    item_span,
                ).into())
            },
        }
    }

    fn greater_than_or_equal(
        fields_ids_map: &FieldsIdsMap,
        faceted_fields: &HashMap<FieldId, FacetType>,
        item: Pair<Rule>,
    ) -> anyhow::Result<FacetCondition>
    {
        let item_span = item.as_span();
        let mut items = item.into_inner();
        let (fid, ftype) = get_field_id_facet_type(fields_ids_map, faceted_fields, &mut items)?;
        let value = items.next().unwrap();
        match ftype {
            FacetType::Integer => Ok(OperatorI64(fid, GreaterThanOrEqual(pest_parse(value)?))),
            FacetType::Float => Ok(OperatorF64(fid, GreaterThanOrEqual(pest_parse(value)?))),
            FacetType::String => {
                Err(PestError::<Rule>::new_from_span(
                    ErrorVariant::CustomError {
                        message: "invalid operator on a faceted string".to_string(),
                    },
                    item_span,
                ).into())
            },
        }
    }

    fn lower_than(
        fields_ids_map: &FieldsIdsMap,
        faceted_fields: &HashMap<FieldId, FacetType>,
        item: Pair<Rule>,
    ) -> anyhow::Result<FacetCondition>
    {
        let item_span = item.as_span();
        let mut items = item.into_inner();
        let (fid, ftype) = get_field_id_facet_type(fields_ids_map, faceted_fields, &mut items)?;
        let value = items.next().unwrap();
        match ftype {
            FacetType::Integer => Ok(OperatorI64(fid, LowerThan(pest_parse(value)?))),
            FacetType::Float => Ok(OperatorF64(fid, LowerThan(pest_parse(value)?))),
            FacetType::String => {
                Err(PestError::<Rule>::new_from_span(
                    ErrorVariant::CustomError {
                        message: "invalid operator on a faceted string".to_string(),
                    },
                    item_span,
                ).into())
            },
        }
    }

    fn lower_than_or_equal(
        fields_ids_map: &FieldsIdsMap,
        faceted_fields: &HashMap<FieldId, FacetType>,
        item: Pair<Rule>,
    ) -> anyhow::Result<FacetCondition>
    {
        let item_span = item.as_span();
        let mut items = item.into_inner();
        let (fid, ftype) = get_field_id_facet_type(fields_ids_map, faceted_fields, &mut items)?;
        let value = items.next().unwrap();
        match ftype {
            FacetType::Integer => Ok(OperatorI64(fid, LowerThanOrEqual(pest_parse(value)?))),
            FacetType::Float => Ok(OperatorF64(fid, LowerThanOrEqual(pest_parse(value)?))),
            FacetType::String => {
                Err(PestError::<Rule>::new_from_span(
                    ErrorVariant::CustomError {
                        message: "invalid operator on a faceted string".to_string(),
                    },
                    item_span,
                ).into())
            },
        }
    }
}

impl FacetCondition {
    /// Aggregates the documents ids that are part of the specified range automatically
    /// going deeper through the levels.
    fn explore_facet_levels<'t, T: 't, KC>(
        rtxn: &'t heed::RoTxn,
        db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
        field_id: FieldId,
        level: u8,
        left: Bound<T>,
        right: Bound<T>,
        output: &mut RoaringBitmap,
    ) -> anyhow::Result<()>
    where
        T: Copy + PartialEq + PartialOrd + Bounded + Debug,
        KC: heed::BytesDecode<'t, DItem = (u8, u8, T, T)>,
        KC: for<'x> heed::BytesEncode<'x, EItem = (u8, u8, T, T)>,
    {
        match (left, right) {
            // If the request is an exact value we must go directly to the deepest level.
            (Included(l), Included(r)) if l == r && level > 0 => {
                return Self::explore_facet_levels::<T, KC>(rtxn, db, field_id, 0, left, right, output);
            },
            // lower TO upper when lower > upper must return no result
            (Included(l), Included(r)) if l > r => return Ok(()),
            (Included(l), Excluded(r)) if l >= r => return Ok(()),
            (Excluded(l), Excluded(r)) if l >= r => return Ok(()),
            (Excluded(l), Included(r)) if l >= r => return Ok(()),
            (_, _) => (),
        }

        let mut left_found = None;
        let mut right_found = None;

        // We must create a custom iterator to be able to iterate over the
        // requested range as the range iterator cannot express some conditions.
        let iter = FacetRange::new(rtxn, db.remap_key_type::<KC>(), field_id, level, left, right)?;

        debug!("Iterating between {:?} and {:?} (level {})", left, right, level);

        for (i, result) in iter.enumerate() {
            let ((_fid, level, l, r), docids) = result?;
            debug!("{:?} to {:?} (level {}) found {} documents", l, r, level, docids.len());
            output.union_with(&docids);
            // We save the leftmost and rightmost bounds we actually found at this level.
            if i == 0 { left_found = Some(l); }
            right_found = Some(r);
        }

        // Can we go deeper?
        let deeper_level = match level.checked_sub(1) {
            Some(level) => level,
            None => return Ok(()),
        };

        // We must refine the left and right bounds of this range by retrieving the
        // missing part in a deeper level.
        match left_found.zip(right_found) {
            Some((left_found, right_found)) => {
                // If the bound is satisfied we avoid calling this function again.
                if !matches!(left, Included(l) if l == left_found) {
                    let sub_right = Excluded(left_found);
                    debug!("calling left with {:?} to {:?} (level {})", left, sub_right, deeper_level);
                    Self::explore_facet_levels::<T, KC>(rtxn, db, field_id, deeper_level, left, sub_right, output)?;
                }
                if !matches!(right, Included(r) if r == right_found) {
                    let sub_left = Excluded(right_found);
                    debug!("calling right with {:?} to {:?} (level {})", sub_left, right, deeper_level);
                    Self::explore_facet_levels::<T, KC>(rtxn, db, field_id, deeper_level, sub_left, right, output)?;
                }
            },
            None => {
                // If we found nothing at this level it means that we must find
                // the same bounds but at a deeper, more precise level.
                Self::explore_facet_levels::<T, KC>(rtxn, db, field_id, deeper_level, left, right, output)?;
            },
        }

        Ok(())
    }

    fn evaluate_number_operator<'t, T: 't, KC>(
        rtxn: &'t heed::RoTxn,
        index: &Index,
        db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
        field_id: FieldId,
        operator: FacetNumberOperator<T>,
    ) -> anyhow::Result<RoaringBitmap>
    where
        T: Copy + PartialEq + PartialOrd + Bounded + Debug,
        KC: heed::BytesDecode<'t, DItem = (u8, u8, T, T)>,
        KC: for<'x> heed::BytesEncode<'x, EItem = (u8, u8, T, T)>,
    {
        // Make sure we always bound the ranges with the field id and the level,
        // as the facets values are all in the same database and prefixed by the
        // field id and the level.
        let (left, right) = match operator {
            GreaterThan(val) => (Excluded(val), Included(T::max_value())),
            GreaterThanOrEqual(val) => (Included(val), Included(T::max_value())),
            Equal(val) => (Included(val), Included(val)),
            NotEqual(val) => {
                let all_documents_ids = index.faceted_documents_ids(rtxn, field_id)?;
                let docids = Self::evaluate_number_operator::<T, KC>(rtxn, index, db, field_id, Equal(val))?;
                return Ok(all_documents_ids - docids);
            },
            LowerThan(val) => (Included(T::min_value()), Excluded(val)),
            LowerThanOrEqual(val) => (Included(T::min_value()), Included(val)),
            Between(left, right) => (Included(left), Included(right)),
        };

        // Ask for the biggest value that can exist for this specific field; it's
        // fine if it doesn't exist, as the value just before will be returned instead.
        let biggest_level = db
            .remap_types::<KC, DecodeIgnore>()
            .get_lower_than_or_equal_to(rtxn, &(field_id, u8::MAX, T::max_value(), T::max_value()))?
            .and_then(|((id, level, _, _), _)| if id == field_id { Some(level) } else { None });

        match biggest_level {
            Some(level) => {
                let mut output = RoaringBitmap::new();
                Self::explore_facet_levels::<T, KC>(rtxn, db, field_id, level, left, right, &mut output)?;
                Ok(output)
            },
            None => Ok(RoaringBitmap::new()),
        }
    }

    fn evaluate_string_operator(
        rtxn: &heed::RoTxn,
        index: &Index,
        db: heed::Database<FacetValueStringCodec, CboRoaringBitmapCodec>,
        field_id: FieldId,
        operator: &FacetStringOperator,
    ) -> anyhow::Result<RoaringBitmap>
    {
        match operator {
            FacetStringOperator::Equal(string) => {
                match db.get(rtxn, &(field_id, string))? {
                    Some(docids) => Ok(docids),
                    None => Ok(RoaringBitmap::new())
                }
            },
            FacetStringOperator::NotEqual(string) => {
                let all_documents_ids = index.faceted_documents_ids(rtxn, field_id)?;
                let op = FacetStringOperator::Equal(string.clone());
                let docids = Self::evaluate_string_operator(rtxn, index, db, field_id, &op)?;
                Ok(all_documents_ids - docids)
            },
        }
    }

    pub fn evaluate(
        &self,
        rtxn: &heed::RoTxn,
        index: &Index,
    ) -> anyhow::Result<RoaringBitmap>
    {
        let db = index.facet_field_id_value_docids;
        match self {
            OperatorI64(fid, op) => {
                Self::evaluate_number_operator::<i64, FacetLevelValueI64Codec>(rtxn, index, db, *fid, *op)
            },
            OperatorF64(fid, op) => {
                Self::evaluate_number_operator::<f64, FacetLevelValueF64Codec>(rtxn, index, db, *fid, *op)
            },
            OperatorString(fid, op) => {
                let db = db.remap_key_type::<FacetValueStringCodec>();
                Self::evaluate_string_operator(rtxn, index, db, *fid, op)
            },
            Or(lhs, rhs) => {
                let lhs = lhs.evaluate(rtxn, index)?;
                let rhs = rhs.evaluate(rtxn, index)?;
                Ok(lhs | rhs)
            },
            And(lhs, rhs) => {
                let lhs = lhs.evaluate(rtxn, index)?;
                let rhs = rhs.evaluate(rtxn, index)?;
                Ok(lhs & rhs)
            },
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::update::Settings;
    use heed::EnvOpenOptions;
    use maplit::hashmap;

    #[test]
    fn string() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // Set the faceted fields to be the channel.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_faceted_fields(hashmap!{ "channel".into() => "string".into() });
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Test that the facet condition is correctly generated.
        let rtxn = index.read_txn().unwrap();
        let condition = FacetCondition::from_str(&rtxn, &index, "channel = Ponce").unwrap();
        let expected = OperatorString(0, FacetStringOperator::equal("Ponce"));
        assert_eq!(condition, expected);

        let condition = FacetCondition::from_str(&rtxn, &index, "channel != ponce").unwrap();
        let expected = OperatorString(0, FacetStringOperator::not_equal("ponce"));
        assert_eq!(condition, expected);

        let condition = FacetCondition::from_str(&rtxn, &index, "NOT channel = ponce").unwrap();
        let expected = OperatorString(0, FacetStringOperator::not_equal("ponce"));
        assert_eq!(condition, expected);
    }

    #[test]
    fn i64() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // Set the faceted fields to be the channel.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_faceted_fields(hashmap!{ "timestamp".into() => "integer".into() });
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Test that the facet condition is correctly generated.
        let rtxn = index.read_txn().unwrap();
        let condition = FacetCondition::from_str(&rtxn, &index, "timestamp 22 TO 44").unwrap();
        let expected = OperatorI64(0, Between(22, 44));
        assert_eq!(condition, expected);

        let condition = FacetCondition::from_str(&rtxn, &index, "NOT timestamp 22 TO 44").unwrap();
        let expected = Or(
            Box::new(OperatorI64(0, LowerThan(22))),
            Box::new(OperatorI64(0, GreaterThan(44))),
        );
        assert_eq!(condition, expected);
    }

    #[test]
    fn parentheses() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // Set the faceted fields to be the channel.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_searchable_fields(vec!["channel".into(), "timestamp".into()]); // to keep the fields order
        builder.set_faceted_fields(hashmap!{
            "channel".into() => "string".into(),
            "timestamp".into() => "integer".into(),
        });
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Test that the facet condition is correctly generated.
        let rtxn = index.read_txn().unwrap();
        let condition = FacetCondition::from_str(
            &rtxn, &index,
            "channel = gotaga OR (timestamp 22 TO 44 AND channel != ponce)",
        ).unwrap();
        let expected = Or(
            Box::new(OperatorString(0, FacetStringOperator::equal("gotaga"))),
            Box::new(And(
                Box::new(OperatorI64(1, Between(22, 44))),
                Box::new(OperatorString(0, FacetStringOperator::not_equal("ponce"))),
            ))
        );
        assert_eq!(condition, expected);

        let condition = FacetCondition::from_str(
            &rtxn, &index,
            "channel = gotaga OR NOT (timestamp 22 TO 44 AND channel != ponce)",
        ).unwrap();
        let expected = Or(
            Box::new(OperatorString(0, FacetStringOperator::equal("gotaga"))),
            Box::new(Or(
                Box::new(Or(
                    Box::new(OperatorI64(1, LowerThan(22))),
                    Box::new(OperatorI64(1, GreaterThan(44))),
                )),
                Box::new(OperatorString(0, FacetStringOperator::equal("ponce"))),
            )),
        );
        assert_eq!(condition, expected);
    }

    #[test]
    fn from_array() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // Set the faceted fields to be the channel.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_searchable_fields(vec!["channel".into(), "timestamp".into()]); // to keep the fields order
        builder.set_faceted_fields(hashmap!{
            "channel".into() => "string".into(),
            "timestamp".into() => "integer".into(),
        });
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Test that the facet condition is correctly generated.
        let rtxn = index.read_txn().unwrap();
        let condition = FacetCondition::from_array(
            &rtxn, &index,
            vec![Either::Right("channel:gotaga"), Either::Left(vec!["timestamp:44", "channel:-ponce"])],
        ).unwrap().unwrap();
        let expected = FacetCondition::from_str(
            &rtxn, &index,
            "channel = gotaga AND (timestamp = 44 OR channel != ponce)",
        ).unwrap();
        assert_eq!(condition, expected);
    }
}
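End to end, parsing and evaluating a filter looks roughly like this; a minimal sketch, assuming an already opened `Index` named `index` whose `channel` field is declared as a string facet:

    let rtxn = index.read_txn()?;
    let condition = FacetCondition::from_str(&rtxn, &index, "channel = gotaga")?;
    let docids = condition.evaluate(&rtxn, &index)?;
    println!("{} documents match", docids.len());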
260
milli/src/search/facet/facet_distribution.rs
Normal file
@ -0,0 +1,260 @@
use std::collections::{HashSet, BTreeMap};
use std::ops::Bound::Unbounded;
use std::{cmp, fmt};

use anyhow::Context;
use heed::BytesDecode;
use roaring::RoaringBitmap;

use crate::facet::{FacetType, FacetValue};
use crate::heed_codec::facet::{FacetValueStringCodec, FacetLevelValueF64Codec, FacetLevelValueI64Codec};
use crate::heed_codec::facet::{FieldDocIdFacetStringCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetI64Codec};
use crate::search::facet::{FacetIter, FacetRange};
use crate::{Index, FieldId, DocumentId};

/// The default number of values by facets that will
/// be fetched from the key-value store.
const DEFAULT_VALUES_BY_FACET: usize = 100;

/// The hard limit in the number of values by facets that will be fetched from
/// the key-value store. Searching for more values could slow down the engine.
const MAX_VALUES_BY_FACET: usize = 1000;

/// Threshold on the number of candidates that makes
/// the system choose between one algorithm or another.
const CANDIDATES_THRESHOLD: u64 = 1000;

pub struct FacetDistribution<'a> {
    facets: Option<HashSet<String>>,
    candidates: Option<RoaringBitmap>,
    max_values_by_facet: usize,
    rtxn: &'a heed::RoTxn<'a>,
    index: &'a Index,
}

impl<'a> FacetDistribution<'a> {
    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index) -> FacetDistribution<'a> {
        FacetDistribution {
            facets: None,
            candidates: None,
            max_values_by_facet: DEFAULT_VALUES_BY_FACET,
            rtxn,
            index,
        }
    }

    pub fn facets<I: IntoIterator<Item=A>, A: AsRef<str>>(&mut self, names: I) -> &mut Self {
        self.facets = Some(names.into_iter().map(|s| s.as_ref().to_string()).collect());
        self
    }

    pub fn candidates(&mut self, candidates: RoaringBitmap) -> &mut Self {
        self.candidates = Some(candidates);
        self
    }

    pub fn max_values_by_facet(&mut self, max: usize) -> &mut Self {
        self.max_values_by_facet = cmp::min(max, MAX_VALUES_BY_FACET);
        self
    }

    /// There is a small amount of candidates, OR we ask for facet string values, so we
    /// decide to iterate over the facet values of each one of them, one by one.
    fn facet_values_from_documents(
        &self,
        field_id: FieldId,
        facet_type: FacetType,
        candidates: &RoaringBitmap,
    ) -> heed::Result<BTreeMap<FacetValue, u64>>
    {
        fn fetch_facet_values<'t, KC, K: 't>(
            index: &Index,
            rtxn: &'t heed::RoTxn,
            field_id: FieldId,
            candidates: &RoaringBitmap,
        ) -> heed::Result<BTreeMap<FacetValue, u64>>
        where
            KC: BytesDecode<'t, DItem = (FieldId, DocumentId, K)>,
            K: Into<FacetValue>,
        {
            let mut facet_values = BTreeMap::new();
            let mut key_buffer = vec![field_id];

            for docid in candidates.into_iter().take(CANDIDATES_THRESHOLD as usize) {
                key_buffer.truncate(1);
                key_buffer.extend_from_slice(&docid.to_be_bytes());
                let iter = index.field_id_docid_facet_values
                    .prefix_iter(rtxn, &key_buffer)?
                    .remap_key_type::<KC>();

                for result in iter {
                    let ((_, _, value), ()) = result?;
                    *facet_values.entry(value.into()).or_insert(0) += 1;
                }
            }

            Ok(facet_values)
        }

        let index = self.index;
        let rtxn = self.rtxn;
        match facet_type {
            FacetType::String => {
                fetch_facet_values::<FieldDocIdFacetStringCodec, _>(index, rtxn, field_id, candidates)
            },
            FacetType::Float => {
                fetch_facet_values::<FieldDocIdFacetF64Codec, _>(index, rtxn, field_id, candidates)
            },
            FacetType::Integer => {
                fetch_facet_values::<FieldDocIdFacetI64Codec, _>(index, rtxn, field_id, candidates)
            },
        }
    }

    /// There are too many documents, so we use the facet levels to move through
    /// the facet values and find the associated candidates.
    fn facet_values_from_facet_levels(
        &self,
        field_id: FieldId,
        facet_type: FacetType,
        candidates: &RoaringBitmap,
    ) -> heed::Result<BTreeMap<FacetValue, u64>>
    {
        let iter = match facet_type {
            FacetType::String => unreachable!(),
            FacetType::Float => {
                let iter = FacetIter::<f64, FacetLevelValueF64Codec>::new_non_reducing(
                    self.rtxn, self.index, field_id, candidates.clone(),
                )?;
                let iter = iter.map(|r| r.map(|(v, docids)| (FacetValue::from(v), docids)));
                Box::new(iter) as Box::<dyn Iterator<Item=_>>
            },
            FacetType::Integer => {
                let iter = FacetIter::<i64, FacetLevelValueI64Codec>::new_non_reducing(
                    self.rtxn, self.index, field_id, candidates.clone(),
                )?;
                Box::new(iter.map(|r| r.map(|(v, docids)| (FacetValue::from(v), docids))))
            },
        };

        let mut facet_values = BTreeMap::new();
        for result in iter {
            let (value, mut docids) = result?;
            docids.intersect_with(candidates);
            if !docids.is_empty() {
                facet_values.insert(value, docids.len());
            }
            if facet_values.len() == self.max_values_by_facet {
                break;
            }
        }

        Ok(facet_values)
    }

    /// Placeholder search, a.k.a. no candidates were specified. We iterate through the
    /// facet values one by one and iterate on the facet level 0 for numbers.
    fn facet_values_from_raw_facet_database(
        &self,
        field_id: FieldId,
        facet_type: FacetType,
    ) -> heed::Result<BTreeMap<FacetValue, u64>>
    {
        let db = self.index.facet_field_id_value_docids;
        let level = 0;
        let iter = match facet_type {
            FacetType::String => {
                let iter = db
                    .prefix_iter(self.rtxn, &[field_id])?
                    .remap_key_type::<FacetValueStringCodec>()
                    .map(|r| r.map(|((_, v), docids)| (FacetValue::from(v), docids)));
                Box::new(iter) as Box::<dyn Iterator<Item=_>>
            },
            FacetType::Float => {
                let db = db.remap_key_type::<FacetLevelValueF64Codec>();
                let range = FacetRange::<f64, _>::new(
                    self.rtxn, db, field_id, level, Unbounded, Unbounded,
                )?;
                Box::new(range.map(|r| r.map(|((_, _, v, _), docids)| (FacetValue::from(v), docids))))
            },
            FacetType::Integer => {
                let db = db.remap_key_type::<FacetLevelValueI64Codec>();
                let range = FacetRange::<i64, _>::new(
                    self.rtxn, db, field_id, level, Unbounded, Unbounded,
                )?;
                Box::new(range.map(|r| r.map(|((_, _, v, _), docids)| (FacetValue::from(v), docids))))
            },
        };

        let mut facet_values = BTreeMap::new();
        for result in iter {
            let (value, docids) = result?;
            facet_values.insert(value, docids.len());
            if facet_values.len() == self.max_values_by_facet {
                break;
            }
        }

        Ok(facet_values)
    }

    fn facet_values(
        &self,
        field_id: FieldId,
        facet_type: FacetType,
    ) -> heed::Result<BTreeMap<FacetValue, u64>>
    {
        if let Some(candidates) = self.candidates.as_ref() {
            // Classic search, candidates were specified, we must return facet values only related
            // to those candidates. We also enter here for facet strings for performance reasons.
            if candidates.len() <= CANDIDATES_THRESHOLD || facet_type == FacetType::String {
                self.facet_values_from_documents(field_id, facet_type, candidates)
            } else {
                self.facet_values_from_facet_levels(field_id, facet_type, candidates)
            }
        } else {
            self.facet_values_from_raw_facet_database(field_id, facet_type)
        }
    }

    pub fn execute(&self) -> anyhow::Result<BTreeMap<String, BTreeMap<FacetValue, u64>>> {
        let fields_ids_map = self.index.fields_ids_map(self.rtxn)?;
        let faceted_fields = self.index.faceted_fields(self.rtxn)?;
        let fields_ids: Vec<_> = match &self.facets {
            Some(names) => names
                .iter()
                .filter_map(|n| faceted_fields.get(n).map(|t| (n.to_string(), *t)))
                .collect(),
            None => faceted_fields.into_iter().collect(),
        };

        let mut facets_values = BTreeMap::new();
        for (name, ftype) in fields_ids {
            let fid = fields_ids_map.id(&name).with_context(|| {
                format!("missing field name {:?} from the fields id map", name)
            })?;
            let values = self.facet_values(fid, ftype)?;
            facets_values.insert(name, values);
        }

        Ok(facets_values)
    }
}

impl fmt::Debug for FacetDistribution<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let FacetDistribution {
            facets,
            candidates,
            max_values_by_facet,
            rtxn: _,
            index: _,
        } = self;

        f.debug_struct("FacetDistribution")
            .field("facets", facets)
            .field("candidates", candidates)
            .field("max_values_by_facet", max_values_by_facet)
            .finish()
    }
}
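A minimal usage sketch of the builder above, assuming an already opened `Index` named `index` with at least one faceted field configured:

    let rtxn = index.read_txn()?;
    let distribution = index.facets_distribution(&rtxn)
        .facets(vec!["channel"])     // restrict to one facet
        .max_values_by_facet(10)     // clamped to MAX_VALUES_BY_FACET
        .execute()?;

    for (facet, values) in distribution {
        println!("{}: {:?}", facet, values);
    }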
29
milli/src/search/facet/grammar.pest
Normal file
@ -0,0 +1,29 @@
key = _{quoted | word}
value = _{quoted | word}
quoted = _{ (PUSH("'") | PUSH("\"")) ~ string ~ POP }
string = {char*}
word = ${(LETTER | NUMBER | "_" | "-" | ".")+}

char = _{ !(PEEK | "\\") ~ ANY
    | "\\" ~ (PEEK | "\\" | "/" | "b" | "f" | "n" | "r" | "t")
    | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4})}

condition = _{between | eq | greater | less | geq | leq | neq}
between = {key ~ value ~ "TO" ~ value}
geq = {key ~ ">=" ~ value}
leq = {key ~ "<=" ~ value}
neq = {key ~ "!=" ~ value}
eq = {key ~ "=" ~ value}
greater = {key ~ ">" ~ value}
less = {key ~ "<" ~ value}

prgm = {SOI ~ expr ~ EOI}
expr = _{ ( term ~ (operation ~ term)* ) }
term = { ("(" ~ expr ~ ")") | condition | not }
operation = _{ and | or }
and = {"AND"}
or = {"OR"}

not = {"NOT" ~ term}

WHITESPACE = _{ " " }
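To make the grammar concrete, here is a hedged sketch of parsing a filter string with the `FilterParser` declared in parser.rs below; the filter string and field names are invented for illustration, and only the standard `pest::Parser` API is used.

    use pest::Parser;

    fn main() {
        // `release_date`, `genre` and `price` are made-up field names.
        let input = r#"(release_date >= 1576800000 AND genre = "sci-fi") OR NOT price > 49.99"#;
        match FilterParser::parse(Rule::prgm, input) {
            Ok(pairs) => for pair in pairs.flatten() {
                // Walk every parsed rule with the slice of input it matched.
                println!("{:?}: {:?}", pair.as_rule(), pair.as_str());
            },
            Err(e) => eprintln!("invalid filter: {}", e),
        }
    }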
286  milli/src/search/facet/mod.rs  Normal file
@@ -0,0 +1,286 @@
use std::fmt::Debug;
use std::ops::Bound::{self, Included, Excluded, Unbounded};

use either::Either::{self, Left, Right};
use heed::types::{DecodeIgnore, ByteSlice};
use heed::{BytesEncode, BytesDecode};
use heed::{Database, RoRange, RoRevRange, LazyDecode};
use log::debug;
use num_traits::Bounded;
use roaring::RoaringBitmap;

use crate::heed_codec::CboRoaringBitmapCodec;
use crate::{Index, FieldId};

pub use self::facet_condition::{FacetCondition, FacetNumberOperator, FacetStringOperator};
pub use self::facet_distribution::FacetDistribution;

mod facet_condition;
mod facet_distribution;
mod parser;

pub struct FacetRange<'t, T: 't, KC> {
    iter: RoRange<'t, KC, LazyDecode<CboRoaringBitmapCodec>>,
    end: Bound<T>,
}

impl<'t, T: 't, KC> FacetRange<'t, T, KC>
where
    KC: for<'a> BytesEncode<'a, EItem = (FieldId, u8, T, T)>,
    T: PartialOrd + Copy + Bounded,
{
    pub fn new(
        rtxn: &'t heed::RoTxn,
        db: Database<KC, CboRoaringBitmapCodec>,
        field_id: FieldId,
        level: u8,
        left: Bound<T>,
        right: Bound<T>,
    ) -> heed::Result<FacetRange<'t, T, KC>>
    {
        let left_bound = match left {
            Included(left) => Included((field_id, level, left, T::min_value())),
            Excluded(left) => Excluded((field_id, level, left, T::min_value())),
            Unbounded => Included((field_id, level, T::min_value(), T::min_value())),
        };
        let right_bound = Included((field_id, level, T::max_value(), T::max_value()));
        let iter = db.lazily_decode_data().range(rtxn, &(left_bound, right_bound))?;
        Ok(FacetRange { iter, end: right })
    }
}

impl<'t, T, KC> Iterator for FacetRange<'t, T, KC>
where
    KC: for<'a> BytesEncode<'a, EItem = (FieldId, u8, T, T)>,
    KC: BytesDecode<'t, DItem = (FieldId, u8, T, T)>,
    T: PartialOrd + Copy,
{
    type Item = heed::Result<((FieldId, u8, T, T), RoaringBitmap)>;

    fn next(&mut self) -> Option<Self::Item> {
        match self.iter.next() {
            Some(Ok(((fid, level, left, right), docids))) => {
                let must_be_returned = match self.end {
                    Included(end) => right <= end,
                    Excluded(end) => right < end,
                    Unbounded => true,
                };
                if must_be_returned {
                    match docids.decode() {
                        Ok(docids) => Some(Ok(((fid, level, left, right), docids))),
                        Err(e) => Some(Err(e)),
                    }
                } else {
                    None
                }
            },
            Some(Err(e)) => Some(Err(e)),
            None => None,
        }
    }
}

pub struct FacetRevRange<'t, T: 't, KC> {
    iter: RoRevRange<'t, KC, LazyDecode<CboRoaringBitmapCodec>>,
    end: Bound<T>,
}

impl<'t, T: 't, KC> FacetRevRange<'t, T, KC>
where
    KC: for<'a> BytesEncode<'a, EItem = (FieldId, u8, T, T)>,
    T: PartialOrd + Copy + Bounded,
{
    pub fn new(
        rtxn: &'t heed::RoTxn,
        db: Database<KC, CboRoaringBitmapCodec>,
        field_id: FieldId,
        level: u8,
        left: Bound<T>,
        right: Bound<T>,
    ) -> heed::Result<FacetRevRange<'t, T, KC>>
    {
        let left_bound = match left {
            Included(left) => Included((field_id, level, left, T::min_value())),
            Excluded(left) => Excluded((field_id, level, left, T::min_value())),
            Unbounded => Included((field_id, level, T::min_value(), T::min_value())),
        };
        let right_bound = Included((field_id, level, T::max_value(), T::max_value()));
        let iter = db.lazily_decode_data().rev_range(rtxn, &(left_bound, right_bound))?;
        Ok(FacetRevRange { iter, end: right })
    }
}

impl<'t, T, KC> Iterator for FacetRevRange<'t, T, KC>
where
    KC: for<'a> BytesEncode<'a, EItem = (FieldId, u8, T, T)>,
    KC: BytesDecode<'t, DItem = (FieldId, u8, T, T)>,
    T: PartialOrd + Copy,
{
    type Item = heed::Result<((FieldId, u8, T, T), RoaringBitmap)>;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            match self.iter.next() {
                Some(Ok(((fid, level, left, right), docids))) => {
                    let must_be_returned = match self.end {
                        Included(end) => right <= end,
                        Excluded(end) => right < end,
                        Unbounded => true,
                    };
                    if must_be_returned {
                        match docids.decode() {
                            Ok(docids) => return Some(Ok(((fid, level, left, right), docids))),
                            Err(e) => return Some(Err(e)),
                        }
                    }
                    continue;
                },
                Some(Err(e)) => return Some(Err(e)),
                None => return None,
            }
        }
    }
}

pub struct FacetIter<'t, T: 't, KC> {
    rtxn: &'t heed::RoTxn<'t>,
    db: Database<KC, CboRoaringBitmapCodec>,
    field_id: FieldId,
    level_iters: Vec<(RoaringBitmap, Either<FacetRange<'t, T, KC>, FacetRevRange<'t, T, KC>>)>,
    must_reduce: bool,
}

impl<'t, T, KC> FacetIter<'t, T, KC>
where
    KC: heed::BytesDecode<'t, DItem = (FieldId, u8, T, T)>,
    KC: for<'a> BytesEncode<'a, EItem = (FieldId, u8, T, T)>,
    T: PartialOrd + Copy + Bounded,
{
    /// Create a `FacetIter` that will iterate on the different facet entries
    /// (facet value + documents ids) and that will reduce the given documents ids
    /// while iterating on the different facet levels.
    pub fn new_reducing(
        rtxn: &'t heed::RoTxn,
        index: &'t Index,
        field_id: FieldId,
        documents_ids: RoaringBitmap,
    ) -> heed::Result<FacetIter<'t, T, KC>>
    {
        let db = index.facet_field_id_value_docids.remap_key_type::<KC>();
        let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
        let highest_iter = FacetRange::new(rtxn, db, field_id, highest_level, Unbounded, Unbounded)?;
        let level_iters = vec![(documents_ids, Left(highest_iter))];
        Ok(FacetIter { rtxn, db, field_id, level_iters, must_reduce: true })
    }

    /// Create a `FacetIter` that will iterate in reverse on the different facet entries
    /// (facet value + documents ids) and that will reduce the given documents ids
    /// while iterating on the different facet levels.
    pub fn new_reverse_reducing(
        rtxn: &'t heed::RoTxn,
        index: &'t Index,
        field_id: FieldId,
        documents_ids: RoaringBitmap,
    ) -> heed::Result<FacetIter<'t, T, KC>>
    {
        let db = index.facet_field_id_value_docids.remap_key_type::<KC>();
        let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
        let highest_iter = FacetRevRange::new(rtxn, db, field_id, highest_level, Unbounded, Unbounded)?;
        let level_iters = vec![(documents_ids, Right(highest_iter))];
        Ok(FacetIter { rtxn, db, field_id, level_iters, must_reduce: true })
    }

    /// Create a `FacetIter` that will iterate on the different facet entries
    /// (facet value + documents ids) and that will not reduce the given documents ids
    /// while iterating on the different facet levels, possibly returning a document id
    /// multiple times when it is associated with multiple facet values.
    pub fn new_non_reducing(
        rtxn: &'t heed::RoTxn,
        index: &'t Index,
        field_id: FieldId,
        documents_ids: RoaringBitmap,
    ) -> heed::Result<FacetIter<'t, T, KC>>
    {
        let db = index.facet_field_id_value_docids.remap_key_type::<KC>();
        let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
        let highest_iter = FacetRange::new(rtxn, db, field_id, highest_level, Unbounded, Unbounded)?;
        let level_iters = vec![(documents_ids, Left(highest_iter))];
        Ok(FacetIter { rtxn, db, field_id, level_iters, must_reduce: false })
    }

    fn highest_level<X>(rtxn: &'t heed::RoTxn, db: Database<KC, X>, fid: FieldId) -> heed::Result<Option<u8>> {
        let level = db.remap_types::<ByteSlice, DecodeIgnore>()
            .prefix_iter(rtxn, &[fid][..])?
            .remap_key_type::<KC>()
            .last().transpose()?
            .map(|((_, level, _, _), _)| level);
        Ok(level)
    }
}

impl<'t, T: 't, KC> Iterator for FacetIter<'t, T, KC>
where
    KC: heed::BytesDecode<'t, DItem = (FieldId, u8, T, T)>,
    KC: for<'x> heed::BytesEncode<'x, EItem = (FieldId, u8, T, T)>,
    T: PartialOrd + Copy + Bounded + Debug,
{
    type Item = heed::Result<(T, RoaringBitmap)>;

    fn next(&mut self) -> Option<Self::Item> {
        'outer: loop {
            let (documents_ids, last) = self.level_iters.last_mut()?;
            let is_ascending = last.is_left();
            for result in last {
                // If the documents ids set of the last iterator is empty it means
                // that we already found all the documents in the sub level iterations,
                // we can pop this level iterator.
                if documents_ids.is_empty() {
                    break;
                }

                match result {
                    Ok(((_fid, level, left, right), mut docids)) => {
                        docids.intersect_with(&documents_ids);
                        if !docids.is_empty() {
                            if self.must_reduce {
                                documents_ids.difference_with(&docids);
                            }

                            if level == 0 {
                                debug!("found {:?} at {:?}", docids, left);
                                return Some(Ok((left, docids)));
                            }

                            let rtxn = self.rtxn;
                            let db = self.db;
                            let fid = self.field_id;
                            let left = Included(left);
                            let right = Included(right);

                            debug!("calling with {:?} to {:?} (level {}) to find {:?}",
                                left, right, level - 1, docids,
                            );

                            let result = if is_ascending {
                                FacetRange::new(rtxn, db, fid, level - 1, left, right).map(Left)
                            } else {
                                FacetRevRange::new(rtxn, db, fid, level - 1, left, right).map(Right)
                            };

                            match result {
                                Ok(iter) => {
                                    self.level_iters.push((docids, iter));
                                    continue 'outer;
                                },
                                Err(e) => return Some(Err(e)),
                            }
                        }
                    },
                    Err(e) => return Some(Err(e)),
                }
            }
            self.level_iters.pop();
        }
    }
}
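As a hedged usage sketch of the level-based iteration above (the field id 0 and the codec path are assumptions made for illustration): fetch the smallest i64 facet value of a field for a set of candidate documents.

    use roaring::RoaringBitmap;

    // Iterate the i64 facet values of field 0 in ascending order, reducing
    // the candidates set while descending the levels, and keep the first hit.
    fn first_facet_value(
        rtxn: &heed::RoTxn,
        index: &Index,
        candidates: RoaringBitmap,
    ) -> heed::Result<Option<(i64, RoaringBitmap)>> {
        use crate::heed_codec::facet::FacetLevelValueI64Codec;
        let mut iter = FacetIter::<i64, FacetLevelValueI64Codec>::new_reducing(
            rtxn, index, 0, candidates,
        )?;
        iter.next().transpose()
    }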
12  milli/src/search/facet/parser.rs  Normal file
@@ -0,0 +1,12 @@
use once_cell::sync::Lazy;
use pest::prec_climber::{Operator, Assoc, PrecClimber};

pub static PREC_CLIMBER: Lazy<PrecClimber<Rule>> = Lazy::new(|| {
    use Assoc::*;
    use Rule::*;
    PrecClimber::new(vec![Operator::new(or, Left), Operator::new(and, Left)])
});

#[derive(Parser)]
#[grammar = "search/facet/grammar.pest"]
pub struct FilterParser;
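A hedged sketch of how such a precedence climber can fold a parsed expression into a documents set; `eval_term` is a hypothetical helper, and the `&`/`|` bitmap operators come from the roaring crate.

    use pest::iterators::{Pair, Pairs};
    use roaring::RoaringBitmap;

    fn eval_expr<'i>(
        pairs: Pairs<'i, Rule>,
        eval_term: &dyn Fn(Pair<'i, Rule>) -> RoaringBitmap, // hypothetical
    ) -> RoaringBitmap {
        PREC_CLIMBER.climb(
            pairs,
            |pair| eval_term(pair),
            |lhs: RoaringBitmap, op: Pair<'i, Rule>, rhs: RoaringBitmap| match op.as_rule() {
                Rule::and => lhs & rhs, // documents matching both sides
                Rule::or => lhs | rhs,  // documents matching either side
                _ => unreachable!(),
            },
        )
    }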
393  milli/src/search/mod.rs  Normal file
@@ -0,0 +1,393 @@
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::time::Instant;

use anyhow::{bail, Context};
use fst::{IntoStreamer, Streamer, Set};
use levenshtein_automata::DFA;
use levenshtein_automata::LevenshteinAutomatonBuilder as LevBuilder;
use log::debug;
use meilisearch_tokenizer::{AnalyzerConfig, Analyzer};
use once_cell::sync::Lazy;
use ordered_float::OrderedFloat;
use roaring::bitmap::RoaringBitmap;

use crate::facet::FacetType;
use crate::heed_codec::facet::{FacetLevelValueF64Codec, FacetLevelValueI64Codec};
use crate::heed_codec::facet::{FieldDocIdFacetF64Codec, FieldDocIdFacetI64Codec};
use crate::mdfs::Mdfs;
use crate::query_tokens::{query_tokens, QueryToken};
use crate::{Index, FieldId, DocumentId, Criterion};

pub use self::facet::{FacetCondition, FacetDistribution, FacetNumberOperator, FacetStringOperator};
pub use self::facet::FacetIter;

// Building these factories is not free.
static LEVDIST0: Lazy<LevBuilder> = Lazy::new(|| LevBuilder::new(0, true));
static LEVDIST1: Lazy<LevBuilder> = Lazy::new(|| LevBuilder::new(1, true));
static LEVDIST2: Lazy<LevBuilder> = Lazy::new(|| LevBuilder::new(2, true));

mod facet;

pub struct Search<'a> {
    query: Option<String>,
    facet_condition: Option<FacetCondition>,
    offset: usize,
    limit: usize,
    rtxn: &'a heed::RoTxn<'a>,
    index: &'a Index,
}

impl<'a> Search<'a> {
    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index) -> Search<'a> {
        Search { query: None, facet_condition: None, offset: 0, limit: 20, rtxn, index }
    }

    pub fn query(&mut self, query: impl Into<String>) -> &mut Search<'a> {
        self.query = Some(query.into());
        self
    }

    pub fn offset(&mut self, offset: usize) -> &mut Search<'a> {
        self.offset = offset;
        self
    }

    pub fn limit(&mut self, limit: usize) -> &mut Search<'a> {
        self.limit = limit;
        self
    }

    pub fn facet_condition(&mut self, condition: FacetCondition) -> &mut Search<'a> {
        self.facet_condition = Some(condition);
        self
    }

    /// Extracts the query words from the query string and returns the DFAs accordingly.
    /// TODO introduce settings for the number of typos regarding the words lengths.
    fn generate_query_dfas(query: &str) -> Vec<(String, bool, DFA)> {
        let (lev0, lev1, lev2) = (&LEVDIST0, &LEVDIST1, &LEVDIST2);

        let stop_words = Set::default();
        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(&stop_words));
        let analyzed = analyzer.analyze(query);
        let tokens = analyzed.tokens();
        let words: Vec<_> = query_tokens(tokens).collect();

        let ends_with_whitespace = query.chars().last().map_or(false, char::is_whitespace);
        let number_of_words = words.len();

        words.into_iter().enumerate().map(|(i, word)| {
            let (word, quoted) = match word {
                QueryToken::Free(token) => (token.text().to_string(), token.text().len() <= 3),
                QueryToken::Quoted(token) => (token.text().to_string(), true),
            };
            let is_last = i + 1 == number_of_words;
            let is_prefix = is_last && !ends_with_whitespace && !quoted;
            let lev = match word.len() {
                0..=4 => lev0,
                5..=8 => if quoted { lev0 } else { lev1 },
                _ => if quoted { lev0 } else { lev2 },
            };

            let dfa = if is_prefix {
                lev.build_prefix_dfa(&word)
            } else {
                lev.build_dfa(&word)
            };

            (word, is_prefix, dfa)
        })
        .collect()
    }

    /// Fetch the words from the given FST related to the given DFAs along with
    /// the associated documents ids.
    fn fetch_words_docids(
        &self,
        fst: &fst::Set<Cow<[u8]>>,
        dfas: Vec<(String, bool, DFA)>,
    ) -> anyhow::Result<Vec<(HashMap<String, (u8, RoaringBitmap)>, RoaringBitmap)>>
    {
        // A Vec storing all the derived words from the original query words, associated
        // with the distance from the original word and the docids where the word appears.
        let mut derived_words = Vec::<(HashMap<String, (u8, RoaringBitmap)>, RoaringBitmap)>::with_capacity(dfas.len());

        for (_word, _is_prefix, dfa) in dfas {
            let mut acc_derived_words = HashMap::new();
            let mut unions_docids = RoaringBitmap::new();
            let mut stream = fst.search_with_state(&dfa).into_stream();
            while let Some((word, state)) = stream.next() {
                let word = std::str::from_utf8(word)?;
                let docids = self.index.word_docids.get(self.rtxn, word)?.unwrap();
                let distance = dfa.distance(state);
                unions_docids.union_with(&docids);
                acc_derived_words.insert(word.to_string(), (distance.to_u8(), docids));
            }
            derived_words.push((acc_derived_words, unions_docids));
        }

        Ok(derived_words)
    }

    /// Returns the set of docids that contains all of the query words.
    fn compute_candidates(
        derived_words: &[(HashMap<String, (u8, RoaringBitmap)>, RoaringBitmap)],
    ) -> RoaringBitmap
    {
        // We sort the derived words by inverse popularity, this way intersections are faster.
        let mut derived_words: Vec<_> = derived_words.iter().collect();
        derived_words.sort_unstable_by_key(|(_, docids)| docids.len());

        // We do a union between all the docids of each of the derived words,
        // we get N unions (the number of original query words), we then intersect them.
        let mut candidates = RoaringBitmap::new();

        for (i, (_, union_docids)) in derived_words.iter().enumerate() {
            if i == 0 {
                candidates = union_docids.clone();
            } else {
                candidates.intersect_with(&union_docids);
            }
        }

        candidates
    }

    fn facet_ordered(
        &self,
        field_id: FieldId,
        facet_type: FacetType,
        ascending: bool,
        mut documents_ids: RoaringBitmap,
        limit: usize,
    ) -> anyhow::Result<Vec<DocumentId>>
    {
        let mut output: Vec<_> = match facet_type {
            FacetType::Float => {
                if documents_ids.len() <= 1000 {
                    let db = self.index.field_id_docid_facet_values.remap_key_type::<FieldDocIdFacetF64Codec>();
                    let mut docids_values = Vec::with_capacity(documents_ids.len() as usize);
                    for docid in documents_ids.iter() {
                        let left = (field_id, docid, f64::MIN);
                        let right = (field_id, docid, f64::MAX);
                        let mut iter = db.range(self.rtxn, &(left..=right))?;
                        let entry = if ascending { iter.next() } else { iter.last() };
                        if let Some(((_, _, value), ())) = entry.transpose()? {
                            docids_values.push((docid, OrderedFloat(value)));
                        }
                    }
                    docids_values.sort_unstable_by_key(|(_, value)| *value);
                    let iter = docids_values.into_iter().map(|(id, _)| id);
                    if ascending {
                        iter.take(limit).collect()
                    } else {
                        iter.rev().take(limit).collect()
                    }
                } else {
                    let facet_fn = if ascending {
                        FacetIter::<f64, FacetLevelValueF64Codec>::new_reducing
                    } else {
                        FacetIter::<f64, FacetLevelValueF64Codec>::new_reverse_reducing
                    };
                    let mut limit_tmp = limit;
                    let mut output = Vec::new();
                    for result in facet_fn(self.rtxn, self.index, field_id, documents_ids.clone())? {
                        let (_val, docids) = result?;
                        limit_tmp = limit_tmp.saturating_sub(docids.len() as usize);
                        output.push(docids);
                        if limit_tmp == 0 { break }
                    }
                    output.into_iter().flatten().take(limit).collect()
                }
            },
            FacetType::Integer => {
                if documents_ids.len() <= 1000 {
                    let db = self.index.field_id_docid_facet_values.remap_key_type::<FieldDocIdFacetI64Codec>();
                    let mut docids_values = Vec::with_capacity(documents_ids.len() as usize);
                    for docid in documents_ids.iter() {
                        let left = (field_id, docid, i64::MIN);
                        let right = (field_id, docid, i64::MAX);
                        let mut iter = db.range(self.rtxn, &(left..=right))?;
                        let entry = if ascending { iter.next() } else { iter.last() };
                        if let Some(((_, _, value), ())) = entry.transpose()? {
                            docids_values.push((docid, value));
                        }
                    }
                    docids_values.sort_unstable_by_key(|(_, value)| *value);
                    let iter = docids_values.into_iter().map(|(id, _)| id);
                    if ascending {
                        iter.take(limit).collect()
                    } else {
                        iter.rev().take(limit).collect()
                    }
                } else {
                    let facet_fn = if ascending {
                        FacetIter::<i64, FacetLevelValueI64Codec>::new_reducing
                    } else {
                        FacetIter::<i64, FacetLevelValueI64Codec>::new_reverse_reducing
                    };
                    let mut limit_tmp = limit;
                    let mut output = Vec::new();
                    for result in facet_fn(self.rtxn, self.index, field_id, documents_ids.clone())? {
                        let (_val, docids) = result?;
                        limit_tmp = limit_tmp.saturating_sub(docids.len() as usize);
                        output.push(docids);
                        if limit_tmp == 0 { break }
                    }
                    output.into_iter().flatten().take(limit).collect()
                }
            },
            FacetType::String => bail!("criteria facet type must be a number"),
        };

        // If there aren't enough documents to return, we try to complete the list
        // with documents that are maybe not faceted under this field and therefore
        // not returned by the previous facet iteration.
        if output.len() < limit {
            output.iter().for_each(|n| { documents_ids.remove(*n); });
            let remaining = documents_ids.iter().take(limit - output.len());
            output.extend(remaining);
        }

        Ok(output)
    }

    pub fn execute(&self) -> anyhow::Result<SearchResult> {
        let limit = self.limit;
        let fst = self.index.words_fst(self.rtxn)?;

        // Construct the DFAs related to the query words.
        let derived_words = match self.query.as_deref().map(Self::generate_query_dfas) {
            Some(dfas) if !dfas.is_empty() => Some(self.fetch_words_docids(&fst, dfas)?),
            _otherwise => None,
        };

        // We create the original candidates with the facet conditions results.
        let before = Instant::now();
        let facet_candidates = match &self.facet_condition {
            Some(condition) => Some(condition.evaluate(self.rtxn, self.index)?),
            None => None,
        };

        debug!("facet candidates: {:?} took {:.02?}", facet_candidates, before.elapsed());

        let order_by_facet = {
            let criteria = self.index.criteria(self.rtxn)?;
            let result = criteria.into_iter().flat_map(|criterion| {
                match criterion {
                    Criterion::Asc(fid) => Some((fid, true)),
                    Criterion::Desc(fid) => Some((fid, false)),
                    _ => None,
                }
            }).next();
            match result {
                Some((attr_name, is_ascending)) => {
                    let field_id_map = self.index.fields_ids_map(self.rtxn)?;
                    let fid = field_id_map.id(&attr_name).with_context(|| format!("unknown field: {:?}", attr_name))?;
                    let faceted_fields = self.index.faceted_fields_ids(self.rtxn)?;
                    let ftype = *faceted_fields.get(&fid)
                        .with_context(|| format!("{:?} not found in the faceted fields.", attr_name))?;
                    Some((fid, ftype, is_ascending))
                },
                None => None,
            }
        };

        let before = Instant::now();
        let (candidates, derived_words) = match (facet_candidates, derived_words) {
            (Some(mut facet_candidates), Some(derived_words)) => {
                let words_candidates = Self::compute_candidates(&derived_words);
                facet_candidates.intersect_with(&words_candidates);
                (facet_candidates, derived_words)
            },
            (None, Some(derived_words)) => {
                (Self::compute_candidates(&derived_words), derived_words)
            },
            (Some(facet_candidates), None) => {
                // If the query is not set or results in no DFAs but
                // there are some facet conditions we return a placeholder.
                let documents_ids = match order_by_facet {
                    Some((fid, ftype, is_ascending)) => {
                        self.facet_ordered(fid, ftype, is_ascending, facet_candidates.clone(), limit)?
                    },
                    None => facet_candidates.iter().take(limit).collect(),
                };
                return Ok(SearchResult {
                    documents_ids,
                    candidates: facet_candidates,
                    ..Default::default()
                })
            },
            (None, None) => {
                // If the query is not set or results in no DFAs we return a placeholder.
                let all_docids = self.index.documents_ids(self.rtxn)?;
                let documents_ids = match order_by_facet {
                    Some((fid, ftype, is_ascending)) => {
                        self.facet_ordered(fid, ftype, is_ascending, all_docids.clone(), limit)?
                    },
                    None => all_docids.iter().take(limit).collect(),
                };
                return Ok(SearchResult { documents_ids, candidates: all_docids, ..Default::default() })
            },
        };

        debug!("candidates: {:?} took {:.02?}", candidates, before.elapsed());

        // The mana depth first search is a revised DFS that explores
        // solutions in the order of their proximities.
        let mut mdfs = Mdfs::new(self.index, self.rtxn, &derived_words, candidates.clone());
        let mut documents = Vec::new();

        // We execute the Mdfs iterator until we find enough documents.
        while documents.iter().map(RoaringBitmap::len).sum::<u64>() < limit as u64 {
            match mdfs.next().transpose()? {
                Some((proximity, answer)) => {
                    debug!("answer with a proximity of {}: {:?}", proximity, answer);
                    documents.push(answer);
                },
                None => break,
            }
        }

        let found_words = derived_words.into_iter().flat_map(|(w, _)| w).map(|(w, _)| w).collect();
        let documents_ids = match order_by_facet {
            Some((fid, ftype, order)) => {
                let mut ordered_documents = Vec::new();
                for documents_ids in documents {
                    let docids = self.facet_ordered(fid, ftype, order, documents_ids, limit)?;
                    ordered_documents.push(docids);
                    if ordered_documents.iter().map(Vec::len).sum::<usize>() >= limit { break }
                }
                ordered_documents.into_iter().flatten().take(limit).collect()
            },
            None => documents.into_iter().flatten().take(limit).collect(),
        };

        Ok(SearchResult { found_words, candidates, documents_ids })
    }
}

impl fmt::Debug for Search<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let Search { query, facet_condition, offset, limit, rtxn: _, index: _ } = self;
        f.debug_struct("Search")
            .field("query", query)
            .field("facet_condition", facet_condition)
            .field("offset", offset)
            .field("limit", limit)
            .finish()
    }
}

#[derive(Default)]
pub struct SearchResult {
    pub found_words: HashSet<String>,
    pub candidates: RoaringBitmap,
    // TODO those documents ids should be associated with their criteria scores.
    pub documents_ids: Vec<DocumentId>,
}
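A hedged sketch of driving this builder end to end, mirroring its use in the search subcommand later in this commit; the index opening is elided and the query string is an example.

    let rtxn = index.read_txn()?;
    let mut search = Search::new(&rtxn, &index);
    search.query("hello world").offset(0).limit(10);
    let SearchResult { found_words, candidates, documents_ids } = search.execute()?;
    println!(
        "{} candidates, returning {} documents matching {:?}",
        candidates.len(), documents_ids.len(), found_words,
    );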
754  milli/src/subcommand/infos.rs  Normal file
@@ -0,0 +1,754 @@
use std::fmt::Write as _;
use std::path::PathBuf;
use std::{str, io, fmt};

use anyhow::Context;
use byte_unit::Byte;
use crate::Index;
use heed::EnvOpenOptions;
use structopt::StructOpt;

use Command::*;

const MAIN_DB_NAME: &str = "main";
const WORD_DOCIDS_DB_NAME: &str = "word-docids";
const DOCID_WORD_POSITIONS_DB_NAME: &str = "docid-word-positions";
const WORD_PAIR_PROXIMITY_DOCIDS_DB_NAME: &str = "word-pair-proximity-docids";
const DOCUMENTS_DB_NAME: &str = "documents";
const USERS_IDS_DOCUMENTS_IDS: &[u8] = b"users-ids-documents-ids";

const ALL_DATABASE_NAMES: &[&str] = &[
    MAIN_DB_NAME,
    WORD_DOCIDS_DB_NAME,
    DOCID_WORD_POSITIONS_DB_NAME,
    WORD_PAIR_PROXIMITY_DOCIDS_DB_NAME,
    DOCUMENTS_DB_NAME,
];

const POSTINGS_DATABASE_NAMES: &[&str] = &[
    WORD_DOCIDS_DB_NAME,
    DOCID_WORD_POSITIONS_DB_NAME,
    WORD_PAIR_PROXIMITY_DOCIDS_DB_NAME,
];

#[derive(Debug, StructOpt)]
/// A stats fetcher for milli.
pub struct Opt {
    /// The path where the database is located.
    /// It is created if it doesn't already exist.
    #[structopt(long = "db", parse(from_os_str))]
    database: PathBuf,

    /// The maximum size the database can take on disk. It is recommended to specify
    /// the whole disk space (value must be a multiple of a page size).
    #[structopt(long = "db-size", default_value = "100 GiB")]
    database_size: Byte,

    /// Verbose mode (-v, -vv, -vvv, etc.)
    #[structopt(short, long, parse(from_occurrences))]
    verbose: usize,

    #[structopt(subcommand)]
    command: Command,
}

#[derive(Debug, StructOpt)]
enum Command {
    /// Outputs a CSV of the most frequent words of this index.
    ///
    /// Words are displayed and ordered by frequency.
    /// `document_frequency` defines the number of documents which contain the word.
    MostCommonWords {
        /// The maximum number of frequencies to return.
        #[structopt(default_value = "10")]
        limit: usize,
    },

    /// Outputs a CSV with the biggest entries of the database.
    BiggestValues {
        /// The maximum number of sizes to return.
        #[structopt(default_value = "10")]
        limit: usize,
    },

    /// Outputs a CSV with the documents ids where the given words appear.
    WordsDocids {
        /// Display the whole documents ids in detail.
        #[structopt(long)]
        full_display: bool,

        /// The words to display the documents ids of.
        words: Vec<String>,
    },

    /// Outputs a CSV with the documents ids along with the facet values where they appear.
    FacetValuesDocids {
        /// Display the whole documents ids in detail.
        #[structopt(long)]
        full_display: bool,

        /// The field name in the document.
        field_name: String,
    },

    /// Outputs some facets statistics for the given facet name.
    FacetStats {
        /// The field name in the document.
        field_name: String,
    },

    /// Outputs the total size of all the docid-word-positions keys and values.
    TotalDocidWordPositionsSize,

    /// Outputs the average number of *different* words by document.
    AverageNumberOfWordsByDoc,

    /// Outputs the average number of positions for each document words.
    AverageNumberOfPositionsByWord,

    /// Outputs some statistics about the given database (e.g. median, quartiles,
    /// percentiles, minimum, maximum, average, key size, value size).
    DatabaseStats {
        #[structopt(possible_values = POSTINGS_DATABASE_NAMES)]
        database: String,
    },

    /// Outputs the size in bytes of the specified database.
    SizeOfDatabase {
        #[structopt(possible_values = ALL_DATABASE_NAMES)]
        database: String,
    },

    /// Outputs a CSV with the proximities for the two specified words and
    /// the documents ids where these relations appear.
    ///
    /// `word1`, `word2` defines the word pair specified *in this specific order*.
    /// `proximity` defines the proximity between the two specified words.
    /// `documents_ids` defines the documents ids where the relation appears.
    WordPairProximitiesDocids {
        /// Display the whole documents ids in detail.
        #[structopt(long)]
        full_display: bool,

        /// First word of the word pair.
        word1: String,

        /// Second word of the word pair.
        word2: String,
    },

    /// Outputs the words FST to standard output.
    ///
    /// One can use the FST binary helper to dissect and analyze it,
    /// you can install it using `cargo install fst-bin`.
    ExportWordsFst,

    /// Outputs the documents as JSON lines to the standard output.
    ///
    /// All of the fields are extracted, not just the displayed ones.
    ExportDocuments,

    /// A command that patches the old external ids
    /// into the new external ids format.
    PatchToNewExternalIds,
}

pub fn run(opt: Opt) -> anyhow::Result<()> {
    stderrlog::new()
        .verbosity(opt.verbose)
        .show_level(false)
        .timestamp(stderrlog::Timestamp::Off)
        .init()?;

    let mut options = EnvOpenOptions::new();
    options.map_size(opt.database_size.get_bytes() as usize);

    // Open the LMDB database.
    let index = Index::new(options, opt.database)?;
    let rtxn = index.read_txn()?;

    match opt.command {
        MostCommonWords { limit } => most_common_words(&index, &rtxn, limit),
        BiggestValues { limit } => biggest_value_sizes(&index, &rtxn, limit),
        WordsDocids { full_display, words } => words_docids(&index, &rtxn, !full_display, words),
        FacetValuesDocids { full_display, field_name } => {
            facet_values_docids(&index, &rtxn, !full_display, field_name)
        },
        FacetStats { field_name } => facet_stats(&index, &rtxn, field_name),
        TotalDocidWordPositionsSize => total_docid_word_positions_size(&index, &rtxn),
        AverageNumberOfWordsByDoc => average_number_of_words_by_doc(&index, &rtxn),
        AverageNumberOfPositionsByWord => {
            average_number_of_positions_by_word(&index, &rtxn)
        },
        SizeOfDatabase { database } => size_of_database(&index, &rtxn, &database),
        DatabaseStats { database } => database_stats(&index, &rtxn, &database),
        WordPairProximitiesDocids { full_display, word1, word2 } => {
            word_pair_proximities_docids(&index, &rtxn, !full_display, word1, word2)
        },
        ExportWordsFst => export_words_fst(&index, &rtxn),
        ExportDocuments => export_documents(&index, &rtxn),
        PatchToNewExternalIds => {
            drop(rtxn);
            let mut wtxn = index.write_txn()?;
            let result = patch_to_new_external_ids(&index, &mut wtxn);
            wtxn.commit()?;
            result
        },
    }
}

fn patch_to_new_external_ids(index: &Index, wtxn: &mut heed::RwTxn) -> anyhow::Result<()> {
    use heed::types::ByteSlice;

    if let Some(documents_ids) = index.main.get::<_, ByteSlice, ByteSlice>(wtxn, USERS_IDS_DOCUMENTS_IDS)? {
        let documents_ids = documents_ids.to_owned();
        index.main.put::<_, ByteSlice, ByteSlice>(
            wtxn,
            crate::index::HARD_EXTERNAL_DOCUMENTS_IDS_KEY.as_bytes(),
            &documents_ids,
        )?;
        index.main.delete::<_, ByteSlice>(wtxn, USERS_IDS_DOCUMENTS_IDS)?;
    }

    Ok(())
}

fn most_common_words(index: &Index, rtxn: &heed::RoTxn, limit: usize) -> anyhow::Result<()> {
    use std::collections::BinaryHeap;
    use std::cmp::Reverse;

    let mut heap = BinaryHeap::with_capacity(limit + 1);
    for result in index.word_docids.iter(rtxn)? {
        if limit == 0 { break }
        let (word, docids) = result?;
        heap.push((Reverse(docids.len()), word));
        if heap.len() > limit { heap.pop(); }
    }

    let stdout = io::stdout();
    let mut wtr = csv::Writer::from_writer(stdout.lock());
    wtr.write_record(&["word", "document_frequency"])?;

    for (Reverse(document_frequency), word) in heap.into_sorted_vec() {
        wtr.write_record(&[word, &document_frequency.to_string()])?;
    }

    Ok(wtr.flush()?)
}

/// Helper function that converts the facet value key into a unique type
/// that can be used for logging or display purposes.
fn facet_values_iter<'txn, DC: 'txn, T>(
    rtxn: &'txn heed::RoTxn,
    db: heed::Database<heed::types::ByteSlice, DC>,
    field_id: u8,
    facet_type: crate::facet::FacetType,
    string_fn: impl Fn(&str) -> T + 'txn,
    float_fn: impl Fn(u8, f64, f64) -> T + 'txn,
    integer_fn: impl Fn(u8, i64, i64) -> T + 'txn,
) -> heed::Result<Box<dyn Iterator<Item=heed::Result<(T, DC::DItem)>> + 'txn>>
where
    DC: heed::BytesDecode<'txn>,
{
    use crate::facet::FacetType;
    use crate::heed_codec::facet::{
        FacetValueStringCodec, FacetLevelValueF64Codec, FacetLevelValueI64Codec,
    };

    let iter = db.prefix_iter(&rtxn, &[field_id])?;
    match facet_type {
        FacetType::String => {
            let iter = iter.remap_key_type::<FacetValueStringCodec>()
                .map(move |r| r.map(|((_, key), value)| (string_fn(key), value)));
            Ok(Box::new(iter) as Box<dyn Iterator<Item=_>>)
        },
        FacetType::Float => {
            let iter = iter.remap_key_type::<FacetLevelValueF64Codec>()
                .map(move |r| r.map(|((_, level, left, right), value)| {
                    (float_fn(level, left, right), value)
                }));
            Ok(Box::new(iter))
        },
        FacetType::Integer => {
            let iter = iter.remap_key_type::<FacetLevelValueI64Codec>()
                .map(move |r| r.map(|((_, level, left, right), value)| {
                    (integer_fn(level, left, right), value)
                }));
            Ok(Box::new(iter))
        },
    }
}

fn facet_number_value_to_string<T: fmt::Debug>(level: u8, left: T, right: T) -> (u8, String) {
    if level == 0 {
        (level, format!("{:?}", left))
    } else {
        (level, format!("{:?} to {:?}", left, right))
    }
}

fn biggest_value_sizes(index: &Index, rtxn: &heed::RoTxn, limit: usize) -> anyhow::Result<()> {
    use std::cmp::Reverse;
    use std::collections::BinaryHeap;
    use heed::types::{Str, ByteSlice};

    let Index {
        env: _env,
        main,
        word_docids,
        docid_word_positions,
        word_pair_proximity_docids,
        facet_field_id_value_docids,
        field_id_docid_facet_values: _,
        documents,
    } = index;

    let main_name = "main";
    let word_docids_name = "word_docids";
    let docid_word_positions_name = "docid_word_positions";
    let word_pair_proximity_docids_name = "word_pair_proximity_docids";
    let facet_field_id_value_docids_name = "facet_field_id_value_docids";
    let documents_name = "documents";

    let mut heap = BinaryHeap::with_capacity(limit + 1);

    if limit > 0 {
        let words_fst = index.words_fst(rtxn)?;
        heap.push(Reverse((words_fst.as_fst().as_bytes().len(), "words-fst".to_string(), main_name)));
        if heap.len() > limit { heap.pop(); }

        if let Some(documents_ids) = main.get::<_, Str, ByteSlice>(rtxn, "documents-ids")? {
            heap.push(Reverse((documents_ids.len(), "documents-ids".to_string(), main_name)));
            if heap.len() > limit { heap.pop(); }
        }

        for result in word_docids.remap_data_type::<ByteSlice>().iter(rtxn)? {
            let (word, value) = result?;
            heap.push(Reverse((value.len(), word.to_string(), word_docids_name)));
            if heap.len() > limit { heap.pop(); }
        }

        for result in docid_word_positions.remap_data_type::<ByteSlice>().iter(rtxn)? {
            let ((docid, word), value) = result?;
            let key = format!("{} {}", docid, word);
            heap.push(Reverse((value.len(), key, docid_word_positions_name)));
            if heap.len() > limit { heap.pop(); }
        }

        for result in word_pair_proximity_docids.remap_data_type::<ByteSlice>().iter(rtxn)? {
            let ((word1, word2, prox), value) = result?;
            let key = format!("{} {} {}", word1, word2, prox);
            heap.push(Reverse((value.len(), key, word_pair_proximity_docids_name)));
            if heap.len() > limit { heap.pop(); }
        }

        let faceted_fields = index.faceted_fields_ids(rtxn)?;
        let fields_ids_map = index.fields_ids_map(rtxn)?;
        for (field_id, field_type) in faceted_fields {
            let facet_name = fields_ids_map.name(field_id).unwrap();

            let db = facet_field_id_value_docids.remap_data_type::<ByteSlice>();
            let iter = facet_values_iter(
                rtxn,
                db,
                field_id,
                field_type,
                |key| key.to_owned(),
                |level, left, right| {
                    let mut output = facet_number_value_to_string(level, left, right).1;
                    let _ = write!(&mut output, " (level {})", level);
                    output
                },
                |level, left, right| {
                    let mut output = facet_number_value_to_string(level, left, right).1;
                    let _ = write!(&mut output, " (level {})", level);
                    output
                },
            )?;

            for result in iter {
                let (fvalue, value) = result?;
                let key = format!("{} {}", facet_name, fvalue);
                heap.push(Reverse((value.len(), key, facet_field_id_value_docids_name)));
                if heap.len() > limit { heap.pop(); }
            }
        }

        for result in documents.remap_data_type::<ByteSlice>().iter(rtxn)? {
            let (id, value) = result?;
            heap.push(Reverse((value.len(), id.to_string(), documents_name)));
            if heap.len() > limit { heap.pop(); }
        }
    }

    let stdout = io::stdout();
    let mut wtr = csv::Writer::from_writer(stdout.lock());
    wtr.write_record(&["database_name", "key_name", "size"])?;

    for Reverse((size, key_name, database_name)) in heap.into_sorted_vec() {
        wtr.write_record(&[database_name.to_string(), key_name, size.to_string()])?;
    }

    Ok(wtr.flush()?)
}

fn words_docids(index: &Index, rtxn: &heed::RoTxn, debug: bool, words: Vec<String>) -> anyhow::Result<()> {
    let stdout = io::stdout();
    let mut wtr = csv::Writer::from_writer(stdout.lock());
    wtr.write_record(&["word", "documents_ids"])?;

    for word in words {
        if let Some(docids) = index.word_docids.get(rtxn, &word)? {
            let docids = if debug {
                format!("{:?}", docids)
            } else {
                format!("{:?}", docids.iter().collect::<Vec<_>>())
            };
            wtr.write_record(&[word, docids])?;
        }
    }

    Ok(wtr.flush()?)
}

fn facet_values_docids(index: &Index, rtxn: &heed::RoTxn, debug: bool, field_name: String) -> anyhow::Result<()> {
    let fields_ids_map = index.fields_ids_map(&rtxn)?;
    let faceted_fields = index.faceted_fields_ids(&rtxn)?;

    let field_id = fields_ids_map.id(&field_name)
        .with_context(|| format!("field {} not found", field_name))?;
    let field_type = faceted_fields.get(&field_id)
        .with_context(|| format!("field {} is not faceted", field_name))?;

    let stdout = io::stdout();
    let mut wtr = csv::Writer::from_writer(stdout.lock());
    wtr.write_record(&["facet_value", "facet_level", "documents_count", "documents_ids"])?;

    let db = index.facet_field_id_value_docids;
    let iter = facet_values_iter(
        rtxn,
        db,
        field_id,
        *field_type,
        |key| (0, key.to_owned()),
        facet_number_value_to_string,
        facet_number_value_to_string,
    )?;

    for result in iter {
        let ((level, value), docids) = result?;
        let count = docids.len();
        let docids = if debug {
            format!("{:?}", docids)
        } else {
            format!("{:?}", docids.iter().collect::<Vec<_>>())
        };
        wtr.write_record(&[value, level.to_string(), count.to_string(), docids])?;
    }

    Ok(wtr.flush()?)
}

fn facet_stats(index: &Index, rtxn: &heed::RoTxn, field_name: String) -> anyhow::Result<()> {
    let fields_ids_map = index.fields_ids_map(&rtxn)?;
    let faceted_fields = index.faceted_fields_ids(&rtxn)?;

    let field_id = fields_ids_map.id(&field_name)
        .with_context(|| format!("field {} not found", field_name))?;
    let field_type = faceted_fields.get(&field_id)
        .with_context(|| format!("field {} is not faceted", field_name))?;

    let db = index.facet_field_id_value_docids;
    let iter = facet_values_iter(
        rtxn,
        db,
        field_id,
        *field_type,
        |_key| 0u8,
        |level, _left, _right| level,
        |level, _left, _right| level,
    )?;

    println!("The facet stats of the {:?} field", field_name);

    let mut level_size = 0;
    let mut current_level = None;
    for result in iter {
        let (level, _) = result?;
        if let Some(current) = current_level {
            if current != level {
                println!("\tnumber of groups at level {}: {}", current, level_size);
                level_size = 0;
            }
        }
        current_level = Some(level);
        level_size += 1;
    }

    if let Some(current) = current_level {
        println!("\tnumber of groups at level {}: {}", current, level_size);
    }

    Ok(())
}

fn export_words_fst(index: &Index, rtxn: &heed::RoTxn) -> anyhow::Result<()> {
    use std::io::Write as _;

    let mut stdout = io::stdout();
    let words_fst = index.words_fst(rtxn)?;
    stdout.write_all(words_fst.as_fst().as_bytes())?;

    Ok(())
}

fn export_documents(index: &Index, rtxn: &heed::RoTxn) -> anyhow::Result<()> {
    use std::io::{BufWriter, Write as _};
    use crate::obkv_to_json;

    let stdout = io::stdout();
    let mut out = BufWriter::new(stdout);

    let fields_ids_map = index.fields_ids_map(rtxn)?;
    let displayed_fields: Vec<_> = fields_ids_map.iter().map(|(id, _name)| id).collect();

    for result in index.documents.iter(rtxn)? {
        let (_id, obkv) = result?;
        let document = obkv_to_json(&displayed_fields, &fields_ids_map, obkv)?;
        serde_json::to_writer(&mut out, &document)?;
        writeln!(&mut out)?;
    }

    out.into_inner()?;

    Ok(())
}

fn total_docid_word_positions_size(index: &Index, rtxn: &heed::RoTxn) -> anyhow::Result<()> {
    use heed::types::ByteSlice;

    let mut total_key_size = 0;
    let mut total_val_size = 0;
    let mut count = 0;

    let iter = index.docid_word_positions.as_polymorph().iter::<_, ByteSlice, ByteSlice>(rtxn)?;
    for result in iter {
        let (key, val) = result?;
        total_key_size += key.len();
        total_val_size += val.len();
        count += 1;
    }

    println!("number of keys: {}", count);
    println!("total key size: {}", total_key_size);
    println!("total value size: {}", total_val_size);

    Ok(())
}

fn average_number_of_words_by_doc(index: &Index, rtxn: &heed::RoTxn) -> anyhow::Result<()> {
    use heed::types::DecodeIgnore;
    use crate::{DocumentId, BEU32StrCodec};

    let mut words_counts = Vec::new();
    let mut count = 0;
    let mut prev = None as Option<(DocumentId, u32)>;

    let iter = index.docid_word_positions.as_polymorph().iter::<_, BEU32StrCodec, DecodeIgnore>(rtxn)?;
    for result in iter {
        let ((docid, _word), ()) = result?;

        match prev.as_mut() {
            Some((prev_docid, prev_count)) if docid == *prev_docid => {
                *prev_count += 1;
            },
            Some((prev_docid, prev_count)) => {
                words_counts.push(*prev_count);
                *prev_docid = docid;
                // The current entry already belongs to the new document,
                // so the new count starts at one.
                *prev_count = 1;
                count += 1;
            },
            None => prev = Some((docid, 1)),
        }
    }

    if let Some((_, prev_count)) = prev.take() {
        words_counts.push(prev_count);
        count += 1;
    }

    let words_count = words_counts.into_iter().map(|c| c as usize).sum::<usize>() as f64;
    let count = count as f64;

    println!("average number of different words by document: {}", words_count / count);

    Ok(())
}

fn average_number_of_positions_by_word(index: &Index, rtxn: &heed::RoTxn) -> anyhow::Result<()> {
    use heed::types::DecodeIgnore;
    use crate::BoRoaringBitmapCodec;

    let mut values_length = Vec::new();
    let mut count = 0;

    let db = index.docid_word_positions.as_polymorph();
    for result in db.iter::<_, DecodeIgnore, BoRoaringBitmapCodec>(rtxn)? {
        let ((), val) = result?;
        values_length.push(val.len() as u32);
        count += 1;
    }

    let values_length_sum = values_length.into_iter().map(|c| c as usize).sum::<usize>() as f64;
    let count = count as f64;

    println!("average number of positions by word: {}", values_length_sum / count);

    Ok(())
}

fn size_of_database(index: &Index, rtxn: &heed::RoTxn, name: &str) -> anyhow::Result<()> {
    use heed::types::ByteSlice;

    let database = match name {
        MAIN_DB_NAME => &index.main,
        WORD_DOCIDS_DB_NAME => index.word_docids.as_polymorph(),
        DOCID_WORD_POSITIONS_DB_NAME => index.docid_word_positions.as_polymorph(),
        WORD_PAIR_PROXIMITY_DOCIDS_DB_NAME => index.word_pair_proximity_docids.as_polymorph(),
        DOCUMENTS_DB_NAME => index.documents.as_polymorph(),
        unknown => anyhow::bail!("unknown database {:?}", unknown),
    };

    let mut key_size: u64 = 0;
    let mut val_size: u64 = 0;
    for result in database.iter::<_, ByteSlice, ByteSlice>(rtxn)? {
        let (k, v) = result?;
        key_size += k.len() as u64;
        val_size += v.len() as u64;
    }

    println!("The {} database weighs:", name);
    println!("\ttotal key size: {} bytes", key_size);
    println!("\ttotal val size: {} bytes", val_size);
    println!("\ttotal size: {} bytes", key_size + val_size);

    Ok(())
}

fn database_stats(index: &Index, rtxn: &heed::RoTxn, name: &str) -> anyhow::Result<()> {
    use heed::types::ByteSlice;
    use heed::{Error, BytesDecode};
    use roaring::RoaringBitmap;
    use crate::{BoRoaringBitmapCodec, CboRoaringBitmapCodec, RoaringBitmapCodec};

    fn compute_stats<'a, DC: BytesDecode<'a, DItem = RoaringBitmap>>(
        db: heed::PolyDatabase,
        rtxn: &'a heed::RoTxn,
        name: &str,
    ) -> anyhow::Result<()>
    {
        let mut key_size = 0u64;
        let mut val_size = 0u64;
        let mut values_length = Vec::new();

        for result in db.iter::<_, ByteSlice, ByteSlice>(rtxn)? {
            let (key, val) = result?;
            key_size += key.len() as u64;
            val_size += val.len() as u64;
            let val = DC::bytes_decode(val).ok_or(Error::Decoding)?;
            values_length.push(val.len() as u32);
        }

        values_length.sort_unstable();

        let median = values_length.len() / 2;
        let quartile = values_length.len() / 4;
        let percentile = values_length.len() / 100;

        let twenty_five_percentile = values_length.get(quartile).unwrap_or(&0);
        let fifty_percentile = values_length.get(median).unwrap_or(&0);
        let seventy_five_percentile = values_length.get(quartile * 3).unwrap_or(&0);
        let ninety_percentile = values_length.get(percentile * 90).unwrap_or(&0);
        let ninety_five_percentile = values_length.get(percentile * 95).unwrap_or(&0);
        let ninety_nine_percentile = values_length.get(percentile * 99).unwrap_or(&0);
        let minimum = values_length.first().unwrap_or(&0);
        let maximum = values_length.last().unwrap_or(&0);
        let count = values_length.len();
        let sum = values_length.iter().map(|l| *l as u64).sum::<u64>();

        println!("The {} database stats on the lengths", name);
        println!("\tnumber of entries: {}", count);
        println!("\t25th percentile (first quartile): {}", twenty_five_percentile);
        println!("\t50th percentile (median): {}", fifty_percentile);
        println!("\t75th percentile (third quartile): {}", seventy_five_percentile);
        println!("\t90th percentile: {}", ninety_percentile);
        println!("\t95th percentile: {}", ninety_five_percentile);
        println!("\t99th percentile: {}", ninety_nine_percentile);
        println!("\tminimum: {}", minimum);
        println!("\tmaximum: {}", maximum);
        println!("\taverage: {}", sum as f64 / count as f64);
        println!("\ttotal key size: {} bytes", key_size);
        println!("\ttotal val size: {} bytes", val_size);
        println!("\ttotal size: {} bytes", key_size + val_size);

        Ok(())
    }

    match name {
        WORD_DOCIDS_DB_NAME => {
            let db = index.word_docids.as_polymorph();
            compute_stats::<RoaringBitmapCodec>(*db, rtxn, name)
        },
        DOCID_WORD_POSITIONS_DB_NAME => {
            let db = index.docid_word_positions.as_polymorph();
            compute_stats::<BoRoaringBitmapCodec>(*db, rtxn, name)
        },
        WORD_PAIR_PROXIMITY_DOCIDS_DB_NAME => {
            let db = index.word_pair_proximity_docids.as_polymorph();
            compute_stats::<CboRoaringBitmapCodec>(*db, rtxn, name)
        },
        unknown => anyhow::bail!("unknown database {:?}", unknown),
    }
}

fn word_pair_proximities_docids(
    index: &Index,
    rtxn: &heed::RoTxn,
    debug: bool,
    word1: String,
    word2: String,
) -> anyhow::Result<()>
{
    use heed::types::ByteSlice;
    use crate::RoaringBitmapCodec;

    let stdout = io::stdout();
    let mut wtr = csv::Writer::from_writer(stdout.lock());
    wtr.write_record(&["word1", "word2", "proximity", "documents_ids"])?;

    // Create the prefix key with only the pair of words.
    let mut prefix = Vec::with_capacity(word1.len() + word2.len() + 1);
    prefix.extend_from_slice(word1.as_bytes());
    prefix.push(0);
    prefix.extend_from_slice(word2.as_bytes());

    let db = index.word_pair_proximity_docids.as_polymorph();
    let iter = db.prefix_iter::<_, ByteSlice, RoaringBitmapCodec>(rtxn, &prefix)?;
    for result in iter {
        let (key, docids) = result?;

        // Skip keys that are longer than the requested one,
        // a longer key means that the second word is a prefix of the requested word.
        if key.len() != prefix.len() + 1 { continue; }

        let proximity = key.last().unwrap();
        let docids = if debug {
            format!("{:?}", docids)
        } else {
            format!("{:?}", docids.iter().collect::<Vec<_>>())
        };
        wtr.write_record(&[&word1, &word2, &proximity.to_string(), &docids])?;
    }

    Ok(wtr.flush()?)
}
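A hedged sketch of how these StructOpt definitions parse a command line, as if written inside this module; the binary name and database path are invented, and StructOpt kebab-cases the variant names.

    use structopt::StructOpt;

    // `infos --db data.mdb -v database-stats word-docids`
    let opt = Opt::from_iter(&[
        "infos", "--db", "data.mdb", "-v", "database-stats", "word-docids",
    ]);
    assert_eq!(opt.verbose, 1);
    // run(opt) then dispatches to database_stats(&index, &rtxn, "word-docids").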
2  milli/src/subcommand/mod.rs  Normal file
@@ -0,0 +1,2 @@
pub mod infos;
pub mod search;
88  milli/src/subcommand/search.rs  Normal file
@@ -0,0 +1,88 @@
use std::io::{self, BufRead, Write};
|
||||
use std::iter::once;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Instant;
|
||||
|
||||
use byte_unit::Byte;
|
||||
use heed::EnvOpenOptions;
|
||||
use log::debug;
|
||||
use structopt::StructOpt;
|
||||
|
||||
use crate::{Index, obkv_to_json};
|
||||
|
||||
#[derive(Debug, StructOpt)]
|
||||
/// A simple search helper binary for the milli project.
|
||||
pub struct Opt {
|
||||
/// The database path where the database is located.
|
||||
/// It is created if it doesn't already exist.
|
||||
#[structopt(long = "db", parse(from_os_str))]
|
||||
database: PathBuf,
|
||||
|
||||
/// The maximum size the database can take on disk. It is recommended to specify
|
||||
/// the whole disk space (value must be a multiple of a page size).
|
||||
#[structopt(long = "db-size", default_value = "100 GiB")]
|
||||
database_size: Byte,
|
||||
|
||||
/// Verbose mode (-v, -vv, -vvv, etc.)
|
||||
#[structopt(short, long, parse(from_occurrences))]
|
||||
verbose: usize,
|
||||
|
||||
/// The query string to search for (doesn't support prefix search yet).
|
||||
query: Option<String>,
|
||||
|
||||
/// Compute and print the facet distribution of all the faceted fields.
|
||||
#[structopt(long)]
|
||||
print_facet_distribution: bool,
|
||||
}
|
||||
|
||||
pub fn run(opt: Opt) -> anyhow::Result<()> {
|
||||
stderrlog::new()
|
||||
.verbosity(opt.verbose)
|
||||
.show_level(false)
|
||||
.timestamp(stderrlog::Timestamp::Off)
|
||||
.init()?;
|
||||
|
||||
std::fs::create_dir_all(&opt.database)?;
|
||||
let mut options = EnvOpenOptions::new();
|
||||
options.map_size(opt.database_size.get_bytes() as usize);
|
||||
|
||||
// Open the LMDB database.
|
||||
let index = Index::new(options, &opt.database)?;
|
||||
let rtxn = index.read_txn()?;
|
||||
let fields_ids_map = index.fields_ids_map(&rtxn)?;
|
||||
let displayed_fields = match index.displayed_fields_ids(&rtxn)? {
|
||||
Some(fields) => fields,
|
||||
None => fields_ids_map.iter().map(|(id, _)| id).collect(),
|
||||
};
|
||||
|
||||
let stdin = io::stdin();
|
||||
let lines = match opt.query {
|
||||
Some(query) => Box::new(once(Ok(query))),
|
||||
None => Box::new(stdin.lock().lines()) as Box<dyn Iterator<Item = _>>,
|
||||
};
|
||||
|
||||
let mut stdout = io::stdout();
|
||||
for result in lines {
|
||||
let before = Instant::now();
|
||||
|
||||
let query = result?;
|
||||
let result = index.search(&rtxn).query(query).execute()?;
|
||||
let documents = index.documents(&rtxn, result.documents_ids.iter().cloned())?;
|
||||
|
||||
for (_id, record) in documents {
|
||||
let val = obkv_to_json(&displayed_fields, &fields_ids_map, record)?;
|
||||
serde_json::to_writer(&mut stdout, &val)?;
|
||||
let _ = writeln!(&mut stdout);
|
||||
}
|
||||
|
||||
if opt.print_facet_distribution {
|
||||
let facets = index.facets_distribution(&rtxn).candidates(result.candidates).execute()?;
|
||||
serde_json::to_writer(&mut stdout, &facets)?;
|
||||
let _ = writeln!(&mut stdout);
|
||||
}
|
||||
|
||||
debug!("Took {:.02?} to find {} documents", before.elapsed(), result.documents_ids.len());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
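
A hypothetical invocation of this subcommand (the binary and subcommand names are assumed here; the flags match the `Opt` struct above):

    echo 'george clooney' | milli search --db ./movies.mdb --db-size '10 GiB' --print-facet-distribution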

67
milli/src/update/available_documents_ids.rs
Normal file
@@ -0,0 +1,67 @@
use std::iter::{Chain, FromIterator};
use std::ops::RangeInclusive;
use roaring::bitmap::{RoaringBitmap, IntoIter};

pub struct AvailableDocumentsIds {
    iter: Chain<IntoIter, RangeInclusive<u32>>,
}

impl AvailableDocumentsIds {
    pub fn from_documents_ids(docids: &RoaringBitmap) -> AvailableDocumentsIds {
        match docids.max() {
            Some(last_id) => {
                let mut available = RoaringBitmap::from_iter(0..last_id);
                available.difference_with(&docids);

                let iter = match last_id.checked_add(1) {
                    Some(id) => id..=u32::max_value(),
                    None => 1..=0, // empty range iterator
                };

                AvailableDocumentsIds {
                    iter: available.into_iter().chain(iter),
                }
            },
            None => {
                let empty = RoaringBitmap::new().into_iter();
                AvailableDocumentsIds {
                    iter: empty.chain(0..=u32::max_value()),
                }
            },
        }
    }
}

impl Iterator for AvailableDocumentsIds {
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn empty() {
        let base = RoaringBitmap::new();
        let left = AvailableDocumentsIds::from_documents_ids(&base);
        let right = 0..=u32::max_value();
        left.zip(right).take(500).for_each(|(l, r)| assert_eq!(l, r));
    }

    #[test]
    fn scattered() {
        let mut base = RoaringBitmap::new();
        base.insert(0);
        base.insert(10);
        base.insert(100);
        base.insert(405);

        let left = AvailableDocumentsIds::from_documents_ids(&base);
        let right = (0..=u32::max_value()).filter(|&n| n != 0 && n != 10 && n != 100 && n != 405);
        left.zip(right).take(500).for_each(|(l, r)| assert_eq!(l, r));
    }
}
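
A minimal usage sketch of the iterator above (illustrative only; it assumes just the `roaring` crate and the type defined in this file):

fn available_ids_example() {
    use roaring::RoaringBitmap;

    let mut used = RoaringBitmap::new();
    used.insert(0);
    used.insert(2);

    // The unused ids below the maximum come first, then everything above it.
    let mut available = AvailableDocumentsIds::from_documents_ids(&used);
    assert_eq!(available.next(), Some(1));
    assert_eq!(available.next(), Some(3));
    assert_eq!(available.next(), Some(4));
}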

56
milli/src/update/clear_documents.rs
Normal file
@@ -0,0 +1,56 @@
use roaring::RoaringBitmap;
use crate::{ExternalDocumentsIds, Index};

pub struct ClearDocuments<'t, 'u, 'i> {
    wtxn: &'t mut heed::RwTxn<'i, 'u>,
    index: &'i Index,
    _update_id: u64,
}

impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
    pub fn new(
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
        update_id: u64,
    ) -> ClearDocuments<'t, 'u, 'i> {
        ClearDocuments { wtxn, index, _update_id: update_id }
    }

    pub fn execute(self) -> anyhow::Result<usize> {
        let Index {
            env: _env,
            main: _main,
            word_docids,
            docid_word_positions,
            word_pair_proximity_docids,
            facet_field_id_value_docids,
            field_id_docid_facet_values,
            documents,
        } = self.index;

        // We retrieve the number of documents ids that we are deleting.
        let number_of_documents = self.index.number_of_documents(self.wtxn)?;
        let faceted_fields = self.index.faceted_fields_ids(self.wtxn)?;

        // We clean some of the main engine data structures.
        self.index.put_words_fst(self.wtxn, &fst::Set::default())?;
        self.index.put_external_documents_ids(self.wtxn, &ExternalDocumentsIds::default())?;
        self.index.put_documents_ids(self.wtxn, &RoaringBitmap::default())?;

        // We clean all the faceted documents ids.
        for (field_id, _) in faceted_fields {
            self.index.put_faceted_documents_ids(self.wtxn, field_id, &RoaringBitmap::default())?;
        }

        // Clear the other databases.
        word_docids.clear(self.wtxn)?;
        docid_word_positions.clear(self.wtxn)?;
        word_pair_proximity_docids.clear(self.wtxn)?;
        facet_field_id_value_docids.clear(self.wtxn)?;
        field_id_docid_facet_values.clear(self.wtxn)?;
        documents.clear(self.wtxn)?;

        Ok(number_of_documents)
    }
}
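
A minimal caller sketch (illustrative; it assumes an opened `Index` with a `write_txn` method, as used elsewhere in this commit, and an arbitrary update id):

    let mut wtxn = index.write_txn()?;
    let deleted_count = ClearDocuments::new(&mut wtxn, &index, update_id).execute()?;
    wtxn.commit()?;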

250
milli/src/update/delete_documents.rs
Normal file
@@ -0,0 +1,250 @@
use fst::IntoStreamer;
use heed::types::ByteSlice;
use roaring::RoaringBitmap;

use crate::facet::FacetType;
use crate::{Index, BEU32, SmallString32, ExternalDocumentsIds};
use crate::heed_codec::facet::{FieldDocIdFacetStringCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetI64Codec};
use super::ClearDocuments;

pub struct DeleteDocuments<'t, 'u, 'i> {
    wtxn: &'t mut heed::RwTxn<'i, 'u>,
    index: &'i Index,
    external_documents_ids: ExternalDocumentsIds<'static>,
    documents_ids: RoaringBitmap,
    update_id: u64,
}

impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
    pub fn new(
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
        update_id: u64,
    ) -> anyhow::Result<DeleteDocuments<'t, 'u, 'i>>
    {
        let external_documents_ids = index
            .external_documents_ids(wtxn)?
            .into_static();

        Ok(DeleteDocuments {
            wtxn,
            index,
            external_documents_ids,
            documents_ids: RoaringBitmap::new(),
            update_id,
        })
    }

    pub fn delete_document(&mut self, docid: u32) {
        self.documents_ids.insert(docid);
    }

    pub fn delete_documents(&mut self, docids: &RoaringBitmap) {
        self.documents_ids.union_with(docids);
    }

    pub fn delete_external_id(&mut self, external_id: &str) -> Option<u32> {
        let docid = self.external_documents_ids.get(external_id)?;
        self.delete_document(docid);
        Some(docid)
    }

    pub fn execute(self) -> anyhow::Result<usize> {
        // We retrieve the current documents ids that are in the database.
        let mut documents_ids = self.index.documents_ids(self.wtxn)?;

        // We can and must stop removing documents in a database that is empty.
        if documents_ids.is_empty() {
            return Ok(0);
        }

        // We remove the documents ids that we want to delete
        // from the documents in the database and write them back.
        let current_documents_ids_len = documents_ids.len();
        documents_ids.difference_with(&self.documents_ids);
        self.index.put_documents_ids(self.wtxn, &documents_ids)?;

        // We can execute a ClearDocuments operation when the number of documents
        // to delete is exactly the number of documents in the database.
        if current_documents_ids_len == self.documents_ids.len() {
            return ClearDocuments::new(self.wtxn, self.index, self.update_id).execute();
        }

        let fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
        let id_field = fields_ids_map.id("id").expect(r#"the field "id" to be present"#);

        let Index {
            env: _env,
            main: _main,
            word_docids,
            docid_word_positions,
            word_pair_proximity_docids,
            facet_field_id_value_docids,
            field_id_docid_facet_values,
            documents,
        } = self.index;

        // Retrieve the words and the external documents ids contained in the documents.
        let mut words = Vec::new();
        let mut external_ids = Vec::new();
        for docid in &self.documents_ids {
            // We create an iterator to be able to get the content and delete the document
            // content itself. It's faster to acquire a cursor to get and delete,
            // as we avoid traversing the LMDB B-Tree two times but only once.
            let key = BEU32::new(docid);
            let mut iter = documents.range_mut(self.wtxn, &(key..=key))?;
            if let Some((_key, obkv)) = iter.next().transpose()? {
                if let Some(content) = obkv.get(id_field) {
                    let external_id: SmallString32 = serde_json::from_slice(content).unwrap();
                    external_ids.push(external_id);
                }
                iter.del_current()?;
            }
            drop(iter);

            // We iterate through the words positions of the document id,
            // retrieve the word and delete the positions.
            let mut iter = docid_word_positions.prefix_iter_mut(self.wtxn, &(docid, ""))?;
            while let Some(result) = iter.next() {
                let ((_docid, word), _positions) = result?;
                // This boolean will indicate if we must remove this word from the words FST.
                words.push((SmallString32::from(word), false));
                iter.del_current()?;
            }
        }

        // We create the FST set of the external ids that we must delete.
        external_ids.sort_unstable();
        let external_ids_to_delete = fst::Set::from_iter(external_ids.iter().map(AsRef::as_ref))?;

        // We acquire the current external documents ids map...
        let mut new_external_documents_ids = self.index.external_documents_ids(self.wtxn)?;
        // ...and remove the to-delete external ids.
        new_external_documents_ids.delete_ids(external_ids_to_delete)?;

        // We write the new external ids into the main database.
        let new_external_documents_ids = new_external_documents_ids.into_static();
        self.index.put_external_documents_ids(self.wtxn, &new_external_documents_ids)?;

        // Maybe we can improve the get performance of the words
        // if we sort the words first, keeping the LMDB pages in cache.
        words.sort_unstable();

        // We iterate over the words and delete the documents ids
        // from the word docids database.
        for (word, must_remove) in &mut words {
            // We create an iterator to be able to get the content and delete the word docids.
            // It's faster to acquire a cursor to get and delete or put, as we avoid traversing
            // the LMDB B-Tree two times but only once.
            let mut iter = word_docids.prefix_iter_mut(self.wtxn, &word)?;
            if let Some((key, mut docids)) = iter.next().transpose()? {
                if key == word.as_ref() {
                    let previous_len = docids.len();
                    docids.difference_with(&self.documents_ids);
                    if docids.is_empty() {
                        iter.del_current()?;
                        *must_remove = true;
                    } else if docids.len() != previous_len {
                        iter.put_current(key, &docids)?;
                    }
                }
            }
        }

        // We construct an FST set that contains the words to delete from the words FST.
        let words_to_delete = words.iter().filter_map(|(word, must_remove)| {
            if *must_remove { Some(word.as_ref()) } else { None }
        });
        let words_to_delete = fst::Set::from_iter(words_to_delete)?;

        let new_words_fst = {
            // We retrieve the current words FST from the database.
            let words_fst = self.index.words_fst(self.wtxn)?;
            let difference = words_fst.op().add(&words_to_delete).difference();

            // We stream the words that remain once the to-delete words are removed.
            let mut new_words_fst_builder = fst::SetBuilder::memory();
            new_words_fst_builder.extend_stream(difference.into_stream())?;

            // We create a words FST set from the above builder.
            new_words_fst_builder.into_set()
        };

        // We write the new words FST into the main database.
        self.index.put_words_fst(self.wtxn, &new_words_fst)?;

        // We delete the documents ids that are under the pairs of words:
        // iterating over all the word pairs is faster and uses no extra memory
        // compared to computing the cartesian product of the deleted documents' words.
        let mut iter = word_pair_proximity_docids.remap_key_type::<ByteSlice>().iter_mut(self.wtxn)?;
        while let Some(result) = iter.next() {
            let (bytes, mut docids) = result?;
            let previous_len = docids.len();
            docids.difference_with(&self.documents_ids);
            if docids.is_empty() {
                iter.del_current()?;
            } else if docids.len() != previous_len {
                iter.put_current(bytes, &docids)?;
            }
        }

        drop(iter);

        // Remove the documents ids from the faceted documents ids.
        let faceted_fields = self.index.faceted_fields_ids(self.wtxn)?;
        for (field_id, facet_type) in faceted_fields {
            let mut docids = self.index.faceted_documents_ids(self.wtxn, field_id)?;
            docids.difference_with(&self.documents_ids);
            self.index.put_faceted_documents_ids(self.wtxn, field_id, &docids)?;

            // We delete the entries that are part of the documents ids.
            let iter = field_id_docid_facet_values.prefix_iter_mut(self.wtxn, &[field_id])?;
            match facet_type {
                FacetType::String => {
                    let mut iter = iter.remap_key_type::<FieldDocIdFacetStringCodec>();
                    while let Some(result) = iter.next() {
                        let ((_fid, docid, _value), ()) = result?;
                        if self.documents_ids.contains(docid) {
                            iter.del_current()?;
                        }
                    }
                },
                FacetType::Float => {
                    let mut iter = iter.remap_key_type::<FieldDocIdFacetF64Codec>();
                    while let Some(result) = iter.next() {
                        let ((_fid, docid, _value), ()) = result?;
                        if self.documents_ids.contains(docid) {
                            iter.del_current()?;
                        }
                    }
                },
                FacetType::Integer => {
                    let mut iter = iter.remap_key_type::<FieldDocIdFacetI64Codec>();
                    while let Some(result) = iter.next() {
                        let ((_fid, docid, _value), ()) = result?;
                        if self.documents_ids.contains(docid) {
                            iter.del_current()?;
                        }
                    }
                },
            }
        }

        // We delete the documents ids that are under the facet field id values.
        let mut iter = facet_field_id_value_docids.iter_mut(self.wtxn)?;
        while let Some(result) = iter.next() {
            let (bytes, mut docids) = result?;
            let previous_len = docids.len();
            docids.difference_with(&self.documents_ids);
            if docids.is_empty() {
                iter.del_current()?;
            } else if docids.len() != previous_len {
                iter.put_current(bytes, &docids)?;
            }
        }

        drop(iter);

        Ok(self.documents_ids.len() as usize)
    }
}
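
A minimal caller sketch (illustrative; the external id is made up, and `index`/`update_id` come from the caller): deletions are batched on the builder and applied by a single `execute` call.

    let mut wtxn = index.write_txn()?;
    let mut builder = DeleteDocuments::new(&mut wtxn, &index, update_id)?;
    builder.delete_external_id("movie-1032");
    builder.delete_document(42);
    let deleted_count = builder.execute()?;
    wtxn.commit()?;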

262
milli/src/update/facets.rs
Normal file
@@ -0,0 +1,262 @@
use std::cmp;
use std::fs::File;
use std::num::NonZeroUsize;

use grenad::{CompressionType, Reader, Writer, FileFuse};
use heed::types::{ByteSlice, DecodeIgnore};
use heed::{BytesEncode, Error};
use log::debug;
use num_traits::{Bounded, Zero};
use roaring::RoaringBitmap;

use crate::facet::FacetType;
use crate::heed_codec::CboRoaringBitmapCodec;
use crate::heed_codec::facet::{FacetLevelValueI64Codec, FacetLevelValueF64Codec};
use crate::Index;
use crate::update::index_documents::WriteMethod;
use crate::update::index_documents::{create_writer, writer_into_reader, write_into_lmdb_database};

pub struct Facets<'t, 'u, 'i> {
    wtxn: &'t mut heed::RwTxn<'i, 'u>,
    index: &'i Index,
    pub(crate) chunk_compression_type: CompressionType,
    pub(crate) chunk_compression_level: Option<u32>,
    pub(crate) chunk_fusing_shrink_size: Option<u64>,
    level_group_size: NonZeroUsize,
    min_level_size: NonZeroUsize,
    _update_id: u64,
}

impl<'t, 'u, 'i> Facets<'t, 'u, 'i> {
    pub fn new(
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
        update_id: u64,
    ) -> Facets<'t, 'u, 'i> {
        Facets {
            wtxn,
            index,
            chunk_compression_type: CompressionType::None,
            chunk_compression_level: None,
            chunk_fusing_shrink_size: None,
            level_group_size: NonZeroUsize::new(4).unwrap(),
            min_level_size: NonZeroUsize::new(5).unwrap(),
            _update_id: update_id,
        }
    }

    pub fn level_group_size(&mut self, value: NonZeroUsize) -> &mut Self {
        self.level_group_size = NonZeroUsize::new(cmp::max(value.get(), 2)).unwrap();
        self
    }

    pub fn min_level_size(&mut self, value: NonZeroUsize) -> &mut Self {
        self.min_level_size = value;
        self
    }

    pub fn execute(self) -> anyhow::Result<()> {
        // We get the faceted fields to be able to create the facet levels.
        let faceted_fields = self.index.faceted_fields_ids(self.wtxn)?;

        debug!("Computing and writing the facet values levels docids into LMDB on disk...");
        for (field_id, facet_type) in faceted_fields {
            let (content, documents_ids) = match facet_type {
                FacetType::Integer => {
                    clear_field_levels::<i64, FacetLevelValueI64Codec>(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        field_id,
                    )?;

                    let documents_ids = compute_faceted_documents_ids(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        field_id,
                    )?;

                    let content = compute_facet_levels::<i64, FacetLevelValueI64Codec>(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        self.chunk_compression_type,
                        self.chunk_compression_level,
                        self.chunk_fusing_shrink_size,
                        self.level_group_size,
                        self.min_level_size,
                        field_id,
                    )?;

                    (Some(content), documents_ids)
                },
                FacetType::Float => {
                    clear_field_levels::<f64, FacetLevelValueF64Codec>(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        field_id,
                    )?;

                    let documents_ids = compute_faceted_documents_ids(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        field_id,
                    )?;

                    let content = compute_facet_levels::<f64, FacetLevelValueF64Codec>(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        self.chunk_compression_type,
                        self.chunk_compression_level,
                        self.chunk_fusing_shrink_size,
                        self.level_group_size,
                        self.min_level_size,
                        field_id,
                    )?;

                    (Some(content), documents_ids)
                },
                FacetType::String => {
                    let documents_ids = compute_faceted_documents_ids(
                        self.wtxn,
                        self.index.facet_field_id_value_docids,
                        field_id,
                    )?;

                    (None, documents_ids)
                },
            };

            if let Some(content) = content {
                write_into_lmdb_database(
                    self.wtxn,
                    *self.index.facet_field_id_value_docids.as_polymorph(),
                    content,
                    |_, _| anyhow::bail!("invalid facet level merging"),
                    WriteMethod::GetMergePut,
                )?;
            }

            self.index.put_faceted_documents_ids(self.wtxn, field_id, &documents_ids)?;
        }

        Ok(())
    }
}

fn clear_field_levels<'t, T: 't, KC>(
    wtxn: &'t mut heed::RwTxn,
    db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
    field_id: u8,
) -> heed::Result<()>
where
    T: Copy + Bounded,
    KC: heed::BytesDecode<'t, DItem = (u8, u8, T, T)>,
    KC: for<'x> heed::BytesEncode<'x, EItem = (u8, u8, T, T)>,
{
    let left = (field_id, 1, T::min_value(), T::min_value());
    let right = (field_id, u8::MAX, T::max_value(), T::max_value());
    let range = left..=right;
    db.remap_key_type::<KC>().delete_range(wtxn, &range).map(drop)
}

fn compute_facet_levels<'t, T: 't, KC>(
    rtxn: &'t heed::RoTxn,
    db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
    compression_type: CompressionType,
    compression_level: Option<u32>,
    shrink_size: Option<u64>,
    level_group_size: NonZeroUsize,
    min_level_size: NonZeroUsize,
    field_id: u8,
) -> anyhow::Result<Reader<FileFuse>>
where
    T: Copy + PartialEq + PartialOrd + Bounded + Zero,
    KC: heed::BytesDecode<'t, DItem = (u8, u8, T, T)>,
    KC: for<'x> heed::BytesEncode<'x, EItem = (u8, u8, T, T)>,
{
    let first_level_size = db.prefix_iter(rtxn, &[field_id])?
        .remap_types::<DecodeIgnore, DecodeIgnore>()
        .fold(Ok(0usize), |count, result| result.and(count).map(|c| c + 1))?;

    // It is forbidden to keep a cursor and write in a database at the same time with LMDB,
    // therefore we write the facet levels entries into a grenad file before transferring them.
    let mut writer = tempfile::tempfile().and_then(|file| {
        create_writer(compression_type, compression_level, file)
    })?;

    let level_0_range = {
        let left = (field_id, 0, T::min_value(), T::min_value());
        let right = (field_id, 0, T::max_value(), T::max_value());
        left..=right
    };

    // Group sizes are always a power of the original level_group_size, therefore a group
    // always maps whole groups of the previous level and never splits them in half.
    let group_size_iter = (1u8..)
        .map(|l| (l, level_group_size.get().pow(l as u32)))
        .take_while(|(_, s)| first_level_size / *s >= min_level_size.get());

    for (level, group_size) in group_size_iter {
        let mut left = T::zero();
        let mut right = T::zero();
        let mut group_docids = RoaringBitmap::new();

        let db = db.remap_key_type::<KC>();
        for (i, result) in db.range(rtxn, &level_0_range)?.enumerate() {
            let ((_field_id, _level, value, _right), docids) = result?;

            if i == 0 {
                left = value;
            } else if i % group_size == 0 {
                // We found the first bound of the next group, we must store the left
                // and right bounds associated with the docids.
                write_entry::<T, KC>(&mut writer, field_id, level, left, right, &group_docids)?;

                // We save the left bound for the new group and also reset the docids.
                group_docids = RoaringBitmap::new();
                left = value;
            }

            // The right bound is always the last value we ran through.
            group_docids.union_with(&docids);
            right = value;
        }

        if !group_docids.is_empty() {
            write_entry::<T, KC>(&mut writer, field_id, level, left, right, &group_docids)?;
        }
    }

    writer_into_reader(writer, shrink_size)
}

fn compute_faceted_documents_ids(
    rtxn: &heed::RoTxn,
    db: heed::Database<ByteSlice, CboRoaringBitmapCodec>,
    field_id: u8,
) -> anyhow::Result<RoaringBitmap>
{
    let mut documents_ids = RoaringBitmap::new();
    for result in db.prefix_iter(rtxn, &[field_id])? {
        let (_key, docids) = result?;
        documents_ids.union_with(&docids);
    }
    Ok(documents_ids)
}

fn write_entry<T, KC>(
    writer: &mut Writer<File>,
    field_id: u8,
    level: u8,
    left: T,
    right: T,
    ids: &RoaringBitmap,
) -> anyhow::Result<()>
where
    KC: for<'x> heed::BytesEncode<'x, EItem = (u8, u8, T, T)>,
{
    let key = (field_id, level, left, right);
    let key = KC::bytes_encode(&key).ok_or(Error::Encoding)?;
    let data = CboRoaringBitmapCodec::bytes_encode(&ids).ok_or(Error::Encoding)?;
    writer.insert(&key, &data)?;
    Ok(())
}
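
The level construction above is driven by plain arithmetic: level `l` packs `level_group_size^l` level-0 entries per group, and a level is only built while it still produces at least `min_level_size` groups. A self-contained sketch of that schedule (the helper name is made up):

fn facet_level_schedule(
    first_level_size: usize,
    level_group_size: usize,
    min_level_size: usize,
) -> Vec<(u8, usize)> {
    // Returns the (level, group_size) pairs that `compute_facet_levels` iterates on.
    (1u8..)
        .map(|l| (l, level_group_size.pow(l as u32)))
        .take_while(|(_, s)| first_level_size / s >= min_level_size)
        .collect()
}

With the defaults above (group size 4, minimum level size 5), 1000 level-0 entries produce levels 1, 2 and 3 with group sizes 4, 16 and 64.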

105
milli/src/update/index_documents/merge_function.rs
Normal file
@@ -0,0 +1,105 @@
use std::borrow::Cow;

use anyhow::{bail, ensure, Context};
use bstr::ByteSlice as _;
use fst::IntoStreamer;
use roaring::RoaringBitmap;

use crate::heed_codec::CboRoaringBitmapCodec;

const WORDS_FST_KEY: &[u8] = crate::index::WORDS_FST_KEY.as_bytes();
const FIELDS_IDS_MAP_KEY: &[u8] = crate::index::FIELDS_IDS_MAP_KEY.as_bytes();
const DOCUMENTS_IDS_KEY: &[u8] = crate::index::DOCUMENTS_IDS_KEY.as_bytes();

pub fn main_merge(key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    match key {
        WORDS_FST_KEY => {
            let fsts: Vec<_> = values.iter().map(|v| fst::Set::new(v).unwrap()).collect();

            // Union of the FSTs
            let mut op = fst::set::OpBuilder::new();
            fsts.iter().for_each(|fst| op.push(fst.into_stream()));
            let op = op.r#union();

            let mut build = fst::SetBuilder::memory();
            build.extend_stream(op.into_stream()).unwrap();
            Ok(build.into_inner().unwrap())
        },
        FIELDS_IDS_MAP_KEY => {
            ensure!(values.windows(2).all(|vs| vs[0] == vs[1]), "fields ids map doesn't match");
            Ok(values[0].to_vec())
        },
        DOCUMENTS_IDS_KEY => roaring_bitmap_merge(values),
        otherwise => bail!("unexpected main database key {:?}", otherwise),
    }
}

pub fn word_docids_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    roaring_bitmap_merge(values)
}

pub fn docid_word_positions_merge(key: &[u8], _values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    bail!("merging docid word positions is an error ({:?})", key.as_bstr())
}

pub fn field_id_docid_facet_values_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    let first = values.first().context("no value to merge")?;
    ensure!(values.iter().all(|v| v == first), "invalid field id docid facet value merging");
    Ok(first.to_vec())
}

pub fn words_pairs_proximities_docids_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    cbo_roaring_bitmap_merge(values)
}

pub fn facet_field_value_docids_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    cbo_roaring_bitmap_merge(values)
}

pub fn documents_merge(key: &[u8], _values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    bail!("merging documents is an error ({:?})", key.as_bstr())
}

pub fn merge_two_obkvs(base: obkv::KvReader, update: obkv::KvReader, buffer: &mut Vec<u8>) {
    use itertools::merge_join_by;
    use itertools::EitherOrBoth::{Both, Left, Right};

    buffer.clear();

    let mut writer = obkv::KvWriter::new(buffer);
    for eob in merge_join_by(base.iter(), update.iter(), |(b, _), (u, _)| b.cmp(u)) {
        match eob {
            Both(_, (k, v)) | Left((k, v)) | Right((k, v)) => writer.insert(k, v).unwrap(),
        }
    }

    writer.finish().unwrap();
}

fn roaring_bitmap_merge(values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    let (head, tail) = values.split_first().unwrap();
    let mut head = RoaringBitmap::deserialize_from(&head[..])?;

    for value in tail {
        let bitmap = RoaringBitmap::deserialize_from(&value[..])?;
        head.union_with(&bitmap);
    }

    let mut vec = Vec::with_capacity(head.serialized_size());
    head.serialize_into(&mut vec)?;
    Ok(vec)
}

fn cbo_roaring_bitmap_merge(values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    let (head, tail) = values.split_first().unwrap();
    let mut head = CboRoaringBitmapCodec::deserialize_from(&head[..])?;

    for value in tail {
        let bitmap = CboRoaringBitmapCodec::deserialize_from(&value[..])?;
        head.union_with(&bitmap);
    }

    let mut vec = Vec::new();
    CboRoaringBitmapCodec::serialize_into(&head, &mut vec)?;
    Ok(vec)
}
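
What the bitmap merge helpers above guarantee, in miniature: merging two serialized bitmaps yields the serialization of their union. A self-contained check (assuming only the `roaring` crate):

fn union_of_serialized() -> anyhow::Result<()> {
    use roaring::RoaringBitmap;

    let a: RoaringBitmap = (0..3u32).collect();
    let b: RoaringBitmap = (2..5u32).collect();

    // Serialize both bitmaps, as the merge functions receive raw bytes.
    let mut bytes_a = Vec::new();
    a.serialize_into(&mut bytes_a)?;
    let mut bytes_b = Vec::new();
    b.serialize_into(&mut bytes_b)?;

    // Deserialize and union, exactly like `roaring_bitmap_merge` does.
    let mut merged = RoaringBitmap::deserialize_from(&bytes_a[..])?;
    merged.union_with(&RoaringBitmap::deserialize_from(&bytes_b[..])?);
    assert_eq!(merged, (0..5u32).collect::<RoaringBitmap>());
    Ok(())
}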

1070
milli/src/update/index_documents/mod.rs
Normal file
File diff suppressed because it is too large

690
milli/src/update/index_documents/store.rs
Normal file
@@ -0,0 +1,690 @@
use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::convert::{TryFrom, TryInto};
use std::fs::File;
use std::iter::FromIterator;
use std::time::Instant;
use std::{cmp, iter};

use anyhow::{bail, Context};
use bstr::ByteSlice as _;
use fst::Set;
use grenad::{Reader, FileFuse, Writer, Sorter, CompressionType};
use heed::BytesEncode;
use linked_hash_map::LinkedHashMap;
use log::{debug, info};
use meilisearch_tokenizer::{Analyzer, AnalyzerConfig};
use ordered_float::OrderedFloat;
use roaring::RoaringBitmap;
use serde_json::Value;
use tempfile::tempfile;

use crate::facet::FacetType;
use crate::heed_codec::facet::{FacetValueStringCodec, FacetLevelValueF64Codec, FacetLevelValueI64Codec};
use crate::heed_codec::facet::{FieldDocIdFacetStringCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetI64Codec};
use crate::heed_codec::{BoRoaringBitmapCodec, CboRoaringBitmapCodec};
use crate::update::UpdateIndexingStep;
use crate::{json_to_string, SmallVec8, SmallVec32, SmallString32, Position, DocumentId, FieldId};

use super::{MergeFn, create_writer, create_sorter, writer_into_reader};
use super::merge_function::{
    main_merge, word_docids_merge, words_pairs_proximities_docids_merge,
    facet_field_value_docids_merge, field_id_docid_facet_values_merge,
};

const LMDB_MAX_KEY_LENGTH: usize = 511;
const ONE_MEBIBYTE: usize = 1024 * 1024;

const MAX_POSITION: usize = 1000;
const WORDS_FST_KEY: &[u8] = crate::index::WORDS_FST_KEY.as_bytes();

pub struct Readers {
    pub main: Reader<FileFuse>,
    pub word_docids: Reader<FileFuse>,
    pub docid_word_positions: Reader<FileFuse>,
    pub words_pairs_proximities_docids: Reader<FileFuse>,
    pub facet_field_value_docids: Reader<FileFuse>,
    pub field_id_docid_facet_values: Reader<FileFuse>,
    pub documents: Reader<FileFuse>,
}

pub struct Store<'s, A> {
    // Indexing parameters
    searchable_fields: HashSet<FieldId>,
    faceted_fields: HashMap<FieldId, FacetType>,
    // Caches
    word_docids: LinkedHashMap<SmallVec32<u8>, RoaringBitmap>,
    word_docids_limit: usize,
    words_pairs_proximities_docids: LinkedHashMap<(SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap>,
    words_pairs_proximities_docids_limit: usize,
    facet_field_value_docids: LinkedHashMap<(u8, FacetValue), RoaringBitmap>,
    facet_field_value_docids_limit: usize,
    // MTBL parameters
    chunk_compression_type: CompressionType,
    chunk_compression_level: Option<u32>,
    chunk_fusing_shrink_size: Option<u64>,
    // MTBL sorters
    main_sorter: Sorter<MergeFn>,
    word_docids_sorter: Sorter<MergeFn>,
    words_pairs_proximities_docids_sorter: Sorter<MergeFn>,
    facet_field_value_docids_sorter: Sorter<MergeFn>,
    field_id_docid_facet_values_sorter: Sorter<MergeFn>,
    // MTBL writers
    docid_word_positions_writer: Writer<File>,
    documents_writer: Writer<File>,
    // tokenizer
    analyzer: Analyzer<'s, A>,
}

impl<'s, A: AsRef<[u8]>> Store<'s, A> {
    pub fn new(
        searchable_fields: HashSet<FieldId>,
        faceted_fields: HashMap<FieldId, FacetType>,
        linked_hash_map_size: Option<usize>,
        max_nb_chunks: Option<usize>,
        max_memory: Option<usize>,
        chunk_compression_type: CompressionType,
        chunk_compression_level: Option<u32>,
        chunk_fusing_shrink_size: Option<u64>,
        stop_words: &'s Set<A>,
    ) -> anyhow::Result<Self>
    {
        // We divide the maximum memory by the number of sorters the Store feeds evenly.
        let max_memory = max_memory.map(|mm| cmp::max(ONE_MEBIBYTE, mm / 4));
        let linked_hash_map_size = linked_hash_map_size.unwrap_or(500);

        let main_sorter = create_sorter(
            main_merge,
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            max_nb_chunks,
            max_memory,
        );
        let word_docids_sorter = create_sorter(
            word_docids_merge,
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            max_nb_chunks,
            max_memory,
        );
        let words_pairs_proximities_docids_sorter = create_sorter(
            words_pairs_proximities_docids_merge,
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            max_nb_chunks,
            max_memory,
        );
        let facet_field_value_docids_sorter = create_sorter(
            facet_field_value_docids_merge,
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            max_nb_chunks,
            max_memory,
        );
        let field_id_docid_facet_values_sorter = create_sorter(
            field_id_docid_facet_values_merge,
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            max_nb_chunks,
            Some(1024 * 1024 * 1024), // 1 GiB
        );

        let documents_writer = tempfile().and_then(|f| {
            create_writer(chunk_compression_type, chunk_compression_level, f)
        })?;
        let docid_word_positions_writer = tempfile().and_then(|f| {
            create_writer(chunk_compression_type, chunk_compression_level, f)
        })?;

        let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(stop_words));

        Ok(Store {
            // Indexing parameters.
            searchable_fields,
            faceted_fields,
            // Caches
            word_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
            word_docids_limit: linked_hash_map_size,
            words_pairs_proximities_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
            words_pairs_proximities_docids_limit: linked_hash_map_size,
            facet_field_value_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
            facet_field_value_docids_limit: linked_hash_map_size,
            // MTBL parameters
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            // MTBL sorters
            main_sorter,
            word_docids_sorter,
            words_pairs_proximities_docids_sorter,
            facet_field_value_docids_sorter,
            field_id_docid_facet_values_sorter,
            // MTBL writers
            docid_word_positions_writer,
            documents_writer,
            // tokenizer
            analyzer,
        })
    }

    // Save the document id under each word we have seen it contain.
    fn insert_word_docid(&mut self, word: &str, id: DocumentId) -> anyhow::Result<()> {
        // If get_refresh finds the element, it is assured to be at the end of the linked hash map.
        match self.word_docids.get_refresh(word.as_bytes()) {
            Some(old) => { old.insert(id); },
            None => {
                let word_vec = SmallVec32::from(word.as_bytes());
                // A newly inserted element is appended at the end of the linked hash map.
                self.word_docids.insert(word_vec, RoaringBitmap::from_iter(Some(id)));
                // If the word docids cache has just reached its capacity, we remove one
                // element so that the next insertion doesn't grow the capacity.
                if self.word_docids.len() == self.word_docids_limit {
                    // Removing the front element is equivalent to removing the LRU element.
                    let lru = self.word_docids.pop_front();
                    Self::write_word_docids(&mut self.word_docids_sorter, lru)?;
                }
            }
        }
        Ok(())
    }

    // Save the document id under the facet field id and value we have seen it in.
    fn insert_facet_values_docid(
        &mut self,
        field_id: FieldId,
        field_value: FacetValue,
        id: DocumentId,
    ) -> anyhow::Result<()>
    {
        Self::write_field_id_docid_facet_value(&mut self.field_id_docid_facet_values_sorter, field_id, id, &field_value)?;

        let key = (field_id, field_value);
        // If get_refresh finds the element, it is assured to be at the end of the linked hash map.
        match self.facet_field_value_docids.get_refresh(&key) {
            Some(old) => { old.insert(id); },
            None => {
                // A newly inserted element is appended at the end of the linked hash map.
                self.facet_field_value_docids.insert(key, RoaringBitmap::from_iter(Some(id)));
                // If the facet docids cache has just reached its capacity, we remove one
                // element so that the next insertion doesn't grow the capacity.
                if self.facet_field_value_docids.len() == self.facet_field_value_docids_limit {
                    // Removing the front element is equivalent to removing the LRU element.
                    Self::write_facet_field_value_docids(
                        &mut self.facet_field_value_docids_sorter,
                        self.facet_field_value_docids.pop_front(),
                    )?;
                }
            }
        }
        Ok(())
    }

    // Save the document id under the words pairs proximities that it contains.
    fn insert_words_pairs_proximities_docids<'a>(
        &mut self,
        words_pairs_proximities: impl IntoIterator<Item=((&'a str, &'a str), u8)>,
        id: DocumentId,
    ) -> anyhow::Result<()>
    {
        for ((w1, w2), prox) in words_pairs_proximities {
            let w1 = SmallVec32::from(w1.as_bytes());
            let w2 = SmallVec32::from(w2.as_bytes());
            let key = (w1, w2, prox);
            // If get_refresh finds the element, it is assured
            // to be at the end of the linked hash map.
            match self.words_pairs_proximities_docids.get_refresh(&key) {
                Some(old) => { old.insert(id); },
                None => {
                    // A newly inserted element is appended at the end of the linked hash map.
                    let ids = RoaringBitmap::from_iter(Some(id));
                    self.words_pairs_proximities_docids.insert(key, ids);
                }
            }
        }

        // If the linked hash map is over capacity we must remove the overflowing elements.
        let len = self.words_pairs_proximities_docids.len();
        let overflow = len.checked_sub(self.words_pairs_proximities_docids_limit);
        if let Some(overflow) = overflow {
            let mut lrus = Vec::with_capacity(overflow);
            // Removing front elements is equivalent to removing the LRUs.
            let iter = iter::from_fn(|| self.words_pairs_proximities_docids.pop_front());
            iter.take(overflow).for_each(|x| lrus.push(x));
            Self::write_words_pairs_proximities(&mut self.words_pairs_proximities_docids_sorter, lrus)?;
        }

        Ok(())
    }

    fn write_document(
        &mut self,
        document_id: DocumentId,
        words_positions: &mut HashMap<String, SmallVec32<Position>>,
        facet_values: &mut HashMap<FieldId, SmallVec8<FacetValue>>,
        record: &[u8],
    ) -> anyhow::Result<()>
    {
        // We compute the list of words pairs proximities (self-join) and write it directly to disk.
        let words_pair_proximities = compute_words_pair_proximities(&words_positions);
        self.insert_words_pairs_proximities_docids(words_pair_proximities, document_id)?;

        // The word positions must be written before the map is drained below.
        self.documents_writer.insert(document_id.to_be_bytes(), record)?;
        Self::write_docid_word_positions(&mut self.docid_word_positions_writer, document_id, words_positions)?;

        // We store document_id associated with all the words the record contains.
        for (word, _) in words_positions.drain() {
            self.insert_word_docid(&word, document_id)?;
        }

        // We store document_id associated with all the field ids and values.
        for (field, values) in facet_values.drain() {
            for value in values {
                self.insert_facet_values_docid(field, value, document_id)?;
            }
        }

        Ok(())
    }

    fn write_words_pairs_proximities(
        sorter: &mut Sorter<MergeFn>,
        iter: impl IntoIterator<Item=((SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap)>,
    ) -> anyhow::Result<()>
    {
        let mut key = Vec::new();
        let mut buffer = Vec::new();

        for ((w1, w2, min_prox), docids) in iter {
            key.clear();
            key.extend_from_slice(w1.as_bytes());
            key.push(0);
            key.extend_from_slice(w2.as_bytes());
            // Storing the minimum proximity found between those words.
            key.push(min_prox);
            // We serialize the document ids into a buffer...
            buffer.clear();
            buffer.reserve(CboRoaringBitmapCodec::serialized_size(&docids));
            CboRoaringBitmapCodec::serialize_into(&docids, &mut buffer)?;
            // ...that we write under the generated key into MTBL.
            if lmdb_key_valid_size(&key) {
                sorter.insert(&key, &buffer)?;
            }
        }

        Ok(())
    }

    fn write_docid_word_positions(
        writer: &mut Writer<File>,
        id: DocumentId,
        words_positions: &HashMap<String, SmallVec32<Position>>,
    ) -> anyhow::Result<()>
    {
        // We prefix the words by the document id.
        let mut key = id.to_be_bytes().to_vec();
        let base_size = key.len();

        // We order the words lexicographically, this way we avoid passing by a sorter.
        let words_positions = BTreeMap::from_iter(words_positions);

        for (word, positions) in words_positions {
            key.truncate(base_size);
            key.extend_from_slice(word.as_bytes());
            // We serialize the positions into a buffer...
            let positions = RoaringBitmap::from_iter(positions.iter().cloned());
            let bytes = BoRoaringBitmapCodec::bytes_encode(&positions)
                .with_context(|| "could not serialize positions")?;
            // ...that we write under the generated key into MTBL.
            if lmdb_key_valid_size(&key) {
                writer.insert(&key, &bytes)?;
            }
        }

        Ok(())
    }

    fn write_facet_field_value_docids<I>(
        sorter: &mut Sorter<MergeFn>,
        iter: I,
    ) -> anyhow::Result<()>
    where I: IntoIterator<Item=((FieldId, FacetValue), RoaringBitmap)>
    {
        use FacetValue::*;

        for ((field_id, value), docids) in iter {
            let result = match value {
                String(s) => FacetValueStringCodec::bytes_encode(&(field_id, &s)).map(Cow::into_owned),
                Float(f) => FacetLevelValueF64Codec::bytes_encode(&(field_id, 0, *f, *f)).map(Cow::into_owned),
                Integer(i) => FacetLevelValueI64Codec::bytes_encode(&(field_id, 0, i, i)).map(Cow::into_owned),
            };
            let key = result.context("could not serialize facet key")?;
            let bytes = CboRoaringBitmapCodec::bytes_encode(&docids)
                .context("could not serialize docids")?;
            if lmdb_key_valid_size(&key) {
                sorter.insert(&key, &bytes)?;
            }
        }

        Ok(())
    }

    fn write_field_id_docid_facet_value(
        sorter: &mut Sorter<MergeFn>,
        field_id: FieldId,
        document_id: DocumentId,
        value: &FacetValue,
    ) -> anyhow::Result<()>
    {
        use FacetValue::*;

        let result = match value {
            String(s) => FieldDocIdFacetStringCodec::bytes_encode(&(field_id, document_id, s)).map(Cow::into_owned),
            Float(f) => FieldDocIdFacetF64Codec::bytes_encode(&(field_id, document_id, **f)).map(Cow::into_owned),
            Integer(i) => FieldDocIdFacetI64Codec::bytes_encode(&(field_id, document_id, *i)).map(Cow::into_owned),
        };

        let key = result.context("could not serialize facet key")?;
        if lmdb_key_valid_size(&key) {
            sorter.insert(&key, &[])?;
        }

        Ok(())
    }

    fn write_word_docids<I>(sorter: &mut Sorter<MergeFn>, iter: I) -> anyhow::Result<()>
    where I: IntoIterator<Item=(SmallVec32<u8>, RoaringBitmap)>
    {
        let mut key = Vec::new();
        let mut buffer = Vec::new();

        for (word, ids) in iter {
            key.clear();
            key.extend_from_slice(&word);
            // We serialize the document ids into a buffer...
            buffer.clear();
            let ids = RoaringBitmap::from_iter(ids);
            buffer.reserve(ids.serialized_size());
            ids.serialize_into(&mut buffer)?;
            // ...that we write under the generated key into MTBL.
            if lmdb_key_valid_size(&key) {
                sorter.insert(&key, &buffer)?;
            }
        }

        Ok(())
    }

    pub fn index<F>(
        mut self,
        mut documents: grenad::Reader<&[u8]>,
        documents_count: usize,
        thread_index: usize,
        num_threads: usize,
        log_every_n: Option<usize>,
        mut progress_callback: F,
    ) -> anyhow::Result<Readers>
    where F: FnMut(UpdateIndexingStep),
    {
        debug!("{:?}: Indexing in a Store...", thread_index);

        let mut before = Instant::now();
        let mut words_positions = HashMap::new();
        let mut facet_values = HashMap::new();

        let mut count: usize = 0;
        while let Some((key, value)) = documents.next()? {
            let document_id = key.try_into().map(u32::from_be_bytes).unwrap();
            let document = obkv::KvReader::new(value);

            // We skip the documents that are not assigned to this thread.
            if count % num_threads == thread_index {
                // This is a log routine that we run every `log_every_n` documents.
                if thread_index == 0 && log_every_n.map_or(false, |len| count % len == 0) {
                    info!("We have seen {} documents so far ({:.02?}).", format_count(count), before.elapsed());
                    progress_callback(UpdateIndexingStep::IndexDocuments {
                        documents_seen: count,
                        total_documents: documents_count,
                    });
                    before = Instant::now();
                }

                for (attr, content) in document.iter() {
                    if self.faceted_fields.contains_key(&attr) || self.searchable_fields.contains(&attr) {
                        let value = serde_json::from_slice(content)?;

                        if let Some(ftype) = self.faceted_fields.get(&attr) {
                            let mut values = parse_facet_value(*ftype, &value).with_context(|| {
                                format!("extracting facets from the value {}", value)
                            })?;
                            facet_values.entry(attr).or_insert_with(SmallVec8::new).extend(values.drain(..));
                        }

                        if self.searchable_fields.contains(&attr) {
                            let content = match json_to_string(&value) {
                                Some(content) => content,
                                None => continue,
                            };

                            let analyzed = self.analyzer.analyze(&content);
                            let tokens = analyzed
                                .tokens()
                                .filter(|t| t.is_word())
                                .map(|t| t.text().to_string());

                            for (pos, word) in tokens.enumerate().take(MAX_POSITION) {
                                let position = (attr as usize * MAX_POSITION + pos) as u32;
                                words_positions.entry(word).or_insert_with(SmallVec32::new).push(position);
                            }
                        }
                    }
                }

                // We write the document in the documents store.
                self.write_document(document_id, &mut words_positions, &mut facet_values, value)?;
            }

            // We move on to the next document.
            count += 1;
        }

        progress_callback(UpdateIndexingStep::IndexDocuments {
            documents_seen: count,
            total_documents: documents_count,
        });

        let readers = self.finish()?;
        debug!("{:?}: Store created!", thread_index);
        Ok(readers)
    }

    fn finish(mut self) -> anyhow::Result<Readers> {
        let comp_type = self.chunk_compression_type;
        let comp_level = self.chunk_compression_level;
        let shrink_size = self.chunk_fusing_shrink_size;

        Self::write_word_docids(&mut self.word_docids_sorter, self.word_docids)?;
        Self::write_words_pairs_proximities(
            &mut self.words_pairs_proximities_docids_sorter,
            self.words_pairs_proximities_docids,
        )?;
        Self::write_facet_field_value_docids(
            &mut self.facet_field_value_docids_sorter,
            self.facet_field_value_docids,
        )?;

        let mut word_docids_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
        let mut builder = fst::SetBuilder::memory();

        let mut iter = self.word_docids_sorter.into_iter()?;
        while let Some((word, val)) = iter.next()? {
            // The keys come out of the sorter as lexicographically ordered words,
            // so we can use them to construct the words FST directly.
            builder.insert(word)?;
            word_docids_wtr.insert(word, val)?;
        }

        let fst = builder.into_set();
        self.main_sorter.insert(WORDS_FST_KEY, fst.as_fst().as_bytes())?;

        let mut main_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
        self.main_sorter.write_into(&mut main_wtr)?;

        let mut words_pairs_proximities_docids_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
        self.words_pairs_proximities_docids_sorter.write_into(&mut words_pairs_proximities_docids_wtr)?;

        let mut facet_field_value_docids_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
        self.facet_field_value_docids_sorter.write_into(&mut facet_field_value_docids_wtr)?;

        let mut field_id_docid_facet_values_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
        self.field_id_docid_facet_values_sorter.write_into(&mut field_id_docid_facet_values_wtr)?;

        let main = writer_into_reader(main_wtr, shrink_size)?;
        let word_docids = writer_into_reader(word_docids_wtr, shrink_size)?;
        let words_pairs_proximities_docids = writer_into_reader(words_pairs_proximities_docids_wtr, shrink_size)?;
        let facet_field_value_docids = writer_into_reader(facet_field_value_docids_wtr, shrink_size)?;
        let field_id_docid_facet_values = writer_into_reader(field_id_docid_facet_values_wtr, shrink_size)?;
        let docid_word_positions = writer_into_reader(self.docid_word_positions_writer, shrink_size)?;
        let documents = writer_into_reader(self.documents_writer, shrink_size)?;

        Ok(Readers {
            main,
            word_docids,
            docid_word_positions,
            words_pairs_proximities_docids,
            facet_field_value_docids,
            field_id_docid_facet_values,
            documents,
        })
    }
}

/// Outputs a list of all pairs of words with the shortest proximity between 1 and 7 inclusive.
///
/// This list is used by the engine to calculate the documents containing words that are
/// close to each other.
fn compute_words_pair_proximities(
    word_positions: &HashMap<String, SmallVec32<Position>>,
) -> HashMap<(&str, &str), u8>
{
    use itertools::Itertools;

    let mut words_pair_proximities = HashMap::new();
    for ((w1, ps1), (w2, ps2)) in word_positions.iter().cartesian_product(word_positions) {
        let mut min_prox = None;
        for (ps1, ps2) in ps1.iter().cartesian_product(ps2) {
            let prox = crate::proximity::positions_proximity(*ps1, *ps2);
            let prox = u8::try_from(prox).unwrap();
            // We don't care about words that appear at the
            // same position or too far from each other.
            if prox >= 1 && prox <= 7 && min_prox.map_or(true, |mp| prox < mp) {
                min_prox = Some(prox)
            }
        }

        if let Some(min_prox) = min_prox {
            words_pair_proximities.insert((w1.as_str(), w2.as_str()), min_prox);
        }
    }

    words_pair_proximities
}

fn format_count(n: usize) -> String {
    human_format::Formatter::new().with_decimals(1).with_separator("").format(n as f64)
}

fn lmdb_key_valid_size(key: &[u8]) -> bool {
    !key.is_empty() && key.len() <= LMDB_MAX_KEY_LENGTH
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum FacetValue {
    String(SmallString32),
    Float(OrderedFloat<f64>),
    Integer(i64),
}

fn parse_facet_value(ftype: FacetType, value: &Value) -> anyhow::Result<SmallVec8<FacetValue>> {
    use FacetValue::*;

    fn inner_parse_facet_value(
        ftype: FacetType,
        value: &Value,
        can_recurse: bool,
        output: &mut SmallVec8<FacetValue>,
    ) -> anyhow::Result<()>
    {
        match value {
            Value::Null => Ok(()),
            Value::Bool(b) => {
                output.push(Integer(*b as i64));
                Ok(())
            },
            Value::Number(number) => match ftype {
                FacetType::String => {
                    let string = SmallString32::from(number.to_string());
                    output.push(String(string));
                    Ok(())
                },
                FacetType::Float => match number.as_f64() {
                    Some(float) => {
                        output.push(Float(OrderedFloat(float)));
                        Ok(())
                    },
                    None => bail!("invalid facet type, expecting {} found integer", ftype),
                },
                FacetType::Integer => match number.as_i64() {
                    Some(integer) => {
                        output.push(Integer(integer));
                        Ok(())
                    },
                    None => if number.is_f64() {
                        bail!("invalid facet type, expecting {} found float", ftype)
                    } else {
                        bail!("invalid facet type, expecting {} found out-of-bound integer (64bit)", ftype)
                    },
                },
            },
            Value::String(string) => {
                let string = string.trim().to_lowercase();
                if string.is_empty() { return Ok(()) }
                match ftype {
                    FacetType::String => {
                        let string = SmallString32::from(string);
                        output.push(String(string));
                        Ok(())
                    },
                    FacetType::Float => match string.parse() {
                        Ok(float) => {
                            output.push(Float(OrderedFloat(float)));
                            Ok(())
                        },
                        Err(_err) => bail!("invalid facet type, expecting {} found string", ftype),
                    },
                    FacetType::Integer => match string.parse() {
                        Ok(integer) => {
                            output.push(Integer(integer));
                            Ok(())
                        },
                        Err(_err) => bail!("invalid facet type, expecting {} found string", ftype),
                    },
                }
            },
            Value::Array(values) => if can_recurse {
                values.iter().map(|v| inner_parse_facet_value(ftype, v, false, output)).collect()
            } else {
                bail!("invalid facet type, expecting {} found sub-array", ftype)
            },
            Value::Object(_) => bail!("invalid facet type, expecting {} found object", ftype),
        }
    }

    let mut facet_values = SmallVec8::new();
    inner_parse_facet_value(ftype, value, true, &mut facet_values)?;
    Ok(facet_values)
}
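
The position encoding used while indexing above is a plain affine map: a word's global position packs the attribute id and the word index within that attribute. A small sketch of encode/decode under that scheme (the helper names are made up):

fn encode_position(attr: u32, word_index: u32) -> u32 {
    // Mirrors `attr as usize * MAX_POSITION + pos` above; `word_index` is
    // assumed to be below MAX_POSITION, as guaranteed by `.take(MAX_POSITION)`.
    const MAX_POSITION: u32 = 1000;
    attr * MAX_POSITION + word_index
}

fn decode_position(position: u32) -> (u32, u32) {
    const MAX_POSITION: u32 = 1000;
    (position / MAX_POSITION, position % MAX_POSITION)
}

For example, encode_position(2, 7) is 2007 and decode_position(2007) is (2, 7).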

641
milli/src/update/index_documents/transform.rs
Normal file
@ -0,0 +1,641 @@
|
|||
use std::borrow::Cow;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::iter::Peekable;
use std::time::Instant;

use anyhow::{anyhow, Context};
use grenad::CompressionType;
use log::info;
use roaring::RoaringBitmap;
use serde_json::{Map, Value};

use crate::{Index, BEU32, MergeFn, FieldsIdsMap, ExternalDocumentsIds, FieldId};
use crate::update::{AvailableDocumentsIds, UpdateIndexingStep};
use super::merge_function::merge_two_obkvs;
use super::{create_writer, create_sorter, IndexDocumentsMethod};

const DEFAULT_PRIMARY_KEY_NAME: &str = "id";

pub struct TransformOutput {
    pub primary_key: String,
    pub fields_ids_map: FieldsIdsMap,
    pub external_documents_ids: ExternalDocumentsIds<'static>,
    pub new_documents_ids: RoaringBitmap,
    pub replaced_documents_ids: RoaringBitmap,
    pub documents_count: usize,
    pub documents_file: File,
}

/// Extracts the external ids, deduplicates and computes the new internal documents ids
/// and fields ids, writing all the documents under their internal ids into a final file.
///
/// Outputs the new `FieldsIdsMap`, the new `ExternalDocumentsIds` map, the new documents ids,
/// the replaced documents ids, the number of documents in this update and the file
/// containing all those documents.
pub struct Transform<'t, 'i> {
    pub rtxn: &'t heed::RoTxn<'i>,
    pub index: &'i Index,
    pub log_every_n: Option<usize>,
    pub chunk_compression_type: CompressionType,
    pub chunk_compression_level: Option<u32>,
    pub chunk_fusing_shrink_size: Option<u64>,
    pub max_nb_chunks: Option<usize>,
    pub max_memory: Option<usize>,
    pub index_documents_method: IndexDocumentsMethod,
    pub autogenerate_docids: bool,
}

impl Transform<'_, '_> {
    pub fn output_from_json<R, F>(self, reader: R, progress_callback: F) -> anyhow::Result<TransformOutput>
    where
        R: Read,
        F: Fn(UpdateIndexingStep) + Sync,
    {
        self.output_from_generic_json(reader, false, progress_callback)
    }

    pub fn output_from_json_stream<R, F>(self, reader: R, progress_callback: F) -> anyhow::Result<TransformOutput>
    where
        R: Read,
        F: Fn(UpdateIndexingStep) + Sync,
    {
        self.output_from_generic_json(reader, true, progress_callback)
    }

    fn output_from_generic_json<R, F>(
        self,
        reader: R,
        is_stream: bool,
        progress_callback: F,
    ) -> anyhow::Result<TransformOutput>
    where
        R: Read,
        F: Fn(UpdateIndexingStep) + Sync,
    {
        let mut fields_ids_map = self.index.fields_ids_map(self.rtxn)?;
        let external_documents_ids = self.index.external_documents_ids(self.rtxn).unwrap();

        // Deserialize the whole batch of documents in memory.
        let mut documents: Peekable<Box<dyn Iterator<Item=serde_json::Result<Map<String, Value>>>>> = if is_stream {
            let iter = serde_json::Deserializer::from_reader(reader).into_iter();
            let iter = Box::new(iter) as Box<dyn Iterator<Item=_>>;
            iter.peekable()
        } else {
            let vec: Vec<_> = serde_json::from_reader(reader)?;
            let iter = vec.into_iter().map(Ok);
            let iter = Box::new(iter) as Box<dyn Iterator<Item=_>>;
            iter.peekable()
        };

        // We extract the primary key from the first document in
        // the batch if it hasn't already been defined in the index.
        let first = documents.peek().and_then(|r| r.as_ref().ok());
        let alternative_name = first.and_then(|doc| doc.keys().find(|k| k.contains(DEFAULT_PRIMARY_KEY_NAME)).cloned());
        let (primary_key_id, primary_key) = compute_primary_key_pair(
            self.index.primary_key(self.rtxn)?,
            &mut fields_ids_map,
            alternative_name,
            self.autogenerate_docids
        )?;

        if documents.peek().is_none() {
            return Ok(TransformOutput {
                primary_key,
                fields_ids_map,
                external_documents_ids: ExternalDocumentsIds::default(),
                new_documents_ids: RoaringBitmap::new(),
                replaced_documents_ids: RoaringBitmap::new(),
                documents_count: 0,
                documents_file: tempfile::tempfile()?,
            });
        }

        // We must choose the appropriate merge function for when two or more documents
        // with the same user id must be merged or fully replaced in the same batch.
        let merge_function = match self.index_documents_method {
            IndexDocumentsMethod::ReplaceDocuments => keep_latest_obkv,
            IndexDocumentsMethod::UpdateDocuments => merge_obkvs,
        };

        // We initialize the sorter with the user indexing settings.
        let mut sorter = create_sorter(
            merge_function,
            self.chunk_compression_type,
            self.chunk_compression_level,
            self.chunk_fusing_shrink_size,
            self.max_nb_chunks,
            self.max_memory,
        );

        let mut json_buffer = Vec::new();
        let mut obkv_buffer = Vec::new();
        let mut uuid_buffer = [0; uuid::adapter::Hyphenated::LENGTH];
        let mut documents_count = 0;

        for result in documents {
            let document = result?;

            if self.log_every_n.map_or(false, |len| documents_count % len == 0) {
                progress_callback(UpdateIndexingStep::TransformFromUserIntoGenericFormat {
                    documents_seen: documents_count,
                });
            }

            obkv_buffer.clear();
            let mut writer = obkv::KvWriter::new(&mut obkv_buffer);

            // We prepare the fields ids map with the documents keys.
            for (key, _value) in &document {
                fields_ids_map.insert(&key).context("field id limit reached")?;
            }

            // We retrieve the user id from the document based on the primary key name,
            // if the document id isn't present we generate a uuid.
            let external_id = match document.get(&primary_key) {
                Some(value) => match value {
                    Value::String(string) => Cow::Borrowed(string.as_str()),
                    Value::Number(number) => Cow::Owned(number.to_string()),
                    _ => return Err(anyhow!("documents ids must be either strings or numbers")),
                },
                None => {
                    if !self.autogenerate_docids {
                        return Err(anyhow!("missing primary key"));
                    }
                    let uuid = uuid::Uuid::new_v4().to_hyphenated().encode_lower(&mut uuid_buffer);
                    Cow::Borrowed(uuid)
                },
            };

            // We iterate over the fields ids in order.
            for (field_id, name) in fields_ids_map.iter() {
                json_buffer.clear();

                // We try to extract the value from the document; if it is missing
                // and this field is the document id, we use the one we generated.
                if let Some(value) = document.get(name) {
                    // We serialize the attribute values.
                    serde_json::to_writer(&mut json_buffer, value)?;
                    writer.insert(field_id, &json_buffer)?;
                }
                else if field_id == primary_key_id {
                    // We validate the document id [a-zA-Z0-9\-_].
                    let external_id = match validate_document_id(&external_id) {
                        Some(valid) => valid,
                        None => return Err(anyhow!("invalid document id: {:?}", external_id)),
                    };

                    // We serialize the document id.
                    serde_json::to_writer(&mut json_buffer, &external_id)?;
                    writer.insert(field_id, &json_buffer)?;
                }
            }

            // We use the extracted/generated user id as the key for this document.
            sorter.insert(external_id.as_bytes(), &obkv_buffer)?;
            documents_count += 1;
        }

        progress_callback(UpdateIndexingStep::TransformFromUserIntoGenericFormat {
            documents_seen: documents_count,
        });

        // Now that we have a valid sorter that contains the user id and the obkv we
        // give it to the last transforming function which returns the TransformOutput.
        self.output_from_sorter(
            sorter,
            primary_key,
            fields_ids_map,
            documents_count,
            external_documents_ids,
            progress_callback,
        )
    }

    pub fn output_from_csv<R, F>(self, reader: R, progress_callback: F) -> anyhow::Result<TransformOutput>
    where
        R: Read,
        F: Fn(UpdateIndexingStep) + Sync,
    {
        let mut fields_ids_map = self.index.fields_ids_map(self.rtxn)?;
        let external_documents_ids = self.index.external_documents_ids(self.rtxn).unwrap();

        let mut csv = csv::Reader::from_reader(reader);
        let headers = csv.headers()?;

        let mut fields_ids = Vec::new();
        // Generate the new fields ids based on the current fields ids and this CSV headers.
        for (i, header) in headers.iter().enumerate() {
            let id = fields_ids_map.insert(header).context("field id limit reached")?;
            fields_ids.push((id, i));
        }

        // Extract the position of the primary key in the current headers, None if not found.
        let primary_key_pos = match self.index.primary_key(self.rtxn)? {
            Some(primary_key) => {
                // The primary key is known so we must find the position in the CSV headers.
                headers.iter().position(|h| h == primary_key)
            },
            None => headers.iter().position(|h| h.contains("id")),
        };

        // We get the field id from the fields ids map, creating an "id" field
        // in case it is not in the current headers.
        let alternative_name = primary_key_pos.map(|pos| headers[pos].to_string());
        let (primary_key_id, _) = compute_primary_key_pair(
            self.index.primary_key(self.rtxn)?,
            &mut fields_ids_map,
            alternative_name,
            self.autogenerate_docids
        )?;

        // The primary key field is not present in the header, so we need to create it.
        if primary_key_pos.is_none() {
            fields_ids.push((primary_key_id, usize::max_value()));
        }

        // We sort the fields ids by the fields ids map id, this way we are sure to iterate over
        // the records fields in the fields ids map order and correctly generate the obkv.
        fields_ids.sort_unstable_by_key(|(field_id, _)| *field_id);

        // We initialize the sorter with the user indexing settings.
        let mut sorter = create_sorter(
            keep_latest_obkv,
            self.chunk_compression_type,
            self.chunk_compression_level,
            self.chunk_fusing_shrink_size,
            self.max_nb_chunks,
            self.max_memory,
        );

        // We write into the sorter to merge and deduplicate the documents
        // based on the external ids.
        let mut json_buffer = Vec::new();
        let mut obkv_buffer = Vec::new();
        let mut uuid_buffer = [0; uuid::adapter::Hyphenated::LENGTH];
        let mut documents_count = 0;

        let mut record = csv::StringRecord::new();
        while csv.read_record(&mut record)? {
            obkv_buffer.clear();
            let mut writer = obkv::KvWriter::new(&mut obkv_buffer);

            if self.log_every_n.map_or(false, |len| documents_count % len == 0) {
                progress_callback(UpdateIndexingStep::TransformFromUserIntoGenericFormat {
                    documents_seen: documents_count,
                });
            }

            // We extract the user id if we know where it is or generate a UUID v4 otherwise.
            let external_id = match primary_key_pos {
                Some(pos) => {
                    let external_id = &record[pos];
                    // We validate the document id [a-zA-Z0-9\-_].
                    match validate_document_id(&external_id) {
                        Some(valid) => valid,
                        None => return Err(anyhow!("invalid document id: {:?}", external_id)),
                    }
                },
                None => uuid::Uuid::new_v4().to_hyphenated().encode_lower(&mut uuid_buffer),
            };

            // When the primary_key_id is found in the fields ids list
            // we return the generated document id instead of the record field.
            let iter = fields_ids.iter()
                .map(|(fi, i)| {
                    let field = if *fi == primary_key_id { external_id } else { &record[*i] };
                    (fi, field)
                });

            // We retrieve the field id based on the fields ids map fields ids order.
            for (field_id, field) in iter {
                // We serialize the attribute values as JSON strings.
                json_buffer.clear();
                serde_json::to_writer(&mut json_buffer, &field)?;
                writer.insert(*field_id, &json_buffer)?;
            }

            // We use the extracted/generated user id as the key for this document.
            sorter.insert(external_id, &obkv_buffer)?;
            documents_count += 1;
        }

        progress_callback(UpdateIndexingStep::TransformFromUserIntoGenericFormat {
            documents_seen: documents_count,
        });

        // Now that we have a valid sorter that contains the user id and the obkv we
        // give it to the last transforming function which returns the TransformOutput.
        let primary_key_name = fields_ids_map
            .name(primary_key_id)
            .map(String::from)
            .expect("Primary key must be present in fields id map");
        self.output_from_sorter(
            sorter,
            primary_key_name,
            fields_ids_map,
            documents_count,
            external_documents_ids,
            progress_callback,
        )
    }

    /// Generate the `TransformOutput` based on the given sorter that can be generated from any
    /// format like CSV, JSON or JSON stream. This sorter must contain a key that is the document
    /// id for the user side and the value must be an obkv where keys are valid fields ids.
    fn output_from_sorter<F>(
        self,
        sorter: grenad::Sorter<MergeFn>,
        primary_key: String,
        fields_ids_map: FieldsIdsMap,
        approximate_number_of_documents: usize,
        mut external_documents_ids: ExternalDocumentsIds<'_>,
        progress_callback: F,
    ) -> anyhow::Result<TransformOutput>
    where
        F: Fn(UpdateIndexingStep) + Sync,
    {
        let documents_ids = self.index.documents_ids(self.rtxn)?;
        let mut available_documents_ids = AvailableDocumentsIds::from_documents_ids(&documents_ids);

        // Once we have sorted and deduplicated the documents we write them into a final file.
        let mut final_sorter = create_sorter(
            |_docid, _obkvs| Err(anyhow!("cannot merge two documents")),
            self.chunk_compression_type,
            self.chunk_compression_level,
            self.chunk_fusing_shrink_size,
            self.max_nb_chunks,
            self.max_memory,
        );
        let mut new_external_documents_ids_builder = fst::MapBuilder::memory();
        let mut replaced_documents_ids = RoaringBitmap::new();
        let mut new_documents_ids = RoaringBitmap::new();
        let mut obkv_buffer = Vec::new();

        // While we write into the final file we get or generate the internal documents ids.
        let mut documents_count = 0;
        let mut iter = sorter.into_iter()?;
        while let Some((external_id, update_obkv)) = iter.next()? {

            if self.log_every_n.map_or(false, |len| documents_count % len == 0) {
                progress_callback(UpdateIndexingStep::ComputeIdsAndMergeDocuments {
                    documents_seen: documents_count,
                    total_documents: approximate_number_of_documents,
                });
            }

            let (docid, obkv) = match external_documents_ids.get(external_id) {
                Some(docid) => {
                    // If we find the user id in the current external documents ids map
                    // we use it and insert it in the list of replaced documents.
                    replaced_documents_ids.insert(docid);

                    // Depending on the update indexing method we will merge
                    // the document update with the current document or not.
                    match self.index_documents_method {
                        IndexDocumentsMethod::ReplaceDocuments => (docid, update_obkv),
                        IndexDocumentsMethod::UpdateDocuments => {
                            let key = BEU32::new(docid);
                            let base_obkv = self.index.documents.get(&self.rtxn, &key)?
                                .context("document not found")?;
                            let update_obkv = obkv::KvReader::new(update_obkv);
                            merge_two_obkvs(base_obkv, update_obkv, &mut obkv_buffer);
                            (docid, obkv_buffer.as_slice())
                        }
                    }
                },
                None => {
                    // If this user id is new we add it to the external documents ids map
                    // for new ids and into the list of new documents.
                    let new_docid = available_documents_ids.next()
                        .context("no more available documents ids")?;
                    new_external_documents_ids_builder.insert(external_id, new_docid as u64)?;
                    new_documents_ids.insert(new_docid);
                    (new_docid, update_obkv)
                },
            };

            // We insert the document under the documents ids map into the final file.
            final_sorter.insert(docid.to_be_bytes(), obkv)?;
            documents_count += 1;
        }

        progress_callback(UpdateIndexingStep::ComputeIdsAndMergeDocuments {
            documents_seen: documents_count,
            total_documents: documents_count,
        });

        // We create a final writer to write the new documents in order from the sorter.
        let file = tempfile::tempfile()?;
        let mut writer = create_writer(self.chunk_compression_type, self.chunk_compression_level, file)?;

        // Once we have written all the documents into the final sorter, we write the documents
        // into this writer, extract the file and reset the seek to be able to read it again.
        final_sorter.write_into(&mut writer)?;
        let mut documents_file = writer.into_inner()?;
        documents_file.seek(SeekFrom::Start(0))?;

        let before_docids_merging = Instant::now();
        // We merge the new external ids with the existing external documents ids.
        let new_external_documents_ids = new_external_documents_ids_builder.into_map();
        external_documents_ids.insert_ids(&new_external_documents_ids)?;

        info!("Documents external merging took {:.02?}", before_docids_merging.elapsed());

        Ok(TransformOutput {
            primary_key,
            fields_ids_map,
            external_documents_ids: external_documents_ids.into_static(),
            new_documents_ids,
            replaced_documents_ids,
            documents_count,
            documents_file,
        })
    }

    /// Returns a `TransformOutput` with a file that contains the documents of the index
    /// with the attributes reordered according to the `FieldsIdsMap` given as argument.
    // TODO this can be done in parallel by using the rayon `ThreadPool`.
    pub fn remap_index_documents(
        self,
        primary_key: String,
        old_fields_ids_map: FieldsIdsMap,
        new_fields_ids_map: FieldsIdsMap,
    ) -> anyhow::Result<TransformOutput>
    {
        let external_documents_ids = self.index.external_documents_ids(self.rtxn)?;
        let documents_ids = self.index.documents_ids(self.rtxn)?;
        let documents_count = documents_ids.len() as usize;

        // We create a final writer to write the new documents in order from the sorter.
        let file = tempfile::tempfile()?;
        let mut writer = create_writer(self.chunk_compression_type, self.chunk_compression_level, file)?;

        let mut obkv_buffer = Vec::new();
        for result in self.index.documents.iter(self.rtxn)? {
            let (docid, obkv) = result?;
            let docid = docid.get();

            obkv_buffer.clear();
            let mut obkv_writer = obkv::KvWriter::new(&mut obkv_buffer);

            // We iterate over the new `FieldsIdsMap` ids in order and construct the new obkv.
            for (id, name) in new_fields_ids_map.iter() {
                if let Some(val) = old_fields_ids_map.id(name).and_then(|id| obkv.get(id)) {
                    obkv_writer.insert(id, val)?;
                }
            }

            let buffer = obkv_writer.into_inner()?;
            writer.insert(docid.to_be_bytes(), buffer)?;
        }

        // Once we have written all the documents, we extract
        // the file and reset the seek to be able to read it again.
        let mut documents_file = writer.into_inner()?;
        documents_file.seek(SeekFrom::Start(0))?;

        Ok(TransformOutput {
            primary_key,
            fields_ids_map: new_fields_ids_map,
            external_documents_ids: external_documents_ids.into_static(),
            new_documents_ids: documents_ids,
            replaced_documents_ids: RoaringBitmap::default(),
            documents_count,
            documents_file,
        })
    }
}

/// Given an optional primary key and an optional alternative name, returns the (field_id, attr_name)
/// for the primary key according to the following rules:
/// - if primary_key is `Some`, returns the id and the name, else
/// - if alternative_name is `Some`, adds the alternative to the fields_ids_map, and returns the pair, else
/// - if autogenerate_docids is true, inserts the default id value in the fields ids map ("id") and
///   returns the pair, else
/// - returns an error.
fn compute_primary_key_pair(
    primary_key: Option<&str>,
    fields_ids_map: &mut FieldsIdsMap,
    alternative_name: Option<String>,
    autogenerate_docids: bool,
) -> anyhow::Result<(FieldId, String)> {
    match primary_key {
        Some(primary_key) => {
            let id = fields_ids_map.id(primary_key).expect("primary key must be present in the fields id map");
            Ok((id, primary_key.to_string()))
        }
        None => {
            let name = match alternative_name {
                Some(key) => key,
                None => {
                    if !autogenerate_docids {
                        // If there is no primary key in the current document batch, we must
                        // return an error and not automatically generate any document id.
                        anyhow::bail!("missing primary key")
                    }
                    DEFAULT_PRIMARY_KEY_NAME.to_string()
                },
            };
            let id = fields_ids_map.insert(&name).context("field id limit reached")?;
            Ok((id, name))
        },
    }
}

/// Only the last value associated with an id is kept.
fn keep_latest_obkv(_key: &[u8], obkvs: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    obkvs.last().context("no last value").map(|last| last.clone().into_owned())
}

/// Merge all the obkvs in the order we see them.
fn merge_obkvs(_key: &[u8], obkvs: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
    let mut iter = obkvs.iter();
    let first = iter.next().map(|b| b.clone().into_owned()).context("no first value")?;
    Ok(iter.fold(first, |acc, current| {
        let first = obkv::KvReader::new(&acc);
        let second = obkv::KvReader::new(current);
        let mut buffer = Vec::new();
        merge_two_obkvs(first, second, &mut buffer);
        buffer
    }))
}
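
// A hypothetical test-style sketch (not part of this commit) contrasting the
// two merge strategies above, assuming `merge_two_obkvs` keeps the union of
// the keys with the second obkv winning on conflicts.
#[test]
fn merge_obkvs_prefers_the_latest_value() -> anyhow::Result<()> {
    let mut writer = obkv::KvWriter::new(Vec::new());
    writer.insert(0, b"old")?;
    writer.insert(1, b"kept")?;
    let first = writer.into_inner()?;

    let mut writer = obkv::KvWriter::new(Vec::new());
    writer.insert(0, b"new")?;
    let second = writer.into_inner()?;

    // `merge_obkvs` folds the updates: key 0 is overwritten, key 1 survives.
    let merged = merge_obkvs(b"docid", &[Cow::Owned(first.clone()), Cow::Owned(second.clone())])?;
    let merged = obkv::KvReader::new(&merged);
    assert_eq!(merged.get(0), Some(&b"new"[..]));
    assert_eq!(merged.get(1), Some(&b"kept"[..]));

    // `keep_latest_obkv` simply returns the last version: key 1 is gone.
    let replaced = keep_latest_obkv(b"docid", &[Cow::Owned(first), Cow::Owned(second)])?;
    let replaced = obkv::KvReader::new(&replaced);
    assert_eq!(replaced.get(0), Some(&b"new"[..]));
    assert_eq!(replaced.get(1), None);
    Ok(())
}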

fn validate_document_id(document_id: &str) -> Option<&str> {
    let document_id = document_id.trim();
    Some(document_id).filter(|id| {
        !id.is_empty() && id.chars().all(|c| {
            matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_')
        })
    })
}
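
// A hypothetical test-style sketch of the accepted id alphabet: surrounding
// whitespace is trimmed away, and anything outside [a-zA-Z0-9_-] is rejected.
#[test]
fn document_ids_are_trimmed_and_checked() {
    assert_eq!(validate_document_id("  abc-DEF_01  "), Some("abc-DEF_01"));
    assert_eq!(validate_document_id("hello world"), None); // spaces inside are invalid
    assert_eq!(validate_document_id("   "), None); // empty after trimming
}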

#[cfg(test)]
mod test {
    use super::*;

    mod compute_primary_key {
        use super::compute_primary_key_pair;
        use super::FieldsIdsMap;

        #[test]
        #[should_panic]
        fn should_panic_primary_key_not_in_map() {
            let mut fields_map = FieldsIdsMap::new();
            let _result = compute_primary_key_pair(
                Some("toto"),
                &mut fields_map,
                None,
                false);
        }

        #[test]
        fn should_return_primary_key_if_is_some() {
            let mut fields_map = FieldsIdsMap::new();
            fields_map.insert("toto").unwrap();
            let result = compute_primary_key_pair(
                Some("toto"),
                &mut fields_map,
                Some("tata".to_string()),
                false);
            assert_eq!(result.unwrap(), (0u8, "toto".to_string()));
            assert_eq!(fields_map.len(), 1);
        }

        #[test]
        fn should_return_alternative_if_primary_is_none() {
            let mut fields_map = FieldsIdsMap::new();
            let result = compute_primary_key_pair(
                None,
                &mut fields_map,
                Some("tata".to_string()),
                false);
            assert_eq!(result.unwrap(), (0u8, "tata".to_string()));
            assert_eq!(fields_map.len(), 1);
        }

        #[test]
        fn should_return_default_if_both_are_none() {
            let mut fields_map = FieldsIdsMap::new();
            let result = compute_primary_key_pair(
                None,
                &mut fields_map,
                None,
                true);
            assert_eq!(result.unwrap(), (0u8, "id".to_string()));
            assert_eq!(fields_map.len(), 1);
        }

        #[test]
        fn should_return_err_if_both_are_none_and_autogenerate_is_false() {
            let mut fields_map = FieldsIdsMap::new();
            let result = compute_primary_key_pair(
                None,
                &mut fields_map,
                None,
                false);
            assert!(result.is_err());
            assert_eq!(fields_map.len(), 0);
        }
    }
}
17
milli/src/update/mod.rs
Normal file

@ -0,0 +1,17 @@
mod available_documents_ids;
mod clear_documents;
mod delete_documents;
mod facets;
mod index_documents;
mod settings;
mod update_builder;
mod update_step;

pub use self::available_documents_ids::AvailableDocumentsIds;
pub use self::clear_documents::ClearDocuments;
pub use self::delete_documents::DeleteDocuments;
pub use self::index_documents::{IndexDocuments, IndexDocumentsMethod, UpdateFormat, DocumentAdditionResult};
pub use self::facets::Facets;
pub use self::settings::Settings;
pub use self::update_builder::UpdateBuilder;
pub use self::update_step::UpdateIndexingStep;
522
milli/src/update/settings.rs
Normal file

@ -0,0 +1,522 @@
use std::collections::HashMap;
use std::str::FromStr;

use anyhow::Context;
use grenad::CompressionType;
use itertools::Itertools;
use rayon::ThreadPool;

use crate::criterion::Criterion;
use crate::facet::FacetType;
use crate::update::index_documents::{Transform, IndexDocumentsMethod};
use crate::update::{ClearDocuments, IndexDocuments, UpdateIndexingStep};
use crate::{Index, FieldsIdsMap};

pub struct Settings<'a, 't, 'u, 'i> {
    wtxn: &'t mut heed::RwTxn<'i, 'u>,
    index: &'i Index,
    pub(crate) log_every_n: Option<usize>,
    pub(crate) max_nb_chunks: Option<usize>,
    pub(crate) max_memory: Option<usize>,
    pub(crate) linked_hash_map_size: Option<usize>,
    pub(crate) chunk_compression_type: CompressionType,
    pub(crate) chunk_compression_level: Option<u32>,
    pub(crate) chunk_fusing_shrink_size: Option<u64>,
    pub(crate) thread_pool: Option<&'a ThreadPool>,
    update_id: u64,

    // If a struct field is set to `None` it means that it hasn't been set by the user,
    // however if it is `Some(None)` it means that the user forced a reset of the setting.
    searchable_fields: Option<Option<Vec<String>>>,
    displayed_fields: Option<Option<Vec<String>>>,
    faceted_fields: Option<Option<HashMap<String, String>>>,
    criteria: Option<Option<Vec<String>>>,
}

impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
    pub fn new(
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
        update_id: u64,
    ) -> Settings<'a, 't, 'u, 'i> {
        Settings {
            wtxn,
            index,
            log_every_n: None,
            max_nb_chunks: None,
            max_memory: None,
            linked_hash_map_size: None,
            chunk_compression_type: CompressionType::None,
            chunk_compression_level: None,
            chunk_fusing_shrink_size: None,
            thread_pool: None,
            searchable_fields: None,
            displayed_fields: None,
            faceted_fields: None,
            criteria: None,
            update_id,
        }
    }

    pub fn reset_searchable_fields(&mut self) {
        self.searchable_fields = Some(None);
    }

    pub fn set_searchable_fields(&mut self, names: Vec<String>) {
        self.searchable_fields = Some(Some(names));
    }

    pub fn reset_displayed_fields(&mut self) {
        self.displayed_fields = Some(None);
    }

    pub fn set_displayed_fields(&mut self, names: Vec<String>) {
        self.displayed_fields = Some(Some(names));
    }

    pub fn set_faceted_fields(&mut self, names_facet_types: HashMap<String, String>) {
        self.faceted_fields = Some(Some(names_facet_types));
    }

    pub fn reset_faceted_fields(&mut self) {
        self.faceted_fields = Some(None);
    }

    pub fn reset_criteria(&mut self) {
        self.criteria = Some(None);
    }

    pub fn set_criteria(&mut self, criteria: Vec<String>) {
        self.criteria = Some(Some(criteria));
    }

    fn reindex<F>(&mut self, cb: &F, old_fields_ids_map: FieldsIdsMap) -> anyhow::Result<()>
    where
        F: Fn(UpdateIndexingStep, u64) + Sync
    {
        let fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
        let update_id = self.update_id;
        let cb = |step| cb(step, update_id);
        // If the settings are set before any document update, we don't need to do anything,
        // and we will set the primary key during the first document addition.
        if self.index.number_of_documents(&self.wtxn)? == 0 {
            return Ok(())
        }

        let transform = Transform {
            rtxn: &self.wtxn,
            index: self.index,
            log_every_n: self.log_every_n,
            chunk_compression_type: self.chunk_compression_type,
            chunk_compression_level: self.chunk_compression_level,
            chunk_fusing_shrink_size: self.chunk_fusing_shrink_size,
            max_nb_chunks: self.max_nb_chunks,
            max_memory: self.max_memory,
            index_documents_method: IndexDocumentsMethod::ReplaceDocuments,
            autogenerate_docids: false,
        };

        // There has already been a document addition, so the primary key should be set by now.
        let primary_key = self.index.primary_key(&self.wtxn)?.context("Index must have a primary key")?;

        // We remap the documents fields based on the new `FieldsIdsMap`.
        let output = transform.remap_index_documents(
            primary_key.to_string(),
            old_fields_ids_map,
            fields_ids_map.clone())?;

        // We clear the full database (words-fst, documents ids and documents content).
        ClearDocuments::new(self.wtxn, self.index, self.update_id).execute()?;

        // We index the generated `TransformOutput` which must contain
        // all the documents with fields in the newly defined searchable order.
        let mut indexing_builder = IndexDocuments::new(self.wtxn, self.index, self.update_id);
        indexing_builder.log_every_n = self.log_every_n;
        indexing_builder.max_nb_chunks = self.max_nb_chunks;
        indexing_builder.max_memory = self.max_memory;
        indexing_builder.linked_hash_map_size = self.linked_hash_map_size;
        indexing_builder.chunk_compression_type = self.chunk_compression_type;
        indexing_builder.chunk_compression_level = self.chunk_compression_level;
        indexing_builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;
        indexing_builder.thread_pool = self.thread_pool;
        indexing_builder.execute_raw(output, &cb)?;
        Ok(())
    }

    fn update_displayed(&mut self) -> anyhow::Result<bool> {
        match self.displayed_fields {
            Some(Some(ref fields)) => {
                let mut fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
                // Fields are deduplicated: only the first occurrence is taken into account.
                let names: Vec<_> = fields
                    .iter()
                    .unique()
                    .map(String::as_str)
                    .collect();

                for name in names.iter() {
                    fields_ids_map
                        .insert(name)
                        .context("field id limit exceeded")?;
                }
                self.index.put_displayed_fields(self.wtxn, &names)?;
                self.index.put_fields_ids_map(self.wtxn, &fields_ids_map)?;
            }
            Some(None) => { self.index.delete_displayed_fields(self.wtxn)?; },
            None => return Ok(false),
        }
        Ok(true)
    }

    /// Updates the index's searchable attributes. This causes the field map to be recomputed to
    /// reflect the order of the searchable attributes.
    fn update_searchable(&mut self) -> anyhow::Result<bool> {
        match self.searchable_fields {
            Some(Some(ref fields)) => {
                // Every time the searchable attributes are updated, we need to update the
                // field ids for any settings that use them (displayed_fields, faceted_fields).
                let old_fields_ids_map = self.index.fields_ids_map(self.wtxn)?;

                let mut new_fields_ids_map = FieldsIdsMap::new();
                // Fields are deduplicated: only the first occurrence is taken into account.
                let names = fields
                    .iter()
                    .unique()
                    .map(String::as_str)
                    .collect::<Vec<_>>();

                // Add all the searchable attributes to the field map, and then add the
                // remaining fields from the old field map to the new one.
                for name in names.iter() {
                    new_fields_ids_map
                        .insert(&name)
                        .context("field id limit exceeded")?;
                }

                for (_, name) in old_fields_ids_map.iter() {
                    new_fields_ids_map
                        .insert(&name)
                        .context("field id limit exceeded")?;
                }

                self.index.put_searchable_fields(self.wtxn, &names)?;
                self.index.put_fields_ids_map(self.wtxn, &new_fields_ids_map)?;
            }
            Some(None) => { self.index.delete_searchable_fields(self.wtxn)?; },
            None => return Ok(false),
        }
        Ok(true)
    }

    fn update_facets(&mut self) -> anyhow::Result<bool> {
        match self.faceted_fields {
            Some(Some(ref fields)) => {
                let mut fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
                let mut new_facets = HashMap::new();
                for (name, ty) in fields {
                    fields_ids_map.insert(name).context("field id limit exceeded")?;
                    let ty = FacetType::from_str(&ty)?;
                    new_facets.insert(name.clone(), ty);
                }
                self.index.put_faceted_fields(self.wtxn, &new_facets)?;
                self.index.put_fields_ids_map(self.wtxn, &fields_ids_map)?;
            }
            Some(None) => { self.index.delete_faceted_fields(self.wtxn)?; },
            None => return Ok(false)
        }
        Ok(true)
    }

    fn update_criteria(&mut self) -> anyhow::Result<()> {
        match self.criteria {
            Some(Some(ref fields)) => {
                let faceted_fields = self.index.faceted_fields(&self.wtxn)?;
                let mut new_criteria = Vec::new();
                for name in fields {
                    let criterion = Criterion::from_str(&faceted_fields, &name)?;
                    new_criteria.push(criterion);
                }
                self.index.put_criteria(self.wtxn, &new_criteria)?;
            }
            Some(None) => { self.index.delete_criteria(self.wtxn)?; }
            None => (),
        }
        Ok(())
    }

    pub fn execute<F>(mut self, progress_callback: F) -> anyhow::Result<()>
    where
        F: Fn(UpdateIndexingStep, u64) + Sync
    {
        let old_fields_ids_map = self.index.fields_ids_map(&self.wtxn)?;
        self.update_displayed()?;
        let facets_updated = self.update_facets()?;
        // update_criteria MUST be called after update_facets, since criterion fields must be
        // set as facets.
        self.update_criteria()?;
        let searchable_updated = self.update_searchable()?;

        if facets_updated || searchable_updated {
            self.reindex(&progress_callback, old_fields_ids_map)?;
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use heed::EnvOpenOptions;
    use maplit::hashmap;

    use crate::facet::FacetType;
    use crate::update::{IndexDocuments, UpdateFormat};

    #[test]
    fn set_and_reset_searchable_fields() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // First we send 3 documents with ids from 1 to 3.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index, 0);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // We change the searchable fields to be the "name" field only.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 1);
        builder.set_searchable_fields(vec!["name".into()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the searchable field is correctly set to "name" only.
        let rtxn = index.read_txn().unwrap();
        // When we search for something that is not in
        // the searchable fields it must not return any document.
        let result = index.search(&rtxn).query("23").execute().unwrap();
        assert!(result.documents_ids.is_empty());

        // When we search for something that is in the searchable fields
        // we must find the appropriate document.
        let result = index.search(&rtxn).query(r#""kevin""#).execute().unwrap();
        let documents = index.documents(&rtxn, result.documents_ids).unwrap();
        assert_eq!(documents.len(), 1);
        assert_eq!(documents[0].1.get(0), Some(&br#""kevin""#[..]));
        drop(rtxn);

        // We reset the searchable fields.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 2);
        builder.reset_searchable_fields();
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the searchable fields have been reset and documents are found now.
        let rtxn = index.read_txn().unwrap();
        let searchable_fields = index.searchable_fields(&rtxn).unwrap();
        assert_eq!(searchable_fields, None);
        let result = index.search(&rtxn).query("23").execute().unwrap();
        assert_eq!(result.documents_ids.len(), 1);
        let documents = index.documents(&rtxn, result.documents_ids).unwrap();
        assert_eq!(documents[0].1.get(0), Some(&br#""kevin""#[..]));
        drop(rtxn);
    }

    #[test]
    fn mixup_searchable_with_displayed_fields() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // First we send 3 documents with ids from 1 to 3.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index, 0);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // In the same transaction we change the displayed fields to be only the "age".
        // We also change the searchable fields to be the "name" field only.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 1);
        builder.set_displayed_fields(vec!["age".into()]);
        builder.set_searchable_fields(vec!["name".into()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to only the "age" field.
        let rtxn = index.read_txn().unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        assert_eq!(fields_ids.unwrap(), &["age"][..]);
        drop(rtxn);

        // We reset the searchable fields.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 2);
        builder.reset_searchable_fields();
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields still contain only the "age" field.
        let rtxn = index.read_txn().unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        assert_eq!(fields_ids.unwrap(), &["age"][..]);
        drop(rtxn);
    }

    #[test]
    fn default_displayed_fields() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // First we send 3 documents with ids from 1 to 3.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index, 0);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to `None` (default value).
        let rtxn = index.read_txn().unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        assert_eq!(fields_ids, None);
        drop(rtxn);
    }

    #[test]
    fn set_and_reset_displayed_field() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // First we send 3 documents with ids from 1 to 3.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index, 0);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();

        // In the same transaction we change the displayed fields to be only the age.
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_displayed_fields(vec!["age".into()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to only the "age" field.
        let rtxn = index.read_txn().unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        assert_eq!(fields_ids.unwrap(), &["age"][..]);
        drop(rtxn);

        // We reset the displayed fields to `None`, the default value.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.reset_displayed_fields();
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to `None` (default value).
        let rtxn = index.read_txn().unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        assert_eq!(fields_ids, None);
        drop(rtxn);
    }

    #[test]
    fn set_faceted_fields() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // Set the faceted fields to be the age.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_faceted_fields(hashmap!{ "age".into() => "integer".into() });
        builder.execute(|_, _| ()).unwrap();

        // Then index some documents.
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index, 1);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the faceted fields are correctly set.
        let rtxn = index.read_txn().unwrap();
        let fields_ids = index.faceted_fields(&rtxn).unwrap();
        assert_eq!(fields_ids, hashmap!{ "age".to_string() => FacetType::Integer });
        // Only count the field_id 0 and level 0 facet values.
        let count = index.facet_field_id_value_docids.prefix_iter(&rtxn, &[0, 0]).unwrap().count();
        assert_eq!(count, 3);
        drop(rtxn);

        // Index a few more documents with new and current facets values.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin2,23\nkevina2,21\nbenoit2,35\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index, 2);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        let rtxn = index.read_txn().unwrap();
        // Only count the field_id 0 and level 0 facet values.
        let count = index.facet_field_id_value_docids.prefix_iter(&rtxn, &[0, 0]).unwrap().count();
        assert_eq!(count, 4);
        drop(rtxn);
    }

    #[test]
    fn setting_searchable_recomputes_other_settings() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // Set all the settings except searchable.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_displayed_fields(vec!["hello".to_string()]);
        builder.set_faceted_fields(hashmap!{
            "age".into() => "integer".into(),
            "toto".into() => "integer".into(),
        });
        builder.set_criteria(vec!["asc(toto)".to_string()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check the output.
        let rtxn = index.read_txn().unwrap();
        assert_eq!(&["hello"][..], index.displayed_fields(&rtxn).unwrap().unwrap());
        // Since no documents have been pushed the primary key is still unset.
        assert!(index.primary_key(&rtxn).unwrap().is_none());
        assert_eq!(vec![Criterion::Asc("toto".to_string())], index.criteria(&rtxn).unwrap());
        drop(rtxn);

        // We set toto and age as searchable to force a reordering of the fields.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index, 1);
        builder.set_searchable_fields(vec!["toto".to_string(), "age".to_string()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        let rtxn = index.read_txn().unwrap();
        assert_eq!(&["hello"][..], index.displayed_fields(&rtxn).unwrap().unwrap());
        assert!(index.primary_key(&rtxn).unwrap().is_none());
        assert_eq!(vec![Criterion::Asc("toto".to_string())], index.criteria(&rtxn).unwrap());
        drop(rtxn);
    }
}
138
milli/src/update/update_builder.rs
Normal file

@ -0,0 +1,138 @@
use grenad::CompressionType;
use rayon::ThreadPool;

use crate::Index;
use super::{ClearDocuments, DeleteDocuments, IndexDocuments, Settings, Facets};

pub struct UpdateBuilder<'a> {
    pub(crate) log_every_n: Option<usize>,
    pub(crate) max_nb_chunks: Option<usize>,
    pub(crate) max_memory: Option<usize>,
    pub(crate) linked_hash_map_size: Option<usize>,
    pub(crate) chunk_compression_type: CompressionType,
    pub(crate) chunk_compression_level: Option<u32>,
    pub(crate) chunk_fusing_shrink_size: Option<u64>,
    pub(crate) thread_pool: Option<&'a ThreadPool>,
    pub(crate) update_id: u64,
}

impl<'a> UpdateBuilder<'a> {
    pub fn new(update_id: u64) -> UpdateBuilder<'a> {
        UpdateBuilder {
            log_every_n: None,
            max_nb_chunks: None,
            max_memory: None,
            linked_hash_map_size: None,
            chunk_compression_type: CompressionType::None,
            chunk_compression_level: None,
            chunk_fusing_shrink_size: None,
            thread_pool: None,
            update_id,
        }
    }

    pub fn log_every_n(&mut self, log_every_n: usize) {
        self.log_every_n = Some(log_every_n);
    }

    pub fn max_nb_chunks(&mut self, max_nb_chunks: usize) {
        self.max_nb_chunks = Some(max_nb_chunks);
    }

    pub fn max_memory(&mut self, max_memory: usize) {
        self.max_memory = Some(max_memory);
    }

    pub fn linked_hash_map_size(&mut self, linked_hash_map_size: usize) {
        self.linked_hash_map_size = Some(linked_hash_map_size);
    }

    pub fn chunk_compression_type(&mut self, chunk_compression_type: CompressionType) {
        self.chunk_compression_type = chunk_compression_type;
    }

    pub fn chunk_compression_level(&mut self, chunk_compression_level: u32) {
        self.chunk_compression_level = Some(chunk_compression_level);
    }

    pub fn chunk_fusing_shrink_size(&mut self, chunk_fusing_shrink_size: u64) {
        self.chunk_fusing_shrink_size = Some(chunk_fusing_shrink_size);
    }

    pub fn thread_pool(&mut self, thread_pool: &'a ThreadPool) {
        self.thread_pool = Some(thread_pool);
    }

    pub fn clear_documents<'t, 'u, 'i>(
        self,
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
    ) -> ClearDocuments<'t, 'u, 'i>
    {
        ClearDocuments::new(wtxn, index, self.update_id)
    }

    pub fn delete_documents<'t, 'u, 'i>(
        self,
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
    ) -> anyhow::Result<DeleteDocuments<'t, 'u, 'i>>
    {
        DeleteDocuments::new(wtxn, index, self.update_id)
    }

    pub fn index_documents<'t, 'u, 'i>(
        self,
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
    ) -> IndexDocuments<'t, 'u, 'i, 'a>
    {
        let mut builder = IndexDocuments::new(wtxn, index, self.update_id);

        builder.log_every_n = self.log_every_n;
        builder.max_nb_chunks = self.max_nb_chunks;
        builder.max_memory = self.max_memory;
        builder.linked_hash_map_size = self.linked_hash_map_size;
        builder.chunk_compression_type = self.chunk_compression_type;
        builder.chunk_compression_level = self.chunk_compression_level;
        builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;
        builder.thread_pool = self.thread_pool;

        builder
    }

    pub fn settings<'t, 'u, 'i>(
        self,
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
    ) -> Settings<'a, 't, 'u, 'i>
    {
        let mut builder = Settings::new(wtxn, index, self.update_id);

        builder.log_every_n = self.log_every_n;
        builder.max_nb_chunks = self.max_nb_chunks;
        builder.max_memory = self.max_memory;
        builder.linked_hash_map_size = self.linked_hash_map_size;
        builder.chunk_compression_type = self.chunk_compression_type;
        builder.chunk_compression_level = self.chunk_compression_level;
        builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;
        builder.thread_pool = self.thread_pool;

        builder
    }

    pub fn facets<'t, 'u, 'i>(
        self,
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
    ) -> Facets<'t, 'u, 'i>
    {
        let mut builder = Facets::new(wtxn, index, self.update_id);

        builder.chunk_compression_type = self.chunk_compression_type;
        builder.chunk_compression_level = self.chunk_compression_level;
        builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;

        builder
    }
}
36
milli/src/update/update_step.rs
Normal file

@ -0,0 +1,36 @@
use UpdateIndexingStep::*;

#[derive(Debug, Clone, Copy)]
pub enum UpdateIndexingStep {
    /// Transform from the original user given format (CSV, JSON, JSON lines)
    /// into a generic format based on the obkv and grenad crates. This step also
    /// deduplicates potential documents in this batch update by merging or replacing them.
    TransformFromUserIntoGenericFormat { documents_seen: usize },

    /// This step checks the external document ids, computes the internal ids and merges
    /// the documents that are already present in the database.
    ComputeIdsAndMergeDocuments { documents_seen: usize, total_documents: usize },

    /// Extract the documents words using the tokenizer and compute the documents
    /// facets. Stores those words, facets and documents ids on disk.
    IndexDocuments { documents_seen: usize, total_documents: usize },

    /// Merge the previously extracted data (words and facets) into the final LMDB database.
    /// This extracted data is split into multiple databases.
    MergeDataIntoFinalDatabase { databases_seen: usize, total_databases: usize },
}

impl UpdateIndexingStep {
    pub const fn step(&self) -> usize {
        match self {
            TransformFromUserIntoGenericFormat { .. } => 0,
            ComputeIdsAndMergeDocuments { .. } => 1,
            IndexDocuments { .. } => 2,
            MergeDataIntoFinalDatabase { .. } => 3,
        }
    }

    pub const fn number_of_steps(&self) -> usize {
        4
    }
}
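
// A hypothetical sketch (not part of this commit) of how a caller could turn
// an `UpdateIndexingStep` into a single overall progress ratio, combining the
// step index above with the in-step counters; division by zero is avoided by
// clamping the totals to at least 1.
#[allow(dead_code)]
fn progress_ratio(step: UpdateIndexingStep) -> f64 {
    let within_step = match step {
        TransformFromUserIntoGenericFormat { .. } => 0.0,
        ComputeIdsAndMergeDocuments { documents_seen, total_documents }
        | IndexDocuments { documents_seen, total_documents } => {
            documents_seen as f64 / total_documents.max(1) as f64
        },
        MergeDataIntoFinalDatabase { databases_seen, total_databases } => {
            databases_seen as f64 / total_databases.max(1) as f64
        },
    };
    (step.step() as f64 + within_step) / step.number_of_steps() as f64
}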
360
milli/src/update_store.rs
Normal file

@ -0,0 +1,360 @@
use std::path::Path;
use std::sync::Arc;

use crossbeam_channel::Sender;
use heed::types::{OwnedType, DecodeIgnore, SerdeJson, ByteSlice};
use heed::{EnvOpenOptions, Env, Database};
use serde::{Serialize, Deserialize};

use crate::BEU64;

#[derive(Clone)]
pub struct UpdateStore<M, N> {
    env: Env,
    pending_meta: Database<OwnedType<BEU64>, SerdeJson<M>>,
    pending: Database<OwnedType<BEU64>, ByteSlice>,
    processed_meta: Database<OwnedType<BEU64>, SerdeJson<N>>,
    aborted_meta: Database<OwnedType<BEU64>, SerdeJson<M>>,
    notification_sender: Sender<()>,
}

pub trait UpdateHandler<M, N> {
    fn handle_update(&mut self, update_id: u64, meta: M, content: &[u8]) -> heed::Result<N>;
}

impl<M, N, F> UpdateHandler<M, N> for F
where F: FnMut(u64, M, &[u8]) -> heed::Result<N> + Send + 'static {
    fn handle_update(&mut self, update_id: u64, meta: M, content: &[u8]) -> heed::Result<N> {
        self(update_id, meta, content)
    }
}
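
// A hypothetical usage sketch (the path and metadata type are invented for
// illustration): any `FnMut(u64, M, &[u8]) -> heed::Result<N>` closure is an
// `UpdateHandler`, so registering an update is enough to have it processed
// on the background thread spawned by `open`.
#[allow(dead_code)]
fn example() -> heed::Result<()> {
    let store = UpdateStore::open(
        EnvOpenOptions::new(),
        "update-store.mdb", // hypothetical path, the directory must exist
        |update_id: u64, meta: String, _content: &[u8]| {
            // Process the update here and return the new processed meta.
            Ok(format!("processed update {}: {}", update_id, meta))
        },
    )?;

    let update_id = store.register_update(&"add documents".to_string(), b"raw payload")?;
    println!("registered update {}", update_id);
    Ok(())
}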
|
||||
|
||||
impl<M: 'static, N: 'static> UpdateStore<M, N> {
|
||||
pub fn open<P, U>(
|
||||
mut options: EnvOpenOptions,
|
||||
path: P,
|
||||
mut update_handler: U,
|
||||
) -> heed::Result<Arc<UpdateStore<M, N>>>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
U: UpdateHandler<M, N> + Send + 'static,
|
||||
M: for<'a> Deserialize<'a>,
|
||||
N: Serialize,
|
||||
{
|
||||
options.max_dbs(4);
|
||||
let env = options.open(path)?;
|
||||
let pending_meta = env.create_database(Some("pending-meta"))?;
|
||||
let pending = env.create_database(Some("pending"))?;
|
||||
let processed_meta = env.create_database(Some("processed-meta"))?;
|
||||
let aborted_meta = env.create_database(Some("aborted-meta"))?;
|
||||
|
||||
let (notification_sender, notification_receiver) = crossbeam_channel::bounded(1);
|
||||
// Send a first notification to trigger the process.
|
||||
let _ = notification_sender.send(());
|
||||
|
||||
let update_store = Arc::new(UpdateStore {
|
||||
env,
|
||||
pending,
|
||||
pending_meta,
|
||||
processed_meta,
|
||||
aborted_meta,
|
||||
notification_sender,
|
||||
});
|
||||
|
||||
let update_store_cloned = update_store.clone();
|
||||
std::thread::spawn(move || {
|
||||
// Block and wait for something to process.
|
||||
for () in notification_receiver {
|
||||
loop {
|
||||
match update_store_cloned.process_pending_update(&mut update_handler) {
|
||||
Ok(Some(_)) => (),
|
||||
Ok(None) => break,
|
||||
Err(e) => eprintln!("error while processing update: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(update_store)
|
||||
}
    /// Returns the next id to use for storing a new update.
    fn new_update_id(&self, txn: &heed::RoTxn) -> heed::Result<u64> {
        let last_pending = self.pending_meta
            .remap_data_type::<DecodeIgnore>()
            .last(txn)?
            .map(|(k, _)| k.get());

        let last_processed = self.processed_meta
            .remap_data_type::<DecodeIgnore>()
            .last(txn)?
            .map(|(k, _)| k.get());

        let last_aborted = self.aborted_meta
            .remap_data_type::<DecodeIgnore>()
            .last(txn)?
            .map(|(k, _)| k.get());

        let last_update_id = [last_pending, last_processed, last_aborted]
            .iter()
            .copied()
            .flatten()
            .max();

        match last_update_id {
            Some(last_id) => Ok(last_id + 1),
            None => Ok(0),
        }
    }
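    // `last()` can only return the highest id here because the `BEU64` keys are
    // big-endian: with big-endian encoding, LMDB's lexicographic byte order
    // coincides with numeric order. A one-line illustration with plain `std`
    // byte arrays:
    //
    //     assert!(1u64.to_be_bytes() < 256u64.to_be_bytes()); // sorts like the numbers
    //     assert!(1u64.to_le_bytes() > 256u64.to_le_bytes()); // little-endian would not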
    /// Registers the update content in the pending store and the meta
    /// into the pending-meta store. Returns the new unique update id.
    pub fn register_update(&self, meta: &M, content: &[u8]) -> heed::Result<u64>
    where M: Serialize,
    {
        let mut wtxn = self.env.write_txn()?;

        // We ask the update store to give us a new update id; this is safe,
        // no other update can have the same id because we use a write txn before
        // asking for the id and registering it, so other update registrations
        // will be forced to wait for a new write txn.
        let update_id = self.new_update_id(&wtxn)?;
        let update_key = BEU64::new(update_id);

        self.pending_meta.put(&mut wtxn, &update_key, meta)?;
        self.pending.put(&mut wtxn, &update_key, content)?;

        wtxn.commit()?;

        if let Err(e) = self.notification_sender.try_send(()) {
            assert!(!e.is_disconnected(), "update notification channel is disconnected");
        }

        Ok(update_id)
    }
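    // `try_send` may legitimately find the channel full: that only means a
    // wake-up is already queued and the processing thread will drain every
    // pending update anyway. Only a disconnected channel (a dead processing
    // thread) is fatal, hence the assert above. Registering then looks like
    // this (meta and content are illustrative):
    //
    //     let update_id = update_store.register_update(&String::from("add documents"), b"raw payload")?;
    //     println!("queued update {}", update_id);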
    /// Executes the user provided function on the next pending update (the one with the lowest id).
    /// This is asynchronous as it lets the user process the update with a read-only txn and
    /// only writes the resulting meta to the processed-meta store *after* it has been processed.
    fn process_pending_update<U>(&self, handler: &mut U) -> heed::Result<Option<(u64, N)>>
    where
        U: UpdateHandler<M, N>,
        M: for<'a> Deserialize<'a>,
        N: Serialize,
    {
        // Create a read transaction to be able to retrieve the pending updates in order.
        let rtxn = self.env.read_txn()?;
        let first_meta = self.pending_meta.first(&rtxn)?;

        // If there is a pending update we process it, and we only keep
        // a reader while processing it, not a writer.
        match first_meta {
            Some((first_id, first_meta)) => {
                let first_content = self.pending
                    .get(&rtxn, &first_id)?
                    .expect("associated update content");

                // Process the pending update using the provided user function.
                let new_meta = handler.handle_update(first_id.get(), first_meta, first_content)?;
                drop(rtxn);

                // Once the pending update has been successfully processed
                // we must remove the content from the pending stores and
                // write the *new* meta to the processed-meta store and commit.
                let mut wtxn = self.env.write_txn()?;
                self.pending_meta.delete(&mut wtxn, &first_id)?;
                self.pending.delete(&mut wtxn, &first_id)?;
                self.processed_meta.put(&mut wtxn, &first_id, &new_meta)?;
                wtxn.commit()?;

                Ok(Some((first_id.get(), new_meta)))
            },
            None => Ok(None)
        }
    }
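    // Nothing in the store interprets `content`; decoding is entirely the
    // handler's job. A sketch of a handler that assumes the payload is JSON
    // (`serde_json` is an assumed dependency here, not one this file uses):
    //
    //     let mut handler = |update_id: u64, meta: String, content: &[u8]| {
    //         let value: serde_json::Value = serde_json::from_slice(content)
    //             .unwrap_or(serde_json::Value::Null);
    //         Ok(format!("update {} ({}): {}", update_id, meta, value))
    //     };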
    /// The id and metadata of the update that is currently being processed,
    /// `None` if no update is being processed.
    pub fn processing_update(&self) -> heed::Result<Option<(u64, M)>>
    where M: for<'a> Deserialize<'a>,
    {
        let rtxn = self.env.read_txn()?;
        match self.pending_meta.first(&rtxn)? {
            Some((key, meta)) => Ok(Some((key.get(), meta))),
            None => Ok(None),
        }
    }
    /// Executes the user defined function with the meta-store iterators: the first
    /// iterator is the *processed* meta one, the second is the *aborted* meta one,
    /// and the last is the *pending* meta one.
    pub fn iter_metas<F, T>(&self, mut f: F) -> heed::Result<T>
    where
        M: for<'a> Deserialize<'a>,
        N: for<'a> Deserialize<'a>,
        F: for<'a> FnMut(
            heed::RoIter<'a, OwnedType<BEU64>, SerdeJson<N>>,
            heed::RoIter<'a, OwnedType<BEU64>, SerdeJson<M>>,
            heed::RoIter<'a, OwnedType<BEU64>, SerdeJson<M>>,
        ) -> heed::Result<T>,
    {
        let rtxn = self.env.read_txn()?;

        // We get the pending, processed and aborted meta iterators.
        let processed_iter = self.processed_meta.iter(&rtxn)?;
        let aborted_iter = self.aborted_meta.iter(&rtxn)?;
        let pending_iter = self.pending_meta.iter(&rtxn)?;

        // We execute the user defined function with the three iterators.
        (f)(processed_iter, aborted_iter, pending_iter)
    }
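    // A sketch of flattening the three iterators into one status list (this
    // assumes the same `String` metas the tests below use):
    //
    //     let statuses = update_store.iter_metas(|processed, aborted, pending| {
    //         let mut all = Vec::new();
    //         for result in processed { let (key, _) = result?; all.push((key.get(), "processed")); }
    //         for result in aborted { let (key, _) = result?; all.push((key.get(), "aborted")); }
    //         for result in pending { let (key, _) = result?; all.push((key.get(), "pending")); }
    //         Ok(all)
    //     })?;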
    /// Returns the meta associated with an update, or `None` if the update doesn't exist.
    pub fn meta(&self, update_id: u64) -> heed::Result<Option<UpdateStatusMeta<M, N>>>
    where
        M: for<'a> Deserialize<'a>,
        N: for<'a> Deserialize<'a>,
    {
        let rtxn = self.env.read_txn()?;
        let key = BEU64::new(update_id);

        if let Some(meta) = self.pending_meta.get(&rtxn, &key)? {
            return Ok(Some(UpdateStatusMeta::Pending(meta)));
        }

        if let Some(meta) = self.processed_meta.get(&rtxn, &key)? {
            return Ok(Some(UpdateStatusMeta::Processed(meta)));
        }

        if let Some(meta) = self.aborted_meta.get(&rtxn, &key)? {
            return Ok(Some(UpdateStatusMeta::Aborted(meta)));
        }

        Ok(None)
    }
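    // Because processing happens on a background thread, callers that need the
    // result have to poll `meta` (the tests below simply sleep for a fixed
    // duration). A sketch of a polling loop (the 10 ms interval is arbitrary):
    //
    //     while let Some(UpdateStatusMeta::Pending(_)) = update_store.meta(update_id)? {
    //         std::thread::sleep(std::time::Duration::from_millis(10));
    //     }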
    /// Aborts an update: the content of an aborted update is deleted and
    /// its meta is moved into the aborted updates database.
    ///
    /// Trying to abort an update that is currently being processed, that has
    /// already been processed, or that doesn't actually exist will
    /// return `None`.
    pub fn abort_update(&self, update_id: u64) -> heed::Result<Option<M>>
    where M: Serialize + for<'a> Deserialize<'a>,
    {
        let mut wtxn = self.env.write_txn()?;
        let key = BEU64::new(update_id);

        // We cannot abort an update that is currently being processed.
        if self.pending_meta.first(&wtxn)?.map(|(key, _)| key.get()) == Some(update_id) {
            return Ok(None);
        }

        let meta = match self.pending_meta.get(&wtxn, &key)? {
            Some(meta) => meta,
            None => return Ok(None),
        };

        self.aborted_meta.put(&mut wtxn, &key, &meta)?;
        self.pending_meta.delete(&mut wtxn, &key)?;
        self.pending.delete(&mut wtxn, &key)?;

        wtxn.commit()?;

        Ok(Some(meta))
    }
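    // A sketch of handling both outcomes (assumes `M = String` as in the tests):
    //
    //     match update_store.abort_update(update_id)? {
    //         Some(meta) => println!("aborted update {} ({})", update_id, meta),
    //         None => println!("update {} is processing, already done, or unknown", update_id),
    //     }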
    /// Aborts all the pending updates except the one currently being processed.
    /// Returns the update metas and ids that were successfully aborted.
    pub fn abort_pendings(&self) -> heed::Result<Vec<(u64, M)>>
    where M: Serialize + for<'a> Deserialize<'a>,
    {
        let mut wtxn = self.env.write_txn()?;
        let mut aborted_updates = Vec::new();

        // We skip the first pending update as it is currently being processed.
        for result in self.pending_meta.iter(&wtxn)?.skip(1) {
            let (key, meta) = result?;
            let id = key.get();
            aborted_updates.push((id, meta));
        }

        for (id, meta) in &aborted_updates {
            let key = BEU64::new(*id);
            self.aborted_meta.put(&mut wtxn, &key, &meta)?;
            self.pending_meta.delete(&mut wtxn, &key)?;
            self.pending.delete(&mut wtxn, &key)?;
        }

        wtxn.commit()?;

        Ok(aborted_updates)
    }
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum UpdateStatusMeta<M, N> {
    Pending(M),
    Processed(N),
    Aborted(M),
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::thread;
    use std::time::{Duration, Instant};

    #[test]
    fn simple() {
        let dir = tempfile::tempdir().unwrap();
        let options = EnvOpenOptions::new();
        let update_store = UpdateStore::open(options, dir, |_id, meta: String, _content: &_| {
            Ok(meta + " processed")
        }).unwrap();

        let meta = String::from("kiki");
        let update_id = update_store.register_update(&meta, &[]).unwrap();

        thread::sleep(Duration::from_millis(100));

        let meta = update_store.meta(update_id).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed(format!("kiki processed")));
    }

    #[test]
    #[ignore]
    fn long_running_update() {
        let dir = tempfile::tempdir().unwrap();
        let options = EnvOpenOptions::new();
        let update_store = UpdateStore::open(options, dir, |_id, meta: String, _content: &_| {
            thread::sleep(Duration::from_millis(400));
            Ok(meta + " processed")
        }).unwrap();

        let before_register = Instant::now();

        let meta = String::from("kiki");
        let update_id_kiki = update_store.register_update(&meta, &[]).unwrap();
        assert!(before_register.elapsed() < Duration::from_millis(200));

        let meta = String::from("coco");
        let update_id_coco = update_store.register_update(&meta, &[]).unwrap();
        assert!(before_register.elapsed() < Duration::from_millis(200));

        let meta = String::from("cucu");
        let update_id_cucu = update_store.register_update(&meta, &[]).unwrap();
        assert!(before_register.elapsed() < Duration::from_millis(200));

        thread::sleep(Duration::from_millis(400 * 3 + 100));

        let meta = update_store.meta(update_id_kiki).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed(format!("kiki processed")));

        let meta = update_store.meta(update_id_coco).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed(format!("coco processed")));

        let meta = update_store.meta(update_id_cucu).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed(format!("cucu processed")));
    }
}