Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-23 13:24:27 +01:00)

Merge pull request #36 from meilisearch/index-facets: Index facets values and support facet numbers

Commit b0c5f59c07

Cargo.lock (generated) | 17
@@ -550,6 +550,12 @@ dependencies = [
  "cfg-if 0.1.10",
 ]
 
+[[package]]
+name = "maplit"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
+
 [[package]]
 name = "matches"
 version = "0.1.8"

@@ -608,10 +614,12 @@ dependencies = [
  "levenshtein_automata",
  "linked-hash-map",
  "log",
+ "maplit",
  "memmap",
  "near-proximity",
  "obkv",
  "once_cell",
+ "ordered-float",
  "rayon",
  "ringtail",
  "roaring",

@@ -708,6 +716,15 @@ version = "11.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a170cebd8021a008ea92e4db85a72f80b35df514ec664b296fdcbb654eac0b2c"
 
+[[package]]
+name = "ordered-float"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fe9037165d7023b1228bc4ae9a2fa1a2b0095eca6c2998c624723dfd01314a5"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "page_size"
 version = "0.4.2"
Cargo.toml (path inferred; file header missing from the mirror)

@@ -23,6 +23,7 @@ memmap = "0.7.0"
 near-proximity = { git = "https://github.com/Kerollmops/plane-sweep-proximity", rev = "6608205" }
 obkv = "0.1.0"
 once_cell = "1.4.0"
+ordered-float = "2.0.0"
 rayon = "1.3.1"
 ringtail = "0.3.0"
 roaring = "0.6.1"

@@ -44,6 +45,7 @@ stderrlog = "0.5.0"
 
 [dev-dependencies]
 criterion = "0.3.3"
+maplit = "1.0.2"
 
 [build-dependencies]
 fst = "0.4.4"
http-ui/Cargo.lock (generated) | 10

@@ -989,6 +989,7 @@ dependencies = [
  "near-proximity",
  "obkv",
  "once_cell",
+ "ordered-float",
  "rayon",
  "ringtail",
  "roaring",

@@ -1205,6 +1206,15 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
 
+[[package]]
+name = "ordered-float"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fe9037165d7023b1228bc4ae9a2fa1a2b0095eca6c2998c624723dfd01314a5"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "page_size"
 version = "0.4.2"
http-ui/src/main.rs (path inferred; file header missing from the mirror)

@@ -1,5 +1,5 @@
 use std::borrow::Cow;
-use std::collections::HashSet;
+use std::collections::{HashMap, HashSet};
 use std::fs::{File, create_dir_all};
 use std::net::SocketAddr;
 use std::path::PathBuf;

@@ -210,6 +210,8 @@ enum UpdateMetaProgress {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
+#[serde(rename_all = "camelCase")]
 struct Settings {
     #[serde(
         default,

@@ -224,6 +226,9 @@ struct Settings {
         skip_serializing_if = "Option::is_none",
     )]
     searchable_attributes: Option<Option<Vec<String>>>,
+
+    #[serde(default)]
+    faceted_attributes: Option<HashMap<String, String>>,
 }
 
 // Any value that is present is considered Some value, including null.

@@ -367,6 +372,11 @@ async fn main() -> anyhow::Result<()> {
                 }
             }
 
+            // We transpose the settings JSON struct into a real setting update.
+            if let Some(facet_types) = settings.faceted_attributes {
+                builder.set_faceted_fields(facet_types);
+            }
+
             let result = builder.execute(|indexing_step| {
                 let (current, total) = match indexing_step {
                     TransformFromUserIntoGenericFormat { documents_seen } => (documents_seen, None),
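Note: the new `faceted_attributes` setting maps a field name to a facet type ("string", "float" or "integer"). A minimal sketch of a payload the struct above would accept, assuming the surrounding settings route is unchanged (the `json!` wrapper and the field names "price" and "tags" are illustrative only):

    // Hypothetical settings payload; rename_all = "camelCase" gives the key below.
    let settings = serde_json::json!({
        "facetedAttributes": { "price": "float", "tags": "string" }
    });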
src/facet/facet_type.rs (new file) | 50

@@ -0,0 +1,50 @@
+use std::error::Error;
+use std::fmt;
+use std::str::FromStr;
+
+use serde::{Serialize, Deserialize};
+
+#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
+#[derive(Serialize, Deserialize)]
+pub enum FacetType {
+    String,
+    Float,
+    Integer,
+}
+
+impl fmt::Display for FacetType {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            FacetType::String => f.write_str("string"),
+            FacetType::Float => f.write_str("float"),
+            FacetType::Integer => f.write_str("integer"),
+        }
+    }
+}
+
+impl FromStr for FacetType {
+    type Err = InvalidFacetType;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if s.eq_ignore_ascii_case("string") {
+            Ok(FacetType::String)
+        } else if s.eq_ignore_ascii_case("float") {
+            Ok(FacetType::Float)
+        } else if s.eq_ignore_ascii_case("integer") {
+            Ok(FacetType::Integer)
+        } else {
+            Err(InvalidFacetType)
+        }
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
+pub struct InvalidFacetType;
+
+impl fmt::Display for InvalidFacetType {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str(r#"Invalid facet type, must be "string", "float" or "integer""#)
+    }
+}
+
+impl Error for InvalidFacetType { }
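Parsing is case-insensitive and `Display` emits the lowercase names, so a hedged usage sketch of the type above (assuming the crate's `FacetType` is in scope):

    use std::str::FromStr;

    let ft = FacetType::from_str("Integer").unwrap(); // parsing is case-insensitive
    assert_eq!(ft, FacetType::Integer);
    assert_eq!(ft.to_string(), "integer");            // Display round-trips lowercase
    assert!(FacetType::from_str("date").is_err());    // InvalidFacetType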
src/facet/mod.rs (new file) | 4

@@ -0,0 +1,4 @@
+mod facet_type;
+pub mod value_encoding;
+
+pub use self::facet_type::FacetType;
src/facet/value_encoding.rs (new file) | 69

@@ -0,0 +1,69 @@
+// https://stackoverflow.com/a/43305015/1941280
+#[inline]
+pub fn f64_into_bytes(float: f64) -> Option<[u8; 8]> {
+    if float.is_finite() {
+        if float == 0.0 || float == -0.0 {
+            return Some(xor_first_bit(0.0_f64.to_be_bytes()));
+        } else if float.is_sign_negative() {
+            return Some(xor_all_bits(float.to_be_bytes()));
+        } else if float.is_sign_positive() {
+            return Some(xor_first_bit(float.to_be_bytes()));
+        }
+    }
+    None
+}
+
+#[inline]
+pub fn i64_into_bytes(int: i64) -> [u8; 8] {
+    xor_first_bit(int.to_be_bytes())
+}
+
+#[inline]
+pub fn i64_from_bytes(bytes: [u8; 8]) -> i64 {
+    i64::from_be_bytes(xor_first_bit(bytes))
+}
+
+#[inline]
+fn xor_first_bit(mut x: [u8; 8]) -> [u8; 8] {
+    x[0] ^= 0x80;
+    x
+}
+
+#[inline]
+fn xor_all_bits(mut x: [u8; 8]) -> [u8; 8] {
+    x.iter_mut().for_each(|b| *b ^= 0xff);
+    x
+}
+
+#[cfg(test)]
+mod tests {
+    use std::cmp::Ordering::Less;
+    use super::*;
+
+    fn is_sorted<T: Ord>(x: &[T]) -> bool {
+        x.windows(2).map(|x| x[0].cmp(&x[1])).all(|o| o == Less)
+    }
+
+    #[test]
+    fn ordered_f64_bytes() {
+        let a = -13_f64;
+        let b = -10.0;
+        let c = -0.0;
+        let d = 1.0;
+        let e = 43.0;
+
+        let vec: Vec<_> = [a, b, c, d, e].iter().cloned().map(f64_into_bytes).collect();
+        assert!(is_sorted(&vec), "{:?}", vec);
+    }
+
+    #[test]
+    fn ordered_i64_bytes() {
+        let a = -10_i64;
+        let b = -0_i64;
+        let c = 1_i64;
+        let d = 43_i64;
+
+        let vec: Vec<_> = [a, b, c, d].iter().cloned().map(i64_into_bytes).collect();
+        assert!(is_sorted(&vec), "{:?}", vec);
+    }
+}
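The file links to the StackOverflow explanation of this classic order-preserving transform: big-endian IEEE-754 bytes sort correctly once positives get their sign bit flipped and negatives get every bit flipped (so larger magnitudes sort lower), and two's-complement integers only need the sign-bit flip. A standalone sketch, independent of the crate and using only std, showing why byte order then matches numeric order:

    // Same transform as f64_into_bytes above, inlined for demonstration.
    fn ordered_bytes(x: f64) -> [u8; 8] {
        let mut b = if x == 0.0 { 0.0_f64.to_be_bytes() } else { x.to_be_bytes() };
        if x != 0.0 && x.is_sign_negative() {
            b.iter_mut().for_each(|v| *v ^= 0xff); // negatives: flip all bits
        } else {
            b[0] ^= 0x80; // zero and positives: flip only the sign bit
        }
        b
    }

    fn main() {
        let values = [-13.0_f64, -10.0, -0.0, 1.0, 43.0];
        let encoded: Vec<[u8; 8]> = values.iter().map(|&v| ordered_bytes(v)).collect();
        // Lexicographic byte comparison now matches the numeric order.
        assert!(encoded.windows(2).all(|w| w[0] < w[1]));
    }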
src/heed_codec/facet/facet_value_f64_codec.rs (new file) | 50

@@ -0,0 +1,50 @@
+use std::borrow::Cow;
+use std::convert::TryInto;
+
+use crate::facet::value_encoding::f64_into_bytes;
+
+pub struct FacetValueF64Codec;
+
+impl<'a> heed::BytesDecode<'a> for FacetValueF64Codec {
+    type DItem = (u8, f64);
+
+    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
+        let (field_id, buffer) = bytes.split_first()?;
+        let value = buffer[8..].try_into().ok().map(f64::from_be_bytes)?;
+        Some((*field_id, value))
+    }
+}
+
+impl heed::BytesEncode<'_> for FacetValueF64Codec {
+    type EItem = (u8, f64);
+
+    fn bytes_encode((field_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
+        let mut buffer = [0u8; 16];
+
+        // Write the globally ordered float.
+        let bytes = f64_into_bytes(*value)?;
+        buffer[..8].copy_from_slice(&bytes[..]);
+
+        // Then the f64 value just to be able to read it back.
+        let bytes = value.to_be_bytes();
+        buffer[8..].copy_from_slice(&bytes[..]);
+
+        let mut bytes = Vec::with_capacity(buffer.len() + 1);
+        bytes.push(*field_id);
+        bytes.extend_from_slice(&buffer[..]);
+        Some(Cow::Owned(bytes))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use heed::{BytesEncode, BytesDecode};
+    use super::*;
+
+    #[test]
+    fn globally_ordered_f64() {
+        let bytes = FacetValueF64Codec::bytes_encode(&(3, -32.0)).unwrap();
+        let (name, value) = FacetValueF64Codec::bytes_decode(&bytes).unwrap();
+        assert_eq!((name, value), (3, -32.0));
+    }
+}
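The encoded key is 17 bytes: one field-id byte, then the order-preserving bytes used only for LMDB's lexicographic sort, then the raw IEEE-754 bytes so decoding never has to reverse the XOR transform. A hedged check of that layout, reusing the codec's own test values:

    use heed::BytesEncode;

    let key = FacetValueF64Codec::bytes_encode(&(3, -32.0)).unwrap();
    let key: &[u8] = key.as_ref();
    assert_eq!(key.len(), 17);                              // 1 + 8 ordered + 8 raw
    assert_eq!(key[0], 3);                                  // field id prefix
    assert_eq!(&key[9..], &(-32.0_f64).to_be_bytes()[..]);  // raw value tail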
src/heed_codec/facet/facet_value_i64_codec.rs (new file) | 28

@@ -0,0 +1,28 @@
+use std::borrow::Cow;
+use std::convert::TryInto;
+
+use crate::facet::value_encoding::{i64_from_bytes, i64_into_bytes};
+
+pub struct FacetValueI64Codec;
+
+impl<'a> heed::BytesDecode<'a> for FacetValueI64Codec {
+    type DItem = (u8, i64);
+
+    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
+        let (field_id, buffer) = bytes.split_first()?;
+        let value = buffer.try_into().map(i64_from_bytes).ok()?;
+        Some((*field_id, value))
+    }
+}
+
+impl heed::BytesEncode<'_> for FacetValueI64Codec {
+    type EItem = (u8, i64);
+
+    fn bytes_encode((field_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
+        let value = i64_into_bytes(*value);
+        let mut bytes = Vec::with_capacity(value.len() + 1);
+        bytes.push(*field_id);
+        bytes.extend_from_slice(&value[..]);
+        Some(Cow::Owned(bytes))
+    }
+}
src/heed_codec/facet/facet_value_string_codec.rs (new file) | 25

@@ -0,0 +1,25 @@
+use std::borrow::Cow;
+use std::str;
+
+pub struct FacetValueStringCodec;
+
+impl<'a> heed::BytesDecode<'a> for FacetValueStringCodec {
+    type DItem = (u8, &'a str);
+
+    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
+        let (field_id, bytes) = bytes.split_first()?;
+        let value = str::from_utf8(bytes).ok()?;
+        Some((*field_id, value))
+    }
+}
+
+impl<'a> heed::BytesEncode<'a> for FacetValueStringCodec {
+    type EItem = (u8, &'a str);
+
+    fn bytes_encode((field_id, value): &Self::EItem) -> Option<Cow<[u8]>> {
+        let mut bytes = Vec::with_capacity(value.len() + 1);
+        bytes.push(*field_id);
+        bytes.extend_from_slice(value.as_bytes());
+        Some(Cow::Owned(bytes))
+    }
+}
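Unlike the numeric codecs, string keys need no ordering transform: `[field_id][utf-8 bytes]` already sorts one field's values by raw byte order. A hedged round-trip sketch:

    use heed::{BytesEncode, BytesDecode};

    let key = FacetValueStringCodec::bytes_encode(&(2, "blue")).unwrap();
    let key: &[u8] = key.as_ref();
    assert_eq!(key[0], 2);
    assert_eq!(&key[1..], &b"blue"[..]);
    assert_eq!(FacetValueStringCodec::bytes_decode(key), Some((2, "blue")));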
src/heed_codec/facet/mod.rs (new file) | 7

@@ -0,0 +1,7 @@
+mod facet_value_f64_codec;
+mod facet_value_i64_codec;
+mod facet_value_string_codec;
+
+pub use self::facet_value_f64_codec::FacetValueF64Codec;
+pub use self::facet_value_i64_codec::FacetValueI64Codec;
+pub use self::facet_value_string_codec::FacetValueStringCodec;
src/heed_codec/mod.rs (path inferred; file header missing from the mirror)

@@ -4,6 +4,7 @@ mod cbo_roaring_bitmap_codec;
 mod obkv_codec;
 mod roaring_bitmap_codec;
 mod str_str_u8_codec;
+pub mod facet;
 
 pub use self::beu32_str_codec::BEU32StrCodec;
 pub use self::bo_roaring_bitmap_codec::BoRoaringBitmapCodec;
src/index.rs | 38

@@ -1,4 +1,5 @@
 use std::borrow::Cow;
+use std::collections::HashMap;
 use std::path::Path;
 
 use anyhow::Context;

@@ -6,9 +7,10 @@ use heed::types::*;
 use heed::{PolyDatabase, Database, RwTxn, RoTxn};
 use roaring::RoaringBitmap;
 
+use crate::facet::FacetType;
+use crate::fields_ids_map::FieldsIdsMap;
 use crate::Search;
 use crate::{BEU32, DocumentId};
-use crate::fields_ids_map::FieldsIdsMap;
 use crate::{
     RoaringBitmapCodec, BEU32StrCodec, StrStrU8Codec, ObkvCodec,
     BoRoaringBitmapCodec, CboRoaringBitmapCodec,

@@ -16,6 +18,7 @@ use crate::{
 
 pub const DISPLAYED_FIELDS_KEY: &str = "displayed-fields";
 pub const DOCUMENTS_IDS_KEY: &str = "documents-ids";
+pub const FACETED_FIELDS_KEY: &str = "faceted-fields";
 pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
 pub const PRIMARY_KEY_KEY: &str = "primary-key";
 pub const SEARCHABLE_FIELDS_KEY: &str = "searchable-fields";

@@ -34,22 +37,33 @@ pub struct Index {
     pub docid_word_positions: Database<BEU32StrCodec, BoRoaringBitmapCodec>,
     /// Maps the proximity between a pair of words with all the docids where this relation appears.
     pub word_pair_proximity_docids: Database<StrStrU8Codec, CboRoaringBitmapCodec>,
+    /// Maps the facet field id and the globally ordered value with the docids that corresponds to it.
+    pub facet_field_id_value_docids: Database<ByteSlice, CboRoaringBitmapCodec>,
     /// Maps the document id to the document as an obkv store.
     pub documents: Database<OwnedType<BEU32>, ObkvCodec>,
 }
 
 impl Index {
     pub fn new<P: AsRef<Path>>(mut options: heed::EnvOpenOptions, path: P) -> anyhow::Result<Index> {
-        options.max_dbs(5);
+        options.max_dbs(6);
 
         let env = options.open(path)?;
         let main = env.create_poly_database(Some("main"))?;
         let word_docids = env.create_database(Some("word-docids"))?;
         let docid_word_positions = env.create_database(Some("docid-word-positions"))?;
         let word_pair_proximity_docids = env.create_database(Some("word-pair-proximity-docids"))?;
+        let facet_field_id_value_docids = env.create_database(Some("facet-field-id-value-docids"))?;
         let documents = env.create_database(Some("documents"))?;
 
-        Ok(Index { env, main, word_docids, docid_word_positions, word_pair_proximity_docids, documents })
+        Ok(Index {
+            env,
+            main,
+            word_docids,
+            docid_word_positions,
+            word_pair_proximity_docids,
+            facet_field_id_value_docids,
+            documents,
+        })
     }
 
     /// Create a write transaction to be able to write into the index.

@@ -175,6 +189,24 @@ impl Index {
         self.main.get::<_, Str, ByteSlice>(rtxn, SEARCHABLE_FIELDS_KEY)
     }
 
+    /* faceted fields */
+
+    /// Writes the facet fields ids associated with their facet type or `None` if
+    /// the facet type is currently unknown.
+    pub fn put_faceted_fields(&self, wtxn: &mut RwTxn, fields_types: &HashMap<u8, FacetType>) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeJson<_>>(wtxn, FACETED_FIELDS_KEY, fields_types)
+    }
+
+    /// Deletes the facet fields ids associated with their facet type.
+    pub fn delete_faceted_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, FACETED_FIELDS_KEY)
+    }
+
+    /// Returns the facet fields ids associated with their facet type.
+    pub fn faceted_fields(&self, wtxn: &RoTxn) -> heed::Result<HashMap<u8, FacetType>> {
+        Ok(self.main.get::<_, Str, SerdeJson<_>>(wtxn, FACETED_FIELDS_KEY)?.unwrap_or_default())
+    }
+
     /* words fst */
 
     /// Writes the FST which is the words dictionary of the engine.
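A hedged sketch of how the new metadata accessors compose, assuming an `index` opened as in `Index::new` and heed's usual transaction flow (the crate name and field id are illustrative, not from the commit):

    use std::collections::HashMap;

    fn declare_facets(index: &Index) -> anyhow::Result<()> {
        let mut wtxn = index.write_txn()?;
        let mut fields_types = HashMap::new();
        fields_types.insert(1u8, FacetType::Float); // assume field id 1 holds float facets
        index.put_faceted_fields(&mut wtxn, &fields_types)?;
        assert_eq!(index.faceted_fields(&wtxn)?, fields_types);
        wtxn.commit()?;
        Ok(())
    }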
src/lib.rs | 10

@@ -4,6 +4,7 @@ mod index;
 mod mdfs;
 mod query_tokens;
 mod search;
+pub mod facet;
 pub mod heed_codec;
 pub mod proximity;
 pub mod subcommand;

@@ -33,6 +34,7 @@ pub type FastMap8<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher64>>;
 pub type SmallString32 = smallstr::SmallString<[u8; 32]>;
 pub type SmallVec32<T> = smallvec::SmallVec<[T; 32]>;
 pub type SmallVec16<T> = smallvec::SmallVec<[T; 16]>;
+pub type SmallVec8<T> = smallvec::SmallVec<[T; 8]>;
 pub type BEU32 = heed::zerocopy::U32<heed::byteorder::BE>;
 pub type BEU64 = heed::zerocopy::U64<heed::byteorder::BE>;
 pub type DocumentId = u32;

@@ -60,9 +62,9 @@ pub fn obkv_to_json(
 }
 
 /// Transform a JSON value into a string that can be indexed.
-pub fn json_to_string(value: Value) -> Option<String> {
+pub fn json_to_string(value: &Value) -> Option<String> {
 
-    fn inner(value: Value, output: &mut String) -> bool {
+    fn inner(value: &Value, output: &mut String) -> bool {
         use std::fmt::Write;
         match value {
             Value::Null => false,

@@ -121,7 +123,7 @@ mod tests {
             "not_there": null,
         });
 
-        let string = json_to_string(value).unwrap();
+        let string = json_to_string(&value).unwrap();
         assert_eq!(string, "name: John Doe. age: 43. ");
     }
 

@@ -135,7 +137,7 @@ mod tests {
             null,
         ]);
 
-        let string = json_to_string(value).unwrap();
+        let string = json_to_string(&value).unwrap();
         // We don't care about having two point (.) after the other as
         // the distance of hard separators is clamped to 8 anyway.
         assert_eq!(string, "name: John Doe. . 43. hello. I. am. fine. . ");
src/subcommand/infos.rs (path inferred; file header missing from the mirror)

@@ -78,6 +78,16 @@ enum Command {
         words: Vec<String>,
     },
 
+    /// Outputs a CSV with the documents ids along with the facet values where it appears.
+    FacetValuesDocids {
+        /// Display the whole documents ids in details.
+        #[structopt(long)]
+        full_display: bool,
+
+        /// The field name in the document.
+        field_name: String,
+    },
+
     /// Outputs the total size of all the docid-word-positions keys and values.
     TotalDocidWordPositionsSize,
 

@@ -147,6 +157,9 @@ pub fn run(opt: Opt) -> anyhow::Result<()> {
         MostCommonWords { limit } => most_common_words(&index, &rtxn, limit),
         BiggestValues { limit } => biggest_value_sizes(&index, &rtxn, limit),
         WordsDocids { full_display, words } => words_docids(&index, &rtxn, !full_display, words),
+        FacetValuesDocids { full_display, field_name } => {
+            facet_values_docids(&index, &rtxn, !full_display, field_name)
+        },
         TotalDocidWordPositionsSize => total_docid_word_positions_size(&index, &rtxn),
         AverageNumberOfWordsByDoc => average_number_of_words_by_doc(&index, &rtxn),
         AverageNumberOfPositionsByWord => {

@@ -256,6 +269,64 @@ fn words_docids(index: &Index, rtxn: &heed::RoTxn, debug: bool, words: Vec<Strin
     Ok(wtr.flush()?)
 }
 
+fn facet_values_docids(index: &Index, rtxn: &heed::RoTxn, debug: bool, field_name: String) -> anyhow::Result<()> {
+    use crate::facet::FacetType;
+    use crate::heed_codec::facet::{FacetValueStringCodec, FacetValueF64Codec, FacetValueI64Codec};
+    use heed::{BytesDecode, Error::Decoding};
+
+    let fields_ids_map = index.fields_ids_map(&rtxn)?;
+    let faceted_fields = index.faceted_fields(&rtxn)?;
+
+    let field_id = fields_ids_map.id(&field_name)
+        .with_context(|| format!("field {} not found", field_name))?;
+    let field_type = faceted_fields.get(&field_id)
+        .with_context(|| format!("field {} is not faceted", field_name))?;
+
+    let iter = index.facet_field_id_value_docids.prefix_iter(&rtxn, &[field_id])?;
+    let iter = match field_type {
+        FacetType::String => {
+            let iter = iter
+                .map(|result| result.and_then(|(key, value)| {
+                    let (_, key) = FacetValueStringCodec::bytes_decode(key).ok_or(Decoding)?;
+                    Ok((key.to_string(), value))
+                }));
+            Box::new(iter) as Box<dyn Iterator<Item=_>>
+        },
+        FacetType::Float => {
+            let iter = iter
+                .map(|result| result.and_then(|(key, value)| {
+                    let (_, key) = FacetValueF64Codec::bytes_decode(key).ok_or(Decoding)?;
+                    Ok((key.to_string(), value))
+                }));
+            Box::new(iter)
+        },
+        FacetType::Integer => {
+            let iter = iter
+                .map(|result| result.and_then(|(key, value)| {
+                    let (_, key) = FacetValueI64Codec::bytes_decode(key).ok_or(Decoding)?;
+                    Ok((key.to_string(), value))
+                }));
+            Box::new(iter)
+        },
+    };
+
+    let stdout = io::stdout();
+    let mut wtr = csv::Writer::from_writer(stdout.lock());
+    wtr.write_record(&["facet_value", "documents_ids"])?;
+
+    for result in iter {
+        let (value, docids) = result?;
+        let docids = if debug {
+            format!("{:?}", docids)
+        } else {
+            format!("{:?}", docids.iter().collect::<Vec<_>>())
+        };
+        wtr.write_record(&[value, docids])?;
+    }
+
+    Ok(wtr.flush()?)
+}
+
 fn export_words_fst(index: &Index, rtxn: &heed::RoTxn, output: PathBuf) -> anyhow::Result<()> {
     use std::fs::File;
     use std::io::Write as _;
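With structopt's default kebab-case variant naming, the new command would presumably be invoked as something like `infos facet-values-docids --full-display <field_name>`; the exact binary name and argument order are assumptions, since the surrounding `Opt` definition is not part of this diff. It prints one `facet_value,documents_ids` CSV row per distinct value of the field.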
src/update/clear_documents.rs (path inferred; file header missing from the mirror)

@@ -18,33 +18,25 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
             word_docids,
             docid_word_positions,
             word_pair_proximity_docids,
+            facet_field_id_value_docids,
             documents,
         } = self.index;
 
-        // We clear the word fst.
+        // We retrieve the number of documents ids that we are deleting.
+        let number_of_documents = self.index.number_of_documents(self.wtxn)?;
+
+        // We clean some of the main engine datastructures.
         self.index.put_words_fst(self.wtxn, &fst::Set::default())?;
-
-        // We clear the users ids documents ids.
         self.index.put_users_ids_documents_ids(self.wtxn, &fst::Map::default())?;
-
-        // We retrieve the documents ids.
-        let documents_ids = self.index.documents_ids(self.wtxn)?;
-
-        // We clear the internal documents ids.
         self.index.put_documents_ids(self.wtxn, &RoaringBitmap::default())?;
 
-        // We clear the word docids.
+        // Clear the other databases.
         word_docids.clear(self.wtxn)?;
-
-        // We clear the docid word positions.
         docid_word_positions.clear(self.wtxn)?;
-
-        // We clear the word pair proximity docids.
         word_pair_proximity_docids.clear(self.wtxn)?;
+        facet_field_id_value_docids.clear(self.wtxn)?;
-        // We clear the documents themselves.
         documents.clear(self.wtxn)?;
 
-        Ok(documents_ids.len() as usize)
+        Ok(number_of_documents)
     }
 }
src/update/delete_documents.rs (path inferred; file header missing from the mirror)

@@ -76,6 +76,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         word_docids,
         docid_word_positions,
         word_pair_proximity_docids,
+        facet_field_id_value_docids,
         documents,
     } = self.index;
 

@@ -158,7 +159,9 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         }
 
         // We construct an FST set that contains the words to delete from the words FST.
-        let words_to_delete = words.iter().filter_map(|(w, d)| if *d { Some(w.as_ref()) } else { None });
+        let words_to_delete = words.iter().filter_map(|(word, must_remove)| {
+            if *must_remove { Some(word.as_ref()) } else { None }
+        });
         let words_to_delete = fst::Set::from_iter(words_to_delete)?;
 
         let new_words_fst = {

@@ -191,6 +194,20 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             }
         }
 
+        drop(iter);
+
+        // We delete the documents ids that are under the facet field id values.
+        let mut iter = facet_field_id_value_docids.iter_mut(self.wtxn)?;
+        while let Some(result) = iter.next() {
+            let (bytes, mut docids) = result?;
+            docids.difference_with(&self.documents_ids);
+            if docids.is_empty() {
+                iter.del_current()?;
+            } else {
+                iter.put_current(bytes, &docids)?;
+            }
+        }
+
         Ok(self.documents_ids.len() as usize)
     }
 }
src/update/index_documents/merge_function.rs (path inferred; file header missing from the mirror)

@@ -29,23 +29,13 @@ pub fn main_merge(key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
             ensure!(values.windows(2).all(|vs| vs[0] == vs[1]), "fields ids map doesn't match");
             Ok(values[0].to_vec())
         },
-        DOCUMENTS_IDS_KEY => word_docids_merge(&[], values),
+        DOCUMENTS_IDS_KEY => roaring_bitmap_merge(values),
         otherwise => bail!("wut {:?}", otherwise),
     }
 }
 
 pub fn word_docids_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
-    let (head, tail) = values.split_first().unwrap();
-    let mut head = RoaringBitmap::deserialize_from(&head[..])?;
-
-    for value in tail {
-        let bitmap = RoaringBitmap::deserialize_from(&value[..])?;
-        head.union_with(&bitmap);
-    }
-
-    let mut vec = Vec::with_capacity(head.serialized_size());
-    head.serialize_into(&mut vec)?;
-    Ok(vec)
+    roaring_bitmap_merge(values)
 }
 
 pub fn docid_word_positions_merge(key: &[u8], _values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {

@@ -53,17 +43,11 @@ pub fn docid_word_positions_merge(key: &[u8], _values: &[Cow<[u8]>]) -> anyhow::
 }
 
 pub fn words_pairs_proximities_docids_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
-    let (head, tail) = values.split_first().unwrap();
-    let mut head = CboRoaringBitmapCodec::deserialize_from(&head[..])?;
-
-    for value in tail {
-        let bitmap = CboRoaringBitmapCodec::deserialize_from(&value[..])?;
-        head.union_with(&bitmap);
-    }
-
-    let mut vec = Vec::new();
-    CboRoaringBitmapCodec::serialize_into(&head, &mut vec)?;
-    Ok(vec)
+    cbo_roaring_bitmap_merge(values)
+}
+
+pub fn facet_field_value_docids_merge(_key: &[u8], values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
+    cbo_roaring_bitmap_merge(values)
 }
 
 pub fn documents_merge(key: &[u8], _values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {

@@ -85,3 +69,31 @@ pub fn merge_two_obkvs(base: obkv::KvReader, update: obkv::KvReader, buffer: &mu
 
     writer.finish().unwrap();
 }
+
+fn roaring_bitmap_merge(values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
+    let (head, tail) = values.split_first().unwrap();
+    let mut head = RoaringBitmap::deserialize_from(&head[..])?;
+
+    for value in tail {
+        let bitmap = RoaringBitmap::deserialize_from(&value[..])?;
+        head.union_with(&bitmap);
+    }
+
+    let mut vec = Vec::with_capacity(head.serialized_size());
+    head.serialize_into(&mut vec)?;
+    Ok(vec)
+}
+
+fn cbo_roaring_bitmap_merge(values: &[Cow<[u8]>]) -> anyhow::Result<Vec<u8>> {
+    let (head, tail) = values.split_first().unwrap();
+    let mut head = CboRoaringBitmapCodec::deserialize_from(&head[..])?;
+
+    for value in tail {
+        let bitmap = CboRoaringBitmapCodec::deserialize_from(&value[..])?;
+        head.union_with(&bitmap);
+    }
+
+    let mut vec = Vec::new();
+    CboRoaringBitmapCodec::serialize_into(&head, &mut vec)?;
+    Ok(vec)
+}
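Both extracted helpers reduce to the same k-way union over serialized bitmaps. A standalone sketch of what `roaring_bitmap_merge` computes, assuming roaring 0.6 as pinned in Cargo.toml above:

    use roaring::RoaringBitmap;

    fn main() -> std::io::Result<()> {
        let a: RoaringBitmap = (0u32..3).collect();
        let b: RoaringBitmap = (2u32..5).collect();
        let mut buf_a = Vec::new();
        a.serialize_into(&mut buf_a)?;
        let mut buf_b = Vec::new();
        b.serialize_into(&mut buf_b)?;

        // Same steps as the helper: deserialize the head, union the tail into it.
        let mut head = RoaringBitmap::deserialize_from(&buf_a[..])?;
        head.union_with(&RoaringBitmap::deserialize_from(&buf_b[..])?);
        assert_eq!(head, (0u32..5).collect::<RoaringBitmap>());
        Ok(())
    }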
src/update/index_documents/mod.rs (path inferred; file header missing from the mirror)

@@ -16,10 +16,10 @@ use rayon::ThreadPool;
 
 use crate::index::Index;
 use crate::update::UpdateIndexingStep;
-use self::store::Store;
+use self::store::{Store, Readers};
 use self::merge_function::{
     main_merge, word_docids_merge, words_pairs_proximities_docids_merge,
-    docid_word_positions_merge, documents_merge,
+    docid_word_positions_merge, documents_merge, facet_field_value_docids_merge,
 };
 pub use self::transform::{Transform, TransformOutput};
 

@@ -327,8 +327,10 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         enum DatabaseType {
             Main,
             WordDocids,
+            FacetValuesDocids,
         }
 
+        let faceted_fields = self.index.faceted_fields(self.wtxn)?;
         let searchable_fields: HashSet<_> = match self.index.searchable_fields(self.wtxn)? {
             Some(fields) => fields.iter().copied().collect(),
             None => fields_ids_map.iter().map(|(id, _name)| id).collect(),

@@ -362,6 +364,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
             .map(|(i, documents)| {
                 let store = Store::new(
                     searchable_fields.clone(),
+                    faceted_fields.clone(),
                     linked_hash_map_size,
                     max_nb_chunks,
                     max_memory_by_job,

@@ -384,13 +387,23 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         let mut word_docids_readers = Vec::with_capacity(readers.len());
         let mut docid_word_positions_readers = Vec::with_capacity(readers.len());
         let mut words_pairs_proximities_docids_readers = Vec::with_capacity(readers.len());
+        let mut facet_field_value_docids_readers = Vec::with_capacity(readers.len());
         let mut documents_readers = Vec::with_capacity(readers.len());
         readers.into_iter().for_each(|readers| {
-            main_readers.push(readers.main);
-            word_docids_readers.push(readers.word_docids);
-            docid_word_positions_readers.push(readers.docid_word_positions);
-            words_pairs_proximities_docids_readers.push(readers.words_pairs_proximities_docids);
-            documents_readers.push(readers.documents);
+            let Readers {
+                main,
+                word_docids,
+                docid_word_positions,
+                words_pairs_proximities_docids,
+                facet_field_value_docids,
+                documents
+            } = readers;
+            main_readers.push(main);
+            word_docids_readers.push(word_docids);
+            docid_word_positions_readers.push(docid_word_positions);
+            words_pairs_proximities_docids_readers.push(words_pairs_proximities_docids);
+            facet_field_value_docids_readers.push(facet_field_value_docids);
+            documents_readers.push(documents);
         });
 
         // This is the function that merge the readers

@@ -413,6 +426,11 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         vec![
             (DatabaseType::Main, main_readers, main_merge as MergeFn),
             (DatabaseType::WordDocids, word_docids_readers, word_docids_merge),
+            (
+                DatabaseType::FacetValuesDocids,
+                facet_field_value_docids_readers,
+                facet_field_value_docids_merge,
+            ),
         ]
         .into_par_iter()
         .for_each(|(dbtype, readers, merge)| {

@@ -463,9 +481,11 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         self.index.put_documents_ids(self.wtxn, &documents_ids)?;
 
         let mut database_count = 0;
+        let total_databases = 6;
+
         progress_callback(UpdateIndexingStep::MergeDataIntoFinalDatabase {
             databases_seen: 0,
-            total_databases: 5,
+            total_databases,
         });
 
         debug!("Writing the docid word positions into LMDB on disk...");

@@ -480,7 +500,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         database_count += 1;
         progress_callback(UpdateIndexingStep::MergeDataIntoFinalDatabase {
             databases_seen: database_count,
-            total_databases: 5,
+            total_databases,
         });
 
         debug!("Writing the documents into LMDB on disk...");

@@ -495,7 +515,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         database_count += 1;
         progress_callback(UpdateIndexingStep::MergeDataIntoFinalDatabase {
             databases_seen: database_count,
-            total_databases: 5,
+            total_databases,
         });
 
         debug!("Writing the words pairs proximities docids into LMDB on disk...");

@@ -510,7 +530,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         database_count += 1;
         progress_callback(UpdateIndexingStep::MergeDataIntoFinalDatabase {
             databases_seen: database_count,
-            total_databases: 5,
+            total_databases,
         });
 
         for (db_type, result) in receiver {

@@ -537,16 +557,27 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
                     write_method,
                 )?;
             },
+            DatabaseType::FacetValuesDocids => {
+                debug!("Writing the facet values docids into LMDB on disk...");
+                let db = *self.index.facet_field_id_value_docids.as_polymorph();
+                write_into_lmdb_database(
+                    self.wtxn,
+                    db,
+                    content,
+                    facet_field_value_docids_merge,
+                    write_method,
+                )?;
+            },
         }
 
         database_count += 1;
         progress_callback(UpdateIndexingStep::MergeDataIntoFinalDatabase {
             databases_seen: database_count,
-            total_databases: 5,
+            total_databases,
         });
     }
 
-    debug_assert_eq!(database_count, 5);
+    debug_assert_eq!(database_count, total_databases);
 
     info!("Transform output indexed in {:.02?}", before_indexing.elapsed());
 
@ -1,3 +1,4 @@
|
|||||||
|
use std::borrow::Cow;
|
||||||
use std::collections::{BTreeMap, HashMap, HashSet};
|
use std::collections::{BTreeMap, HashMap, HashSet};
|
||||||
use std::convert::{TryFrom, TryInto};
|
use std::convert::{TryFrom, TryInto};
|
||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
@ -5,22 +6,29 @@ use std::iter::FromIterator;
|
|||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
use std::{cmp, iter};
|
use std::{cmp, iter};
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::{bail, Context};
|
||||||
use bstr::ByteSlice as _;
|
use bstr::ByteSlice as _;
|
||||||
|
use grenad::{Reader, FileFuse, Writer, Sorter, CompressionType};
|
||||||
use heed::BytesEncode;
|
use heed::BytesEncode;
|
||||||
use linked_hash_map::LinkedHashMap;
|
use linked_hash_map::LinkedHashMap;
|
||||||
use log::{debug, info};
|
use log::{debug, info};
|
||||||
use grenad::{Reader, FileFuse, Writer, Sorter, CompressionType};
|
use ordered_float::OrderedFloat;
|
||||||
use roaring::RoaringBitmap;
|
use roaring::RoaringBitmap;
|
||||||
|
use serde_json::Value;
|
||||||
use tempfile::tempfile;
|
use tempfile::tempfile;
|
||||||
|
|
||||||
|
use crate::facet::FacetType;
|
||||||
use crate::heed_codec::{BoRoaringBitmapCodec, CboRoaringBitmapCodec};
|
use crate::heed_codec::{BoRoaringBitmapCodec, CboRoaringBitmapCodec};
|
||||||
|
use crate::heed_codec::facet::{FacetValueStringCodec, FacetValueF64Codec, FacetValueI64Codec};
|
||||||
use crate::tokenizer::{simple_tokenizer, only_token};
|
use crate::tokenizer::{simple_tokenizer, only_token};
|
||||||
use crate::update::UpdateIndexingStep;
|
use crate::update::UpdateIndexingStep;
|
||||||
use crate::{json_to_string, SmallVec32, Position, DocumentId};
|
use crate::{json_to_string, SmallVec8, SmallVec32, SmallString32, Position, DocumentId};
|
||||||
|
|
||||||
use super::{MergeFn, create_writer, create_sorter, writer_into_reader};
|
use super::{MergeFn, create_writer, create_sorter, writer_into_reader};
|
||||||
use super::merge_function::{main_merge, word_docids_merge, words_pairs_proximities_docids_merge};
|
use super::merge_function::{
|
||||||
|
main_merge, word_docids_merge, words_pairs_proximities_docids_merge,
|
||||||
|
facet_field_value_docids_merge,
|
||||||
|
};
|
||||||
|
|
||||||
const LMDB_MAX_KEY_LENGTH: usize = 511;
|
const LMDB_MAX_KEY_LENGTH: usize = 511;
|
||||||
const ONE_KILOBYTE: usize = 1024 * 1024;
|
const ONE_KILOBYTE: usize = 1024 * 1024;
|
||||||
@ -33,17 +41,21 @@ pub struct Readers {
|
|||||||
pub word_docids: Reader<FileFuse>,
|
pub word_docids: Reader<FileFuse>,
|
||||||
pub docid_word_positions: Reader<FileFuse>,
|
pub docid_word_positions: Reader<FileFuse>,
|
||||||
pub words_pairs_proximities_docids: Reader<FileFuse>,
|
pub words_pairs_proximities_docids: Reader<FileFuse>,
|
||||||
|
pub facet_field_value_docids: Reader<FileFuse>,
|
||||||
pub documents: Reader<FileFuse>,
|
pub documents: Reader<FileFuse>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Store {
|
pub struct Store {
|
||||||
// Indexing parameters
|
// Indexing parameters
|
||||||
searchable_fields: HashSet<u8>,
|
searchable_fields: HashSet<u8>,
|
||||||
|
faceted_fields: HashMap<u8, FacetType>,
|
||||||
// Caches
|
// Caches
|
||||||
word_docids: LinkedHashMap<SmallVec32<u8>, RoaringBitmap>,
|
word_docids: LinkedHashMap<SmallVec32<u8>, RoaringBitmap>,
|
||||||
word_docids_limit: usize,
|
word_docids_limit: usize,
|
||||||
words_pairs_proximities_docids: LinkedHashMap<(SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap>,
|
words_pairs_proximities_docids: LinkedHashMap<(SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap>,
|
||||||
words_pairs_proximities_docids_limit: usize,
|
words_pairs_proximities_docids_limit: usize,
|
||||||
|
facet_field_value_docids: LinkedHashMap<(u8, FacetValue), RoaringBitmap>,
|
||||||
|
facet_field_value_docids_limit: usize,
|
||||||
// MTBL parameters
|
// MTBL parameters
|
||||||
chunk_compression_type: CompressionType,
|
chunk_compression_type: CompressionType,
|
||||||
chunk_compression_level: Option<u32>,
|
chunk_compression_level: Option<u32>,
|
||||||
@ -52,6 +64,7 @@ pub struct Store {
|
|||||||
main_sorter: Sorter<MergeFn>,
|
main_sorter: Sorter<MergeFn>,
|
||||||
word_docids_sorter: Sorter<MergeFn>,
|
word_docids_sorter: Sorter<MergeFn>,
|
||||||
words_pairs_proximities_docids_sorter: Sorter<MergeFn>,
|
words_pairs_proximities_docids_sorter: Sorter<MergeFn>,
|
||||||
|
facet_field_value_docids_sorter: Sorter<MergeFn>,
|
||||||
// MTBL writers
|
// MTBL writers
|
||||||
docid_word_positions_writer: Writer<File>,
|
docid_word_positions_writer: Writer<File>,
|
||||||
documents_writer: Writer<File>,
|
documents_writer: Writer<File>,
|
||||||
@ -60,6 +73,7 @@ pub struct Store {
|
|||||||
impl Store {
|
impl Store {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
searchable_fields: HashSet<u8>,
|
searchable_fields: HashSet<u8>,
|
||||||
|
faceted_fields: HashMap<u8, FacetType>,
|
||||||
linked_hash_map_size: Option<usize>,
|
linked_hash_map_size: Option<usize>,
|
||||||
max_nb_chunks: Option<usize>,
|
max_nb_chunks: Option<usize>,
|
||||||
max_memory: Option<usize>,
|
max_memory: Option<usize>,
|
||||||
@ -69,7 +83,7 @@ impl Store {
|
|||||||
) -> anyhow::Result<Store>
|
) -> anyhow::Result<Store>
|
||||||
{
|
{
|
||||||
// We divide the max memory by the number of sorter the Store have.
|
// We divide the max memory by the number of sorter the Store have.
|
||||||
let max_memory = max_memory.map(|mm| cmp::max(ONE_KILOBYTE, mm / 3));
|
let max_memory = max_memory.map(|mm| cmp::max(ONE_KILOBYTE, mm / 4));
|
||||||
let linked_hash_map_size = linked_hash_map_size.unwrap_or(500);
|
let linked_hash_map_size = linked_hash_map_size.unwrap_or(500);
|
||||||
|
|
||||||
let main_sorter = create_sorter(
|
let main_sorter = create_sorter(
|
||||||
@ -96,6 +110,14 @@ impl Store {
|
|||||||
max_nb_chunks,
|
max_nb_chunks,
|
||||||
max_memory,
|
max_memory,
|
||||||
);
|
);
|
||||||
|
let facet_field_value_docids_sorter = create_sorter(
|
||||||
|
facet_field_value_docids_merge,
|
||||||
|
chunk_compression_type,
|
||||||
|
chunk_compression_level,
|
||||||
|
chunk_fusing_shrink_size,
|
||||||
|
max_nb_chunks,
|
||||||
|
max_memory,
|
||||||
|
);
|
||||||
|
|
||||||
let documents_writer = tempfile().and_then(|f| {
|
let documents_writer = tempfile().and_then(|f| {
|
||||||
create_writer(chunk_compression_type, chunk_compression_level, f)
|
create_writer(chunk_compression_type, chunk_compression_level, f)
|
||||||
@ -107,11 +129,14 @@ impl Store {
|
|||||||
Ok(Store {
|
Ok(Store {
|
||||||
// Indexing parameters.
|
// Indexing parameters.
|
||||||
searchable_fields,
|
searchable_fields,
|
||||||
|
faceted_fields,
|
||||||
// Caches
|
// Caches
|
||||||
word_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
|
word_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
|
||||||
word_docids_limit: linked_hash_map_size,
|
word_docids_limit: linked_hash_map_size,
|
||||||
words_pairs_proximities_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
|
words_pairs_proximities_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
|
||||||
words_pairs_proximities_docids_limit: linked_hash_map_size,
|
words_pairs_proximities_docids_limit: linked_hash_map_size,
|
||||||
|
facet_field_value_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
|
||||||
|
facet_field_value_docids_limit: linked_hash_map_size,
|
||||||
// MTBL parameters
|
// MTBL parameters
|
||||||
chunk_compression_type,
|
chunk_compression_type,
|
||||||
chunk_compression_level,
|
chunk_compression_level,
|
||||||
@ -120,6 +145,7 @@ impl Store {
|
|||||||
main_sorter,
|
main_sorter,
|
||||||
word_docids_sorter,
|
word_docids_sorter,
|
||||||
words_pairs_proximities_docids_sorter,
|
words_pairs_proximities_docids_sorter,
|
||||||
|
facet_field_value_docids_sorter,
|
||||||
// MTBL writers
|
// MTBL writers
|
||||||
docid_word_positions_writer,
|
docid_word_positions_writer,
|
||||||
documents_writer,
|
documents_writer,
|
||||||
@ -147,6 +173,35 @@ impl Store {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Save the documents ids under the facet field id and value we have seen it.
|
||||||
|
fn insert_facet_values_docid(
|
||||||
|
&mut self,
|
||||||
|
field_id: u8,
|
||||||
|
field_value: FacetValue,
|
||||||
|
id: DocumentId,
|
||||||
|
) -> anyhow::Result<()>
|
||||||
|
{
|
||||||
|
let key = (field_id, field_value);
|
||||||
|
// if get_refresh finds the element it is assured to be at the end of the linked hash map.
|
||||||
|
match self.facet_field_value_docids.get_refresh(&key) {
|
||||||
|
Some(old) => { old.insert(id); },
|
||||||
|
None => {
|
||||||
|
// A newly inserted element is append at the end of the linked hash map.
|
||||||
|
self.facet_field_value_docids.insert(key, RoaringBitmap::from_iter(Some(id)));
|
||||||
|
// If the word docids just reached it's capacity we must make sure to remove
|
||||||
|
// one element, this way next time we insert we doesn't grow the capacity.
|
||||||
|
if self.facet_field_value_docids.len() == self.facet_field_value_docids_limit {
|
||||||
|
// Removing the front element is equivalent to removing the LRU element.
|
||||||
|
Self::write_docid_facet_field_values(
|
||||||
|
&mut self.facet_field_value_docids_sorter,
|
||||||
|
self.facet_field_value_docids.pop_front(),
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
// Save the documents ids under the words pairs proximities that it contains.
|
// Save the documents ids under the words pairs proximities that it contains.
|
||||||
fn insert_words_pairs_proximities_docids<'a>(
|
fn insert_words_pairs_proximities_docids<'a>(
|
||||||
&mut self,
|
&mut self,
|
||||||
@ -187,7 +242,8 @@ impl Store {
|
|||||||
fn write_document(
|
fn write_document(
|
||||||
&mut self,
|
&mut self,
|
||||||
document_id: DocumentId,
|
document_id: DocumentId,
|
||||||
words_positions: &HashMap<String, SmallVec32<Position>>,
|
words_positions: &mut HashMap<String, SmallVec32<Position>>,
|
||||||
|
facet_values: &mut HashMap<u8, SmallVec8<FacetValue>>,
|
||||||
record: &[u8],
|
record: &[u8],
|
||||||
) -> anyhow::Result<()>
|
) -> anyhow::Result<()>
|
||||||
{
|
{
|
||||||
@ -196,13 +252,20 @@ impl Store {
|
|||||||
self.insert_words_pairs_proximities_docids(words_pair_proximities, document_id)?;
|
self.insert_words_pairs_proximities_docids(words_pair_proximities, document_id)?;
|
||||||
|
|
||||||
// We store document_id associated with all the words the record contains.
|
// We store document_id associated with all the words the record contains.
|
||||||
for (word, _) in words_positions {
|
for (word, _) in words_positions.drain() {
|
||||||
self.insert_word_docid(word, document_id)?;
|
self.insert_word_docid(&word, document_id)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
self.documents_writer.insert(document_id.to_be_bytes(), record)?;
|
self.documents_writer.insert(document_id.to_be_bytes(), record)?;
|
||||||
Self::write_docid_word_positions(&mut self.docid_word_positions_writer, document_id, words_positions)?;
|
Self::write_docid_word_positions(&mut self.docid_word_positions_writer, document_id, words_positions)?;
|
||||||
|
|
||||||
|
// We store document_id associated with all the field id and values.
|
||||||
|
for (field, values) in facet_values.drain() {
|
||||||
|
for value in values {
|
||||||
|
self.insert_facet_values_docid(field, value, document_id)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -263,6 +326,31 @@ impl Store {
         Ok(())
     }

+    fn write_docid_facet_field_values<I>(
+        sorter: &mut Sorter<MergeFn>,
+        iter: I,
+    ) -> anyhow::Result<()>
+    where I: IntoIterator<Item=((u8, FacetValue), RoaringBitmap)>
+    {
+        use FacetValue::*;
+
+        for ((field_id, value), docids) in iter {
+            let result = match value {
+                String(s) => FacetValueStringCodec::bytes_encode(&(field_id, &s)).map(Cow::into_owned),
+                Float(f) => FacetValueF64Codec::bytes_encode(&(field_id, *f)).map(Cow::into_owned),
+                Integer(i) => FacetValueI64Codec::bytes_encode(&(field_id, i)).map(Cow::into_owned),
+            };
+            let key = result.context("could not serialize facet key")?;
+            let bytes = CboRoaringBitmapCodec::bytes_encode(&docids)
+                .context("could not serialize docids")?;
+            if lmdb_key_valid_size(&key) {
+                sorter.insert(&key, &bytes)?;
+            }
+        }
+
+        Ok(())
+    }
+
     fn write_word_docids<I>(sorter: &mut Sorter<MergeFn>, iter: I) -> anyhow::Result<()>
     where I: IntoIterator<Item=(SmallVec32<u8>, RoaringBitmap)>
     {
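Note: the facet codecs named above (`FacetValueStringCodec`, `FacetValueF64Codec`, `FacetValueI64Codec`) serialize `(field_id, value)` pairs into LMDB keys, and range queries over facets require the byte order of those keys to match the natural order of the values. For floats this is usually achieved with an order-preserving bit transform; the sketch below shows that standard trick under stated assumptions (it is not milli's actual codec implementation):

```rust
// Standard order-preserving byte encoding for f64 keys: after the transform,
// lexicographic byte order matches numeric order (NaN handling left aside).
fn f64_into_ordered_bytes(f: f64) -> [u8; 8] {
    let bits = f.to_bits();
    // Positive floats: flip only the sign bit; negative floats: flip all bits.
    let ordered = if bits >> 63 == 0 { bits ^ (1u64 << 63) } else { !bits };
    ordered.to_be_bytes()
}

fn main() {
    let values = [-2.5f64, -0.0, 1.0, 42.0];
    let keys: Vec<[u8; 8]> = values.iter().map(|f| f64_into_ordered_bytes(*f)).collect();
    // Byte-wise order now matches numeric order.
    assert!(keys.windows(2).all(|w| w[0] <= w[1]));
}
```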
@@ -301,6 +389,7 @@ impl Store {

         let mut before = Instant::now();
         let mut words_positions = HashMap::new();
+        let mut facet_values = HashMap::new();

         let mut count: usize = 0;
         while let Some((key, value)) = documents.next()? {
@@ -320,27 +409,34 @@ impl Store {
             }

             for (attr, content) in document.iter() {
-                if !self.searchable_fields.contains(&attr) {
-                    continue;
-                }
-
-                let value = serde_json::from_slice(content)?;
-                let content = match json_to_string(value) {
-                    Some(content) => content,
-                    None => continue,
-                };
-
-                let tokens = simple_tokenizer(&content).filter_map(only_token);
-                for (pos, token) in tokens.enumerate().take(MAX_POSITION) {
-                    let word = token.to_lowercase();
-                    let position = (attr as usize * MAX_POSITION + pos) as u32;
-                    words_positions.entry(word).or_insert_with(SmallVec32::new).push(position);
+                if self.faceted_fields.contains_key(&attr) || self.searchable_fields.contains(&attr) {
+                    let value = serde_json::from_slice(content)?;
+
+                    if let Some(ftype) = self.faceted_fields.get(&attr) {
+                        let mut values = parse_facet_value(*ftype, &value).with_context(|| {
+                            format!("extracting facets from the value {}", value)
+                        })?;
+                        facet_values.entry(attr).or_insert_with(SmallVec8::new).extend(values.drain(..));
+                    }
+
+                    if self.searchable_fields.contains(&attr) {
+                        let content = match json_to_string(&value) {
+                            Some(content) => content,
+                            None => continue,
+                        };
+
+                        let tokens = simple_tokenizer(&content).filter_map(only_token);
+                        for (pos, token) in tokens.enumerate().take(MAX_POSITION) {
+                            let word = token.to_lowercase();
+                            let position = (attr as usize * MAX_POSITION + pos) as u32;
+                            words_positions.entry(word).or_insert_with(SmallVec32::new).push(position);
+                        }
+                    }
                 }
             }

             // We write the document in the documents store.
-            self.write_document(document_id, &words_positions, value)?;
-            words_positions.clear();
+            self.write_document(document_id, &mut words_positions, &mut facet_values, value)?;
         }

         // Compute the document id of the next document.
@@ -367,6 +463,10 @@ impl Store {
             &mut self.words_pairs_proximities_docids_sorter,
             self.words_pairs_proximities_docids,
         )?;
+        Self::write_docid_facet_field_values(
+            &mut self.facet_field_value_docids_sorter,
+            self.facet_field_value_docids,
+        )?;

         let mut word_docids_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
         let mut builder = fst::SetBuilder::memory();
@@ -388,9 +488,13 @@ impl Store {
         let mut words_pairs_proximities_docids_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
         self.words_pairs_proximities_docids_sorter.write_into(&mut words_pairs_proximities_docids_wtr)?;

+        let mut facet_field_value_docids_wtr = tempfile().and_then(|f| create_writer(comp_type, comp_level, f))?;
+        self.facet_field_value_docids_sorter.write_into(&mut facet_field_value_docids_wtr)?;
+
         let main = writer_into_reader(main_wtr, shrink_size)?;
         let word_docids = writer_into_reader(word_docids_wtr, shrink_size)?;
         let words_pairs_proximities_docids = writer_into_reader(words_pairs_proximities_docids_wtr, shrink_size)?;
+        let facet_field_value_docids = writer_into_reader(facet_field_value_docids_wtr, shrink_size)?;
         let docid_word_positions = writer_into_reader(self.docid_word_positions_writer, shrink_size)?;
         let documents = writer_into_reader(self.documents_writer, shrink_size)?;

@@ -399,6 +503,7 @@ impl Store {
             word_docids,
             docid_word_positions,
             words_pairs_proximities_docids,
+            facet_field_value_docids,
             documents,
         })
     }
@@ -444,3 +549,70 @@ fn format_count(n: usize) -> String {
 fn lmdb_key_valid_size(key: &[u8]) -> bool {
     !key.is_empty() && key.len() <= LMDB_MAX_KEY_LENGTH
 }
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum FacetValue {
+    String(SmallString32),
+    Float(OrderedFloat<f64>),
+    Integer(i64),
+}
+
+fn parse_facet_value(ftype: FacetType, value: &Value) -> anyhow::Result<SmallVec8<FacetValue>> {
+    use FacetValue::*;
+
+    fn inner_parse_facet_value(
+        ftype: FacetType,
+        value: &Value,
+        can_recurse: bool,
+        output: &mut SmallVec8<FacetValue>,
+    ) -> anyhow::Result<()>
+    {
+        match value {
+            Value::Null => Ok(()),
+            Value::Bool(b) => Ok(output.push(Integer(*b as i64))),
+            Value::Number(number) => match ftype {
+                FacetType::String => bail!("invalid facet type, expecting {} found number", ftype),
+                FacetType::Float => match number.as_f64() {
+                    Some(float) => Ok(output.push(Float(OrderedFloat(float)))),
+                    None => bail!("invalid facet type, expecting {} found integer", ftype),
+                },
+                FacetType::Integer => match number.as_i64() {
+                    Some(integer) => Ok(output.push(Integer(integer))),
+                    None => if number.is_f64() {
+                        bail!("invalid facet type, expecting {} found float", ftype)
+                    } else {
+                        bail!("invalid facet type, expecting {} found out-of-bound integer (64bit)", ftype)
+                    },
+                },
+            },
+            Value::String(string) => {
+                let string = string.trim();
+                if string.is_empty() { return Ok(()) }
+                match ftype {
+                    FacetType::String => {
+                        let string = SmallString32::from(string);
+                        Ok(output.push(String(string)))
+                    },
+                    FacetType::Float => match string.parse() {
+                        Ok(float) => Ok(output.push(Float(OrderedFloat(float)))),
+                        Err(_err) => bail!("invalid facet type, expecting {} found string", ftype),
+                    },
+                    FacetType::Integer => match string.parse() {
+                        Ok(integer) => Ok(output.push(Integer(integer))),
+                        Err(_err) => bail!("invalid facet type, expecting {} found string", ftype),
+                    },
+                }
+            },
+            Value::Array(values) => if can_recurse {
+                values.iter().map(|v| inner_parse_facet_value(ftype, v, false, output)).collect()
+            } else {
+                bail!("invalid facet type, expecting {} found sub-array", ftype)
+            },
+            Value::Object(_) => bail!("invalid facet type, expecting {} found object", ftype),
+        }
+    }
+
+    let mut facet_values = SmallVec8::new();
+    inner_parse_facet_value(ftype, value, true, &mut facet_values)?;
+    Ok(facet_values)
+}
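Note: `parse_facet_value` above accepts scalars and exactly one level of arrays, normalizes booleans to integers, and skips `null` and empty strings. A simplified, standalone mirror of that matching shape over `serde_json::Value` (integers only; this is an illustration, not the function above):

```rust
use serde_json::{json, Value};

// Simplified mirror of parse_facet_value: integers only, one level of arrays.
fn parse_integers(value: &Value, can_recurse: bool, out: &mut Vec<i64>) -> Result<(), String> {
    match value {
        Value::Null => Ok(()),
        Value::Bool(b) => { out.push(*b as i64); Ok(()) },
        Value::Number(n) => n.as_i64().map(|i| out.push(i)).ok_or_else(|| "not an integer".to_string()),
        Value::Array(values) if can_recurse => {
            values.iter().try_for_each(|v| parse_integers(v, false, out))
        },
        _ => Err("unsupported facet value".to_string()),
    }
}

fn main() {
    let mut out = Vec::new();
    parse_integers(&json!([21, true, null]), true, &mut out).unwrap();
    assert_eq!(out, vec![21, 1]);
    // Nested sub-arrays are rejected: only one level of nesting is allowed.
    assert!(parse_integers(&json!([[1]]), true, &mut Vec::new()).is_err());
}
```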
@@ -1,9 +1,13 @@
-use anyhow::Context;
+use std::collections::HashMap;
+use std::str::FromStr;
+
+use anyhow::{ensure, Context};
 use grenad::CompressionType;
 use rayon::ThreadPool;

 use crate::update::index_documents::{Transform, IndexDocumentsMethod};
 use crate::update::{ClearDocuments, IndexDocuments, UpdateIndexingStep};
+use crate::facet::FacetType;
 use crate::{Index, FieldsIdsMap};

 pub struct Settings<'a, 't, 'u, 'i> {
@@ -22,6 +26,7 @@ pub struct Settings<'a, 't, 'u, 'i> {
     // however if it is `Some(None)` it means that the user forced a reset of the setting.
     searchable_fields: Option<Option<Vec<String>>>,
     displayed_fields: Option<Option<Vec<String>>>,
+    faceted_fields: Option<HashMap<String, String>>,
 }

 impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
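Note: the settings fields use a double `Option` on purpose, as the comment above explains: the outer level records whether the user touched the setting at all, the inner level whether they set it or reset it. A tiny illustration of the three states (the helper below is hypothetical):

```rust
// Hypothetical helper showing the three states of a doubly-wrapped setting.
fn describe(setting: &Option<Option<Vec<String>>>) -> &'static str {
    match setting {
        None => "leave the setting unchanged",
        Some(None) => "reset the setting to its default",
        Some(Some(_)) => "replace the setting with the given values",
    }
}

fn main() {
    assert_eq!(describe(&None), "leave the setting unchanged");
    assert_eq!(describe(&Some(None)), "reset the setting to its default");
    assert_eq!(describe(&Some(Some(vec!["name".into()]))), "replace the setting with the given values");
}
```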
@@ -39,6 +44,7 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
             thread_pool: None,
             searchable_fields: None,
             displayed_fields: None,
+            faceted_fields: None,
         }
     }

@@ -58,71 +64,92 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
         self.displayed_fields = Some(Some(names));
     }

+    pub fn set_faceted_fields(&mut self, names_facet_types: HashMap<String, String>) {
+        self.faceted_fields = Some(names_facet_types);
+    }
+
     pub fn execute<F>(self, progress_callback: F) -> anyhow::Result<()>
     where
         F: Fn(UpdateIndexingStep) + Sync
     {
-        // Check that the searchable attributes have been specified.
-        if let Some(value) = self.searchable_fields {
-            let current_displayed_fields = self.index.displayed_fields(self.wtxn)?;
-            let current_fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
-
-            let result = match value {
-                Some(fields_names) => {
-                    let mut fields_ids_map = current_fields_ids_map.clone();
-                    let searchable_fields: Vec<_> =
-                        fields_names.iter()
-                            .map(|name| fields_ids_map.insert(name))
-                            .collect::<Option<Vec<_>>>()
-                            .context("field id limit reached")?;
-
-                    // If the searchable fields are ordered we don't have to generate a new `FieldsIdsMap`.
-                    if searchable_fields.windows(2).all(|win| win[0] < win[1]) {
-                        (
-                            fields_ids_map,
-                            Some(searchable_fields),
-                            current_displayed_fields.map(ToOwned::to_owned),
-                        )
-                    } else {
-                        // We create or generate the fields ids corresponding to those names.
-                        let mut fields_ids_map = FieldsIdsMap::new();
-                        let mut searchable_fields = Vec::new();
-                        for name in fields_names {
-                            let id = fields_ids_map.insert(&name).context("field id limit reached")?;
-                            searchable_fields.push(id);
-                        }
-
-                        // We complete the new FieldsIdsMap with the previous names.
-                        for (_id, name) in current_fields_ids_map.iter() {
-                            fields_ids_map.insert(name).context("field id limit reached")?;
-                        }
-
-                        // We must also update the displayed fields according to the new `FieldsIdsMap`.
-                        let displayed_fields = match current_displayed_fields {
-                            Some(fields) => {
-                                let mut displayed_fields = Vec::new();
-                                for id in fields {
-                                    let name = current_fields_ids_map.name(*id).unwrap();
-                                    let id = fields_ids_map.id(name).context("field id limit reached")?;
-                                    displayed_fields.push(id);
-                                }
-                                Some(displayed_fields)
-                            },
-                            None => None,
-                        };
-
-                        (fields_ids_map, Some(searchable_fields), displayed_fields)
-                    }
-                },
-                None => (
-                    current_fields_ids_map.clone(),
-                    None,
-                    current_displayed_fields.map(ToOwned::to_owned),
-                ),
-            };
-
-            let (mut fields_ids_map, searchable_fields, displayed_fields) = result;
+        let mut updated_searchable_fields = None;
+        let mut updated_faceted_fields = None;
+        let mut updated_displayed_fields = None;
+
+        // Construct the new FieldsIdsMap based on the searchable fields order.
+        let fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
+        let mut fields_ids_map = match self.searchable_fields {
+            Some(Some(searchable_fields)) => {
+                let mut new_fields_ids_map = FieldsIdsMap::new();
+                let mut new_searchable_fields = Vec::new();
+
+                for name in searchable_fields {
+                    let id = new_fields_ids_map.insert(&name).context("field id limit reached")?;
+                    new_searchable_fields.push(id);
+                }
+
+                // We complete the new FieldsIdsMap with the previous names.
+                for (_, name) in fields_ids_map.iter() {
+                    new_fields_ids_map.insert(name).context("field id limit reached")?;
+                }
+
+                updated_searchable_fields = Some(Some(new_searchable_fields));
+                new_fields_ids_map
+            },
+            Some(None) => {
+                updated_searchable_fields = Some(None);
+                fields_ids_map
+            },
+            None => fields_ids_map,
+        };
+
+        // We compute or generate the new primary key field id.
+        // TODO make the primary key settable.
+        let primary_key = match self.index.primary_key(&self.wtxn)? {
+            Some(id) => {
+                let current_fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
+                let name = current_fields_ids_map.name(id).unwrap();
+                fields_ids_map.insert(name).context("field id limit reached")?
+            },
+            None => fields_ids_map.insert("id").context("field id limit reached")?,
+        };
+
+        if let Some(fields_names_facet_types) = self.faceted_fields {
+            let current_faceted_fields = self.index.faceted_fields(self.wtxn)?;
+
+            let mut faceted_fields = HashMap::new();
+            for (name, sftype) in fields_names_facet_types {
+                let ftype = FacetType::from_str(&sftype).with_context(|| format!("parsing facet type {:?}", sftype))?;
+                let id = fields_ids_map.insert(&name).context("field id limit reached")?;
+                match current_faceted_fields.get(&id) {
+                    Some(pftype) => {
+                        ensure!(ftype == *pftype, "{} facet type changed from {} to {}", name, ftype, pftype);
+                        faceted_fields.insert(id, ftype)
+                    },
+                    None => faceted_fields.insert(id, ftype),
+                };
+            }
+
+            updated_faceted_fields = Some(faceted_fields);
+        }
+
+        // Check that the displayed attributes have been specified.
+        if let Some(value) = self.displayed_fields {
+            match value {
+                Some(names) => {
+                    let mut new_displayed_fields = Vec::new();
+                    for name in names {
+                        let id = fields_ids_map.insert(&name).context("field id limit reached")?;
+                        new_displayed_fields.push(id);
+                    }
+                    updated_displayed_fields = Some(Some(new_displayed_fields));
+                },
+                None => updated_displayed_fields = Some(None),
+            }
+        }
+
+        // If any setting has modified any of the data structures, we need to retrieve
+        // the documents and then reindex them with the new settings.
+        if updated_searchable_fields.is_some() || updated_faceted_fields.is_some() {
             let transform = Transform {
                 rtxn: &self.wtxn,
                 index: self.index,
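Note: `FacetType::from_str` turns the user-supplied type names into the `FacetType` enum, and `ensure!` aborts the update when a field's facet type would change. A minimal stand-in for that parsing, assuming the accepted names are `"string"`, `"float"`, and `"integer"` as in the test below (this is not milli's actual enum, which also implements `Display` for the error messages):

```rust
use std::str::FromStr;

// Minimal stand-in for FacetType parsing; the accepted names are an assumption.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum FacetType { String, Float, Integer }

impl FromStr for FacetType {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.trim().to_lowercase().as_str() {
            "string" => Ok(FacetType::String),
            "float" => Ok(FacetType::Float),
            "integer" => Ok(FacetType::Integer),
            other => Err(format!("unknown facet type {:?}", other)),
        }
    }
}

fn main() {
    assert_eq!("integer".parse::<FacetType>(), Ok(FacetType::Integer));
    assert!("datetime".parse::<FacetType>().is_err());
}
```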
@@ -136,15 +163,6 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
                 autogenerate_docids: false,
             };

-            // We compute or generate the new primary key field id.
-            let primary_key = match self.index.primary_key(&self.wtxn)? {
-                Some(id) => {
-                    let name = current_fields_ids_map.name(id).unwrap();
-                    fields_ids_map.insert(name).context("field id limit reached")?
-                },
-                None => fields_ids_map.insert("id").context("field id limit reached")?,
-            };
-
             // We remap the documents fields based on the new `FieldsIdsMap`.
             let output = transform.remap_index_documents(primary_key, fields_ids_map.clone())?;

@@ -152,18 +170,18 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
             // this way next indexing methods will be based on that.
             self.index.put_fields_ids_map(self.wtxn, &fields_ids_map)?;

-            // The new searchable fields are also written down to make sure
-            // that the IndexDocuments system takes only these ones into account.
-            match searchable_fields {
-                Some(fields) => self.index.put_searchable_fields(self.wtxn, &fields)?,
-                None => self.index.delete_searchable_fields(self.wtxn).map(drop)?,
-            }
-
-            // We write the displayed fields into the database here
-            // to make sure that the right fields are displayed.
-            match displayed_fields {
-                Some(fields) => self.index.put_displayed_fields(self.wtxn, &fields)?,
-                None => self.index.delete_displayed_fields(self.wtxn).map(drop)?,
-            }
+            if let Some(faceted_fields) = updated_faceted_fields {
+                // We write the faceted fields into the database here.
+                self.index.put_faceted_fields(self.wtxn, &faceted_fields)?;
+            }
+
+            if let Some(searchable_fields) = updated_searchable_fields {
+                // The new searchable fields are also written down to make sure
+                // that the IndexDocuments system takes only these ones into account.
+                match searchable_fields {
+                    Some(fields) => self.index.put_searchable_fields(self.wtxn, &fields)?,
+                    None => self.index.delete_searchable_fields(self.wtxn).map(drop)?,
+                }
+            }

             // We clear the full database (words-fst, documents ids and documents content).
@@ -180,33 +198,15 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
             indexing_builder.chunk_compression_level = self.chunk_compression_level;
             indexing_builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;
             indexing_builder.thread_pool = self.thread_pool;
-            indexing_builder.execute_raw(output, progress_callback)?;
+            indexing_builder.execute_raw(output, &progress_callback)?;
         }

-        // Check that the displayed attributes have been specified.
-        if let Some(value) = self.displayed_fields {
-            match value {
-                // If it has been set, and it was a list of fields names, we create
-                // or generate the fields ids corresponding to those names and store them
-                // in the database in the order they were specified.
-                Some(fields_names) => {
-                    let mut fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
-
-                    // We create or generate the fields ids corresponding to those names.
-                    let mut fields_ids = Vec::new();
-                    for name in fields_names {
-                        let id = fields_ids_map.insert(&name).context("field id limit reached")?;
-                        fields_ids.push(id);
-                    }
-
-                    self.index.put_displayed_fields(self.wtxn, &fields_ids)?;
-                },
-                // If it was set to `null` it means that the user wants the default behavior,
-                // which is displaying all the attributes in no specific order (FieldsIdsMap order):
-                // we just have to delete the displayed fields.
-                None => {
-                    self.index.delete_displayed_fields(self.wtxn)?;
-                },
-            }
+        if let Some(displayed_fields) = updated_displayed_fields {
+            // We write the displayed fields into the database here
+            // to make sure that the right fields are displayed.
+            match displayed_fields {
+                Some(fields) => self.index.put_displayed_fields(self.wtxn, &fields)?,
+                None => self.index.delete_displayed_fields(self.wtxn).map(drop)?,
+            }
         }

@@ -219,6 +219,7 @@ mod tests {
     use super::*;
     use crate::update::{IndexDocuments, UpdateFormat};
     use heed::EnvOpenOptions;
+    use maplit::hashmap;

     #[test]
     fn set_and_reset_searchable_fields() {
@@ -386,4 +387,31 @@ mod tests {
         assert_eq!(fields_ids, None);
         drop(rtxn);
     }
+
+    #[test]
+    fn set_faceted_fields() {
+        let path = tempfile::tempdir().unwrap();
+        let mut options = EnvOpenOptions::new();
+        options.map_size(10 * 1024 * 1024); // 10 MB
+        let index = Index::new(options, &path).unwrap();
+
+        // Set the faceted fields to be the age.
+        let mut wtxn = index.write_txn().unwrap();
+        let mut builder = Settings::new(&mut wtxn, &index);
+        builder.set_faceted_fields(hashmap!{ "age".into() => "integer".into() });
+        builder.execute(|_| ()).unwrap();
+
+        // Then index some documents.
+        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
+        let mut builder = IndexDocuments::new(&mut wtxn, &index);
+        builder.update_format(UpdateFormat::Csv);
+        builder.execute(content, |_| ()).unwrap();
+        wtxn.commit().unwrap();
+
+        // Check that the faceted fields are correctly set.
+        let rtxn = index.read_txn().unwrap();
+        let fields_ids = index.faceted_fields(&rtxn).unwrap();
+        assert_eq!(fields_ids, hashmap!{ 1 => FacetType::Integer });
+        drop(rtxn);
+    }
 }
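Note: the final assertion expects field id 1 because the CSV header registers `name` first (field id 0) and `age` second (field id 1). The `maplit::hashmap!` dev-dependency added in this commit builds literal `HashMap`s, as a quick illustration:

```rust
use maplit::hashmap;

fn main() {
    // hashmap! expands to HashMap inserts of the listed pairs.
    let expected = hashmap!{ "age" => 1u8 };
    assert_eq!(expected.get("age"), Some(&1u8));
}
```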