Merge remote-tracking branch 'origin/main' into facet-levels-refactor

Loïc Lecrenier 2022-10-26 15:13:34 +02:00
commit 54c0cf93fe
35 changed files with 132 additions and 149 deletions

View File

@@ -1,6 +1,6 @@
 [package]
 name = "benchmarks"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2018"
 publish = false

View File

@@ -1,6 +1,6 @@
 [package]
 name = "cli"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2018"
 description = "A CLI to interact with a milli index"
 publish = false

View File

@@ -1,6 +1,6 @@
 [package]
 name = "filter-parser"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2021"
 description = "The parser for the Meilisearch filter syntax"
 publish = false

View File

@@ -1,6 +1,6 @@
 [package]
 name = "flatten-serde-json"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2021"
 description = "Flatten serde-json objects like elastic search"
 readme = "README.md"

View File

@@ -1,6 +1,6 @@
 [package]
 name = "json-depth-checker"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2021"
 description = "A library that indicates if a JSON must be flattened"
 publish = false

View File

@@ -1,6 +1,6 @@
 [package]
 name = "milli"
-version = "0.34.0"
+version = "0.35.0"
 authors = ["Kerollmops <clement@meilisearch.com>"]
 edition = "2018"

View File

@@ -70,7 +70,7 @@ impl FromStr for Member {
     type Err = AscDescError;

     fn from_str(text: &str) -> Result<Member, Self::Err> {
-        match text.strip_prefix("_geoPoint(").and_then(|text| text.strip_suffix(")")) {
+        match text.strip_prefix("_geoPoint(").and_then(|text| text.strip_suffix(')')) {
             Some(point) => {
                 let (lat, lng) = point
                     .split_once(',')

View File

@@ -60,7 +60,7 @@ impl<W: Write> DocumentsBatchBuilder<W> {
     /// Appends a new JSON object into the batch and updates the `DocumentsBatchIndex` accordingly.
     pub fn append_json_object(&mut self, object: &Object) -> io::Result<()> {
         // Make sure that we insert the fields ids in order as the obkv writer has this requirement.
-        let mut fields_ids: Vec<_> = object.keys().map(|k| self.fields_index.insert(&k)).collect();
+        let mut fields_ids: Vec<_> = object.keys().map(|k| self.fields_index.insert(k)).collect();
         fields_ids.sort_unstable();

         self.obkv_buffer.clear();

View File

@@ -25,9 +25,9 @@ const DOCUMENTS_BATCH_INDEX_KEY: [u8; 8] = u64::MAX.to_be_bytes();
 pub fn obkv_to_object(obkv: &KvReader<FieldId>, index: &DocumentsBatchIndex) -> Result<Object> {
     obkv.iter()
         .map(|(field_id, value)| {
-            let field_name = index.name(field_id).ok_or_else(|| {
-                FieldIdMapMissingEntry::FieldId { field_id, process: "obkv_to_object" }
-            })?;
+            let field_name = index
+                .name(field_id)
+                .ok_or(FieldIdMapMissingEntry::FieldId { field_id, process: "obkv_to_object" })?;
             let value = serde_json::from_slice(value).map_err(InternalError::SerdeJson)?;
             Ok((field_name.to_string(), value))
         })

View File

@@ -65,7 +65,7 @@ impl FieldsIdsMap {
     }

     /// Iterate over the ids in the order of the ids.
-    pub fn ids<'a>(&'a self) -> impl Iterator<Item = FieldId> + 'a {
+    pub fn ids(&'_ self) -> impl Iterator<Item = FieldId> + '_ {
         self.ids_names.keys().copied()
     }

View File

@@ -202,7 +202,7 @@ impl Index {
     pub fn new<P: AsRef<Path>>(options: heed::EnvOpenOptions, path: P) -> Result<Index> {
         let now = OffsetDateTime::now_utc();
-        Self::new_with_creation_dates(options, path, now.clone(), now)
+        Self::new_with_creation_dates(options, path, now, now)
     }

     fn set_creation_dates(
@@ -322,7 +322,7 @@ impl Index {
     /// Writes the documents primary key, this is the field name that is used to store the id.
     pub(crate) fn put_primary_key(&self, wtxn: &mut RwTxn, primary_key: &str) -> heed::Result<()> {
         self.set_updated_at(wtxn, &OffsetDateTime::now_utc())?;
-        self.main.put::<_, Str, Str>(wtxn, main_key::PRIMARY_KEY_KEY, &primary_key)
+        self.main.put::<_, Str, Str>(wtxn, main_key::PRIMARY_KEY_KEY, primary_key)
     }

     /// Deletes the primary key of the documents, this can be done to reset indexes settings.
@@ -985,7 +985,7 @@ impl Index {
             let kv = self
                 .documents
                 .get(rtxn, &BEU32::new(id))?
-                .ok_or_else(|| UserError::UnknownInternalDocumentId { document_id: id })?;
+                .ok_or(UserError::UnknownInternalDocumentId { document_id: id })?;
             documents.push((id, kv));
         }
@@ -1044,7 +1044,7 @@ impl Index {
         wtxn: &mut RwTxn,
         time: &OffsetDateTime,
     ) -> heed::Result<()> {
-        self.main.put::<_, Str, SerdeJson<OffsetDateTime>>(wtxn, main_key::UPDATED_AT_KEY, &time)
+        self.main.put::<_, Str, SerdeJson<OffsetDateTime>>(wtxn, main_key::UPDATED_AT_KEY, time)
     }

     pub fn authorize_typos(&self, txn: &RoTxn) -> heed::Result<bool> {

View File

@@ -1,5 +1,6 @@
 #![cfg_attr(all(test, fuzzing), feature(no_coverage))]
+#![allow(clippy::reversed_empty_ranges)]
+#![allow(clippy::too_many_arguments)]
 #[macro_use]
 pub mod documents;

View File

@@ -120,7 +120,7 @@ impl<'t> Criterion for AscDesc<'t> {
                 let mut candidates = match (&self.query_tree, candidates) {
                     (_, Some(candidates)) => candidates,
                     (Some(qt), None) => {
-                        let context = CriteriaBuilder::new(&self.rtxn, &self.index)?;
+                        let context = CriteriaBuilder::new(self.rtxn, self.index)?;
                         resolve_query_tree(&context, qt, params.wdcache)?
                     }
                     (None, None) => self.index.documents_ids(self.rtxn)?,

View File

@@ -89,7 +89,7 @@ impl<'t> Criterion for Attribute<'t> {
                         }
                     }
                 } else {
-                    let mut set_buckets = match self.set_buckets.as_mut() {
+                    let set_buckets = match self.set_buckets.as_mut() {
                         Some(set_buckets) => set_buckets,
                         None => {
                             let new_buckets = initialize_set_buckets(
@@ -102,7 +102,7 @@ impl<'t> Criterion for Attribute<'t> {
                        }
                    };

-                    match set_compute_candidates(&mut set_buckets, &allowed_candidates)? {
+                    match set_compute_candidates(set_buckets, &allowed_candidates)? {
                        Some((_score, candidates)) => candidates,
                        None => {
                            return Ok(Some(CriterionResult {
@@ -199,18 +199,18 @@ impl<'t> QueryPositionIterator<'t> {
                     let iter = ctx.word_position_iterator(word, in_prefix_cache)?;
                     inner.push(iter.peekable());
                 } else {
-                    for (word, _) in word_derivations(&word, true, 0, ctx.words_fst(), wdcache)?
+                    for (word, _) in word_derivations(word, true, 0, ctx.words_fst(), wdcache)?
                     {
-                        let iter = ctx.word_position_iterator(&word, in_prefix_cache)?;
+                        let iter = ctx.word_position_iterator(word, in_prefix_cache)?;
                         inner.push(iter.peekable());
                     }
                 }
             }
             QueryKind::Tolerant { typo, word } => {
                 for (word, _) in
-                    word_derivations(&word, query.prefix, *typo, ctx.words_fst(), wdcache)?
+                    word_derivations(word, query.prefix, *typo, ctx.words_fst(), wdcache)?
                 {
-                    let iter = ctx.word_position_iterator(&word, in_prefix_cache)?;
+                    let iter = ctx.word_position_iterator(word, in_prefix_cache)?;
                     inner.push(iter.peekable());
                 }
             }
@@ -476,8 +476,7 @@ fn initialize_linear_buckets(
                     } else {
                         words_positions
                             .get(word)
-                            .map(|positions| positions.iter().next())
-                            .flatten()
+                            .and_then(|positions| positions.iter().next())
                     }
                 }
                 QueryKind::Tolerant { typo, word } => {
@@ -574,7 +573,7 @@ fn flatten_query_tree(query_tree: &Operation) -> FlattenedQueryTree {
             if ops.iter().all(|op| op.query().is_some()) {
                 vec![vec![ops.iter().flat_map(|op| op.query()).cloned().collect()]]
             } else {
-                ops.iter().map(recurse).flatten().collect()
+                ops.iter().flat_map(recurse).collect()
             }
         }
         Phrase(words) => {

View File

@@ -90,7 +90,7 @@ impl Criterion for Geo<'_> {
         let mut candidates = match (&query_tree, candidates) {
             (_, Some(candidates)) => candidates,
             (Some(qt), None) => {
-                let context = CriteriaBuilder::new(&self.rtxn, &self.index)?;
+                let context = CriteriaBuilder::new(self.rtxn, self.index)?;
                 resolve_query_tree(&context, qt, params.wdcache)?
             }
             (None, None) => self.index.documents_ids(self.rtxn)?,

View File

@@ -44,7 +44,7 @@ impl<D: Distinct> Criterion for Initial<'_, D> {
                     let mut candidates = resolve_query_tree(
                         self.ctx,
                         answer.query_tree.as_ref().unwrap(),
-                        &mut params.wdcache,
+                        params.wdcache,
                     )?;

                     // Apply the filters on the documents retrieved with the query tree.

View File

@@ -186,19 +186,19 @@ impl<'c> Context<'c> for CriteriaBuilder<'c> {
     }

     fn word_docids(&self, word: &str) -> heed::Result<Option<RoaringBitmap>> {
-        self.index.word_docids.get(self.rtxn, &word)
+        self.index.word_docids.get(self.rtxn, word)
     }

     fn exact_word_docids(&self, word: &str) -> heed::Result<Option<RoaringBitmap>> {
-        self.index.exact_word_docids.get(self.rtxn, &word)
+        self.index.exact_word_docids.get(self.rtxn, word)
     }

     fn word_prefix_docids(&self, word: &str) -> heed::Result<Option<RoaringBitmap>> {
-        self.index.word_prefix_docids.get(self.rtxn, &word)
+        self.index.word_prefix_docids.get(self.rtxn, word)
     }

     fn exact_word_prefix_docids(&self, word: &str) -> heed::Result<Option<RoaringBitmap>> {
-        self.index.exact_word_prefix_docids.get(self.rtxn, &word)
+        self.index.exact_word_prefix_docids.get(self.rtxn, word)
     }

     fn word_pair_proximity_docids(
@@ -321,7 +321,7 @@ impl<'t> CriteriaBuilder<'t> {
                 exhaustive_number_hits,
                 distinct,
             )) as Box<dyn Criterion>;
-            for name in self.index.criteria(&self.rtxn)? {
+            for name in self.index.criteria(self.rtxn)? {
                 criterion = match name {
                     Name::Words => Box::new(Words::new(self, criterion)),
                     Name::Typo => Box::new(Typo::new(self, criterion)),
@@ -330,29 +330,23 @@
             for asc_desc in sort_criteria {
                 criterion = match asc_desc {
                     AscDescName::Asc(Member::Field(field)) => Box::new(AscDesc::asc(
-                        &self.index,
-                        &self.rtxn,
+                        self.index,
+                        self.rtxn,
                         criterion,
                         field.to_string(),
                     )?),
                     AscDescName::Desc(Member::Field(field)) => Box::new(AscDesc::desc(
-                        &self.index,
-                        &self.rtxn,
+                        self.index,
+                        self.rtxn,
                         criterion,
                         field.to_string(),
                     )?),
-                    AscDescName::Asc(Member::Geo(point)) => Box::new(Geo::asc(
-                        &self.index,
-                        &self.rtxn,
-                        criterion,
-                        point.clone(),
-                    )?),
-                    AscDescName::Desc(Member::Geo(point)) => Box::new(Geo::desc(
-                        &self.index,
-                        &self.rtxn,
-                        criterion,
-                        point.clone(),
-                    )?),
+                    AscDescName::Asc(Member::Geo(point)) => {
+                        Box::new(Geo::asc(self.index, self.rtxn, criterion, *point)?)
+                    }
+                    AscDescName::Desc(Member::Geo(point)) => {
+                        Box::new(Geo::desc(self.index, self.rtxn, criterion, *point)?)
+                    }
                 };
             }
             criterion
@@ -363,10 +357,10 @@ impl<'t> CriteriaBuilder<'t> {
                     Name::Attribute => Box::new(Attribute::new(self, criterion)),
                     Name::Exactness => Box::new(Exactness::new(self, criterion, &primitive_query)?),
                     Name::Asc(field) => {
-                        Box::new(AscDesc::asc(&self.index, &self.rtxn, criterion, field)?)
+                        Box::new(AscDesc::asc(self.index, self.rtxn, criterion, field)?)
                     }
                     Name::Desc(field) => {
-                        Box::new(AscDesc::desc(&self.index, &self.rtxn, criterion, field)?)
+                        Box::new(AscDesc::desc(self.index, self.rtxn, criterion, field)?)
                     }
                 };
             }
@@ -408,7 +402,7 @@ pub fn resolve_query_tree(
             }
             Ok(candidates)
         }
-        Phrase(words) => resolve_phrase(ctx, &words),
+        Phrase(words) => resolve_phrase(ctx, words),
         Or(_, ops) => {
             let mut candidates = RoaringBitmap::new();
             for op in ops {
@@ -457,7 +451,7 @@ pub fn resolve_phrase(ctx: &dyn Context, phrase: &[String]) -> Result<RoaringBit
     }

     // We sort the bitmaps so that we perform the small intersections first, which is faster.
-    bitmaps.sort_unstable_by(|a, b| a.len().cmp(&b.len()));
+    bitmaps.sort_unstable_by_key(|a| a.len());

     for bitmap in bitmaps {
         if first_iter {
@@ -500,40 +494,40 @@ fn query_docids(
 ) -> Result<RoaringBitmap> {
     match &query.kind {
         QueryKind::Exact { word, original_typo } => {
-            if query.prefix && ctx.in_prefix_cache(&word) {
-                let mut docids = ctx.word_prefix_docids(&word)?.unwrap_or_default();
+            if query.prefix && ctx.in_prefix_cache(word) {
+                let mut docids = ctx.word_prefix_docids(word)?.unwrap_or_default();
                 // only add the exact docids if the word hasn't been derived
                 if *original_typo == 0 {
-                    docids |= ctx.exact_word_prefix_docids(&word)?.unwrap_or_default();
+                    docids |= ctx.exact_word_prefix_docids(word)?.unwrap_or_default();
                 }
                 Ok(docids)
             } else if query.prefix {
-                let words = word_derivations(&word, true, 0, ctx.words_fst(), wdcache)?;
+                let words = word_derivations(word, true, 0, ctx.words_fst(), wdcache)?;
                 let mut docids = RoaringBitmap::new();
                 for (word, _typo) in words {
-                    docids |= ctx.word_docids(&word)?.unwrap_or_default();
+                    docids |= ctx.word_docids(word)?.unwrap_or_default();
                     // only add the exact docids if the word hasn't been derived
                     if *original_typo == 0 {
-                        docids |= ctx.exact_word_docids(&word)?.unwrap_or_default();
+                        docids |= ctx.exact_word_docids(word)?.unwrap_or_default();
                     }
                 }
                 Ok(docids)
             } else {
-                let mut docids = ctx.word_docids(&word)?.unwrap_or_default();
+                let mut docids = ctx.word_docids(word)?.unwrap_or_default();
                 // only add the exact docids if the word hasn't been derived
                 if *original_typo == 0 {
-                    docids |= ctx.exact_word_docids(&word)?.unwrap_or_default();
+                    docids |= ctx.exact_word_docids(word)?.unwrap_or_default();
                 }
                 Ok(docids)
             }
         }
         QueryKind::Tolerant { typo, word } => {
-            let words = word_derivations(&word, query.prefix, *typo, ctx.words_fst(), wdcache)?;
+            let words = word_derivations(word, query.prefix, *typo, ctx.words_fst(), wdcache)?;
             let mut docids = RoaringBitmap::new();
             for (word, typo) in words {
-                let mut current_docids = ctx.word_docids(&word)?.unwrap_or_default();
+                let mut current_docids = ctx.word_docids(word)?.unwrap_or_default();
                 if *typo == 0 {
-                    current_docids |= ctx.exact_word_docids(&word)?.unwrap_or_default()
+                    current_docids |= ctx.exact_word_docids(word)?.unwrap_or_default()
                 }
                 docids |= current_docids;
             }
@@ -568,11 +562,11 @@ fn query_pair_proximity_docids(
             )? {
                 Some(docids) => Ok(docids),
                 None => {
-                    let r_words = word_derivations(&right, true, 0, ctx.words_fst(), wdcache)?;
+                    let r_words = word_derivations(right, true, 0, ctx.words_fst(), wdcache)?;
                     all_word_pair_overall_proximity_docids(
                         ctx,
                         &[(left, 0)],
-                        &r_words,
+                        r_words,
                         proximity,
                     )
                 }
@@ -585,7 +579,7 @@ fn query_pair_proximity_docids(
         }
         (QueryKind::Tolerant { typo, word: left }, QueryKind::Exact { word: right, .. }) => {
             let l_words =
-                word_derivations(&left, false, *typo, ctx.words_fst(), wdcache)?.to_owned();
+                word_derivations(left, false, *typo, ctx.words_fst(), wdcache)?.to_owned();
             if prefix {
                 let mut docids = RoaringBitmap::new();
                 for (left, _) in l_words {
@@ -598,11 +592,11 @@ fn query_pair_proximity_docids(
                         Some(docids) => Ok(docids),
                         None => {
                             let r_words =
-                                word_derivations(&right, true, 0, ctx.words_fst(), wdcache)?;
+                                word_derivations(right, true, 0, ctx.words_fst(), wdcache)?;
                             all_word_pair_overall_proximity_docids(
                                 ctx,
                                 &[(left, 0)],
-                                &r_words,
+                                r_words,
                                 proximity,
                             )
                         }
@@ -615,17 +609,17 @@ fn query_pair_proximity_docids(
             }
         }
         (QueryKind::Exact { word: left, .. }, QueryKind::Tolerant { typo, word: right }) => {
-            let r_words = word_derivations(&right, prefix, *typo, ctx.words_fst(), wdcache)?;
-            all_word_pair_overall_proximity_docids(ctx, &[(left, 0)], &r_words, proximity)
+            let r_words = word_derivations(right, prefix, *typo, ctx.words_fst(), wdcache)?;
+            all_word_pair_overall_proximity_docids(ctx, &[(left, 0)], r_words, proximity)
         }
         (
             QueryKind::Tolerant { typo: l_typo, word: left },
             QueryKind::Tolerant { typo: r_typo, word: right },
         ) => {
             let l_words =
-                word_derivations(&left, false, *l_typo, ctx.words_fst(), wdcache)?.to_owned();
-            let r_words = word_derivations(&right, prefix, *r_typo, ctx.words_fst(), wdcache)?;
-            all_word_pair_overall_proximity_docids(ctx, &l_words, &r_words, proximity)
+                word_derivations(left, false, *l_typo, ctx.words_fst(), wdcache)?.to_owned();
+            let r_words = word_derivations(right, prefix, *r_typo, ctx.words_fst(), wdcache)?;
+            all_word_pair_overall_proximity_docids(ctx, &l_words, r_words, proximity)
         }
     }
 }

View File

@@ -99,7 +99,7 @@ impl<'t> Criterion for Proximity<'t> {
                         // use set theory based algorithm
                         resolve_candidates(
                             self.ctx,
-                            &query_tree,
+                            query_tree,
                             self.proximity,
                             &mut self.candidates_cache,
                             params.wdcache,
@@ -194,7 +194,7 @@ fn resolve_candidates<'t>(
                 .map(|w| Query { prefix: false, kind: QueryKind::exact(w.clone()) });

             match (most_left, most_right) {
-                (Some(l), Some(r)) => vec![(l, r, resolve_phrase(ctx, &words)?)],
+                (Some(l), Some(r)) => vec![(l, r, resolve_phrase(ctx, words)?)],
                 _otherwise => Default::default(),
             }
         } else {
@@ -496,7 +496,7 @@ fn resolve_plane_sweep_candidates(
             match kind {
                 QueryKind::Exact { word, .. } => {
                     if *prefix {
-                        let iter = word_derivations(word, true, 0, &words_positions)
+                        let iter = word_derivations(word, true, 0, words_positions)
                             .flat_map(|positions| positions.iter().map(|p| (p, 0, p)));
                         result.extend(iter);
                     } else if let Some(positions) = words_positions.get(word) {
@@ -504,7 +504,7 @@ fn resolve_plane_sweep_candidates(
                     }
                 }
                 QueryKind::Tolerant { typo, word } => {
-                    let iter = word_derivations(word, *prefix, *typo, &words_positions)
+                    let iter = word_derivations(word, *prefix, *typo, words_positions)
                         .flat_map(|positions| positions.iter().map(|p| (p, 0, p)));
                     result.extend(iter);
                 }

View File

@@ -69,7 +69,7 @@ impl<'t> Criterion for Typo<'t> {
                         let fst = self.ctx.words_fst();
                         let new_query_tree = match self.typos {
                             typos if typos < MAX_TYPOS_PER_WORD => alterate_query_tree(
-                                &fst,
+                                fst,
                                 query_tree.clone(),
                                 self.typos,
                                 params.wdcache,
@@ -78,7 +78,7 @@ impl<'t> Criterion for Typo<'t> {
                                 // When typos >= MAX_TYPOS_PER_WORD, no more alteration of the query tree is possible,
                                 // we keep the altered query tree
                                 *query_tree = alterate_query_tree(
-                                    &fst,
+                                    fst,
                                     query_tree.clone(),
                                     self.typos,
                                     params.wdcache,
@@ -199,7 +199,7 @@ fn alterate_query_tree(
                 ops.iter_mut().try_for_each(|op| recurse(words_fst, op, number_typos, wdcache))
             }
             // Because Phrases don't allow typos, no alteration can be done.
-            Phrase(_words) => return Ok(()),
+            Phrase(_words) => Ok(()),
             Operation::Query(q) => {
                 if let QueryKind::Tolerant { typo, word } = &q.kind {
                     // if no typo is allowed we don't call word_derivations function,

View File

@@ -53,10 +53,7 @@ impl<'t> Criterion for Words<'t> {
                     None => None,
                 };

-                let bucket_candidates = match self.bucket_candidates.as_mut() {
-                    Some(bucket_candidates) => Some(take(bucket_candidates)),
-                    None => None,
-                };
+                let bucket_candidates = self.bucket_candidates.as_mut().map(take);

                 return Ok(Some(CriterionResult {
                     query_tree: Some(query_tree),

View File

@@ -69,7 +69,7 @@ impl<'a> FacetDistribution<'a> {
     ) -> heed::Result<()> {
         match facet_type {
             FacetType::Number => {
-                let mut key_buffer: Vec<_> = field_id.to_be_bytes().iter().copied().collect();
+                let mut key_buffer: Vec<_> = field_id.to_be_bytes().to_vec();

                 let distribution_prelength = distribution.len();
                 let db = self.index.field_id_docid_facet_f64s;
@@ -94,7 +94,7 @@ impl<'a> FacetDistribution<'a> {
             }
             FacetType::String => {
                 let mut normalized_distribution = BTreeMap::new();
-                let mut key_buffer: Vec<_> = field_id.to_be_bytes().iter().copied().collect();
+                let mut key_buffer: Vec<_> = field_id.to_be_bytes().to_vec();

                 let db = self.index.field_id_docid_facet_strings;
                 'outer: for docid in candidates.into_iter() {

View File

@@ -95,7 +95,7 @@ impl<'a> Filter<'a> {
                 Either::Left(array) => {
                     let mut ors = vec![];
                     for rule in array {
-                        if let Some(filter) = Self::from_str(rule.as_ref())? {
+                        if let Some(filter) = Self::from_str(rule)? {
                             ors.push(filter.condition);
                         }
                     }
@@ -107,7 +107,7 @@
                     }
                 }
                 Either::Right(rule) => {
-                    if let Some(filter) = Self::from_str(rule.as_ref())? {
+                    if let Some(filter) = Self::from_str(rule)? {
                         ands.push(filter.condition);
                     }
                 }
@@ -300,7 +300,7 @@
                     index,
                     filterable_fields,
                 )?;
-                return Ok(all_ids - selected);
+                Ok(all_ids - selected)
             }
             FilterCondition::In { fid, els } => {
                 if crate::is_faceted(fid.value(), filterable_fields) {
@@ -319,38 +319,36 @@
                         Ok(RoaringBitmap::new())
                     }
                 } else {
-                    return Err(fid.as_external_error(FilterError::AttributeNotFilterable {
+                    Err(fid.as_external_error(FilterError::AttributeNotFilterable {
                         attribute: fid.value(),
                         filterable_fields: filterable_fields.clone(),
-                    }))?;
+                    }))?
                 }
             }
             FilterCondition::Condition { fid, op } => {
                 if crate::is_faceted(fid.value(), filterable_fields) {
                     let field_ids_map = index.fields_ids_map(rtxn)?;
                     if let Some(fid) = field_ids_map.id(fid.value()) {
-                        Self::evaluate_operator(rtxn, index, fid, &op)
+                        Self::evaluate_operator(rtxn, index, fid, op)
                     } else {
-                        return Ok(RoaringBitmap::new());
+                        Ok(RoaringBitmap::new())
                     }
                 } else {
                     match fid.lexeme() {
                         attribute @ "_geo" => {
-                            return Err(fid.as_external_error(FilterError::BadGeo(attribute)))?;
+                            Err(fid.as_external_error(FilterError::BadGeo(attribute)))?
                         }
                         attribute if attribute.starts_with("_geoPoint(") => {
-                            return Err(fid.as_external_error(FilterError::BadGeo("_geoPoint")))?;
+                            Err(fid.as_external_error(FilterError::BadGeo("_geoPoint")))?
                         }
                         attribute @ "_geoDistance" => {
-                            return Err(fid.as_external_error(FilterError::Reserved(attribute)))?;
+                            Err(fid.as_external_error(FilterError::Reserved(attribute)))?
                         }
                         attribute => {
-                            return Err(fid.as_external_error(
-                                FilterError::AttributeNotFilterable {
-                                    attribute,
-                                    filterable_fields: filterable_fields.clone(),
-                                },
-                            ))?;
+                            Err(fid.as_external_error(FilterError::AttributeNotFilterable {
+                                attribute,
+                                filterable_fields: filterable_fields.clone(),
+                            }))?
                         }
                     }
                 }
@@ -419,10 +417,10 @@
                     Ok(result)
                 } else {
-                    return Err(point[0].as_external_error(FilterError::AttributeNotFilterable {
+                    Err(point[0].as_external_error(FilterError::AttributeNotFilterable {
                         attribute: "_geo",
                         filterable_fields: filterable_fields.clone(),
-                    }))?;
+                    }))?
                 }
             }
         }

View File

@@ -44,7 +44,7 @@ impl<'a> Iterator for MatchesIter<'a, '_> {
     fn next(&mut self) -> Option<Self::Item> {
         match self.inner.next() {
-            Some((matching_words, ids)) => match matching_words[0].match_token(&self.token) {
+            Some((matching_words, ids)) => match matching_words[0].match_token(self.token) {
                 Some(char_len) => {
                     if matching_words.len() > 1 {
                         Some(MatchType::Partial(PartialMatch {

View File

@@ -49,16 +49,16 @@ impl<'a, A> MatcherBuilder<'a, A> {
     pub fn build<'t, 'm>(&'m self, text: &'t str) -> Matcher<'t, 'm, A> {
         let crop_marker = match &self.crop_marker {
             Some(marker) => marker.as_str(),
-            None => &DEFAULT_CROP_MARKER,
+            None => DEFAULT_CROP_MARKER,
         };

         let highlight_prefix = match &self.highlight_prefix {
             Some(marker) => marker.as_str(),
-            None => &DEFAULT_HIGHLIGHT_PREFIX,
+            None => DEFAULT_HIGHLIGHT_PREFIX,
         };
         let highlight_suffix = match &self.highlight_suffix {
             Some(marker) => marker.as_str(),
-            None => &DEFAULT_HIGHLIGHT_SUFFIX,
+            None => DEFAULT_HIGHLIGHT_SUFFIX,
         };
         Matcher {
             text,
@@ -95,7 +95,7 @@ pub struct Match {
     token_position: usize,
 }

-#[derive(Serialize, Debug, Clone, PartialEq)]
+#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
 pub struct MatchBounds {
     pub start: usize,
     pub length: usize,
@@ -131,7 +131,7 @@ impl<'t, A: AsRef<[u8]>> Matcher<'t, '_, A> {
                     potential_matches.push((token_position, word_position, partial.char_len()));

                     for (token_position, word_position, word) in words_positions {
-                        partial = match partial.match_token(&word) {
+                        partial = match partial.match_token(word) {
                             // token matches the partial match, but the match is not full,
                             // we temporarly save the current token then we try to match the next one.
                             Some(MatchType::Partial(partial)) => {

View File

@@ -188,8 +188,8 @@ impl<'a> Context for QueryTreeBuilder<'a> {
     }

     fn min_word_len_for_typo(&self) -> heed::Result<(u8, u8)> {
-        let one = self.index.min_word_len_one_typo(&self.rtxn)?;
-        let two = self.index.min_word_len_two_typos(&self.rtxn)?;
+        let one = self.index.min_word_len_one_typo(self.rtxn)?;
+        let two = self.index.min_word_len_two_typos(self.rtxn)?;
         Ok((one, two))
     }
@@ -207,7 +207,7 @@ impl<'a> Context for QueryTreeBuilder<'a> {
         self.index
             .word_pair_proximity_docids
             .remap_data_type::<CboRoaringBitmapLenCodec>()
-            .get(&self.rtxn, &key)
+            .get(self.rtxn, &key)
     }
 }
@@ -313,7 +313,7 @@ pub struct TypoConfig<'a> {
 /// Return the `QueryKind` of a word depending on `authorize_typos`
 /// and the provided word length.
-fn typos<'a>(word: String, authorize_typos: bool, config: TypoConfig<'a>) -> QueryKind {
+fn typos(word: String, authorize_typos: bool, config: TypoConfig) -> QueryKind {
     if authorize_typos && !config.exact_words.map_or(false, |s| s.contains(&word)) {
         let count = word.chars().count().min(u8::MAX as usize) as u8;
         if count < config.word_len_one_typo {
@@ -556,7 +556,7 @@ fn create_matching_words(
                 for synonym in synonyms {
                     let synonym = synonym
                         .into_iter()
-                        .map(|syn| MatchingWord::new(syn.to_string(), 0, false))
+                        .map(|syn| MatchingWord::new(syn, 0, false))
                         .collect();
                     matching_words.push((synonym, vec![id]));
                 }
@@ -583,8 +583,7 @@ fn create_matching_words(
             PrimitiveQueryPart::Phrase(words) => {
                 let ids: Vec<_> =
                     (0..words.len()).into_iter().map(|i| id + i as PrimitiveWordId).collect();
-                let words =
-                    words.into_iter().map(|w| MatchingWord::new(w.to_string(), 0, false)).collect();
+                let words = words.into_iter().map(|w| MatchingWord::new(w, 0, false)).collect();
                 matching_words.push((words, ids));
             }
         }
@@ -639,7 +638,7 @@ fn create_matching_words(
                     for synonym in synonyms {
                         let synonym = synonym
                             .into_iter()
-                            .map(|syn| MatchingWord::new(syn.to_string(), 0, false))
+                            .map(|syn| MatchingWord::new(syn, 0, false))
                             .collect();
                         matching_words.push((synonym, ids.clone()));
                     }

View File

@@ -182,12 +182,11 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         // and we can reset the soft deleted bitmap
         self.index.put_soft_deleted_documents_ids(self.wtxn, &RoaringBitmap::new())?;

-        let primary_key = self.index.primary_key(self.wtxn)?.ok_or_else(|| {
-            InternalError::DatabaseMissingEntry {
+        let primary_key =
+            self.index.primary_key(self.wtxn)?.ok_or(InternalError::DatabaseMissingEntry {
                 db_name: db_name::MAIN,
                 key: Some(main_key::PRIMARY_KEY_KEY),
-            }
-        })?;
+            })?;

         // Since we already checked if the DB was empty, if we can't find the primary key, then
         // something is wrong, and we must return an error.
@@ -457,7 +456,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             .map(|point| (point, point.data.0))
             .unzip();
         points_to_remove.iter().for_each(|point| {
-            rtree.remove(&point);
+            rtree.remove(point);
         });

         geo_faceted_doc_ids -= docids_to_remove;
@@ -546,7 +545,7 @@ fn remove_from_word_docids(
     // We create an iterator to be able to get the content and delete the word docids.
     // It's faster to acquire a cursor to get and delete or put, as we avoid traversing
     // the LMDB B-Tree two times but only once.
-    let mut iter = db.prefix_iter_mut(txn, &word)?;
+    let mut iter = db.prefix_iter_mut(txn, word)?;
     if let Some((key, mut docids)) = iter.next().transpose()? {
         if key == word {
             let previous_len = docids.len();

View File

@@ -0,0 +1 @@

View File

@@ -140,7 +140,7 @@ fn fetch_or_generate_document_id(
         }
         None => Ok(Err(UserError::MissingDocumentId {
             primary_key: primary_key.to_string(),
-            document: obkv_to_object(&document, &documents_batch_index)?,
+            document: obkv_to_object(document, documents_batch_index)?,
         })),
     }
 }
@@ -156,7 +156,7 @@ fn fetch_or_generate_document_id(
             if matching_documents_ids.len() >= 2 {
                 return Ok(Err(UserError::TooManyDocumentIds {
                     primary_key: nested.name().to_string(),
-                    document: obkv_to_object(&document, &documents_batch_index)?,
+                    document: obkv_to_object(document, documents_batch_index)?,
                 }));
             }
         }
@@ -170,7 +170,7 @@ fn fetch_or_generate_document_id(
            },
            None => Ok(Err(UserError::MissingDocumentId {
                primary_key: nested.name().to_string(),
-                document: obkv_to_object(&document, &documents_batch_index)?,
+                document: obkv_to_object(document, documents_batch_index)?,
            })),
        }
    }
@@ -313,7 +313,7 @@ pub fn validate_document_id_value(document_id: Value) -> Result<StdResult<String
             None => Ok(Err(UserError::InvalidDocumentId { document_id: Value::String(string) })),
         },
         Value::Number(number) if number.is_i64() => Ok(Ok(number.to_string())),
-        content => Ok(Err(UserError::InvalidDocumentId { document_id: content.clone() })),
+        content => Ok(Err(UserError::InvalidDocumentId { document_id: content })),
     }
 }

View File

@@ -132,7 +132,7 @@ fn json_to_string<'a>(value: &'a Value, buffer: &'a mut String) -> Option<&'a st
     }

     if let Value::String(string) = value {
-        Some(&string)
+        Some(string)
     } else if inner(value, buffer) {
         Some(buffer)
     } else {

View File

@@ -67,7 +67,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
                 facet_exists_docids.entry(field_id).or_default().insert(document);

                 // For the other extraction tasks, prefix the key with the field_id and the document_id
-                key_buffer.extend_from_slice(&docid_bytes);
+                key_buffer.extend_from_slice(docid_bytes);

                 let value =
                     serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
@@ -107,8 +107,8 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     let facet_exists_docids_reader = writer_into_reader(facet_exists_docids_writer)?;

     Ok((
-        sorter_into_reader(fid_docid_facet_numbers_sorter, indexer.clone())?,
-        sorter_into_reader(fid_docid_facet_strings_sorter, indexer.clone())?,
+        sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?,
+        sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?,
         facet_exists_docids_reader,
     ))
 }

View File

@@ -150,7 +150,7 @@ pub(crate) fn data_from_obkv_documents(
     spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
         docid_fid_facet_numbers_chunks,
         indexer,
-        lmdb_writer_sx.clone(),
+        lmdb_writer_sx,
         extract_facet_number_docids,
         merge_cbo_roaring_bitmaps,
         TypedChunk::FieldIdFacetNumberDocids,

View File

@@ -30,9 +30,8 @@ pub fn index_prefix_word_database(
     debug!("Computing and writing the word prefix pair proximity docids into LMDB on disk...");

     let common_prefixes: Vec<_> = common_prefix_fst_words
-        .into_iter()
-        .map(|s| s.into_iter())
-        .flatten()
+        .iter()
+        .flat_map(|s| s.iter())
         .map(|s| s.as_str())
         .filter(|s| s.len() <= max_prefix_length)
         .collect();
@@ -73,7 +72,7 @@ pub fn index_prefix_word_database(
     // Now we do the same thing with the new prefixes and all word pairs in the DB
     let new_prefixes: Vec<_> = new_prefix_fst_words
-        .into_iter()
+        .iter()
         .map(|s| s.as_str())
         .filter(|s| s.len() <= max_prefix_length)
         .collect();

View File

@@ -195,9 +195,8 @@ pub fn index_word_prefix_database(
     // Make a prefix trie from the common prefixes that are shorter than self.max_prefix_length
     let prefixes = PrefixTrieNode::from_sorted_prefixes(
         common_prefix_fst_words
-            .into_iter()
-            .map(|s| s.into_iter())
-            .flatten()
+            .iter()
+            .flat_map(|s| s.iter())
             .map(|s| s.as_str())
             .filter(|s| s.len() <= max_prefix_length),
     );
@@ -237,10 +236,7 @@ pub fn index_word_prefix_database(
     // Now we do the same thing with the new prefixes and all word pairs in the DB
     let prefixes = PrefixTrieNode::from_sorted_prefixes(
-        new_prefix_fst_words
-            .into_iter()
-            .map(|s| s.as_str())
-            .filter(|s| s.len() <= max_prefix_length),
+        new_prefix_fst_words.iter().map(|s| s.as_str()).filter(|s| s.len() <= max_prefix_length),
     );

     if !prefixes.is_empty() {
@@ -366,7 +362,7 @@ fn execute_on_word_pairs_and_prefixes<I>(
                 &mut prefix_buffer,
                 &prefix_search_start,
                 |prefix_buffer| {
-                    batch.insert(&prefix_buffer, data.to_vec());
+                    batch.insert(prefix_buffer, data.to_vec());
                 },
             );
         }
@@ -484,7 +480,7 @@ impl PrefixTrieNode {
     fn set_search_start(&self, word: &[u8], search_start: &mut PrefixTrieNodeSearchStart) -> bool {
         let byte = word[0];
         if self.children[search_start.0].1 == byte {
-            return true;
+            true
         } else {
             match self.children[search_start.0..].binary_search_by_key(&byte, |x| x.1) {
                 Ok(position) => {
@@ -502,7 +498,7 @@ impl PrefixTrieNode {
     fn from_sorted_prefixes<'a>(prefixes: impl Iterator<Item = &'a str>) -> Self {
         let mut node = PrefixTrieNode::default();
         for prefix in prefixes {
-            node.insert_sorted_prefix(prefix.as_bytes().into_iter());
+            node.insert_sorted_prefix(prefix.as_bytes().iter());
         }
         node
     }