720: Make soft deletion optional in document addition and deletion + add lots of tests r=irevoire a=loiclec

# Pull Request

## What does this PR do?
When debugging recent issues, I created a few unit tests in the hope of reproducing the bugs I was looking for. In the end, I didn't find any, but I thought it would still be good to keep those tests.

More importantly, I added a `disable_soft_deletion` field to the `DeleteDocuments` and `IndexDocuments` builders. If it is set to `true`, indexing and deletion will never add documents to the `soft_deleted_documents_ids` bitmap and will instead perform a real deletion of the documents from the databases.
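
To make that concrete, here is a minimal sketch (not an excerpt from this diff) of the two ways to force a hard deletion. It assumes milli's test helpers (`TempIndex` and the `documents!` macro) that the new tests below rely on, and the function name is only for illustration:

```rust
use crate::index::tests::TempIndex;
use crate::update::DeleteDocuments;

// Sketch only: exercise the new `disable_soft_deletion` knob both through the
// indexing config and through the deletion builder.
fn hard_deletion_sketch() {
    let mut index = TempIndex::new();
    index
        .update_settings(|settings| {
            settings.set_primary_key("id".to_owned());
        })
        .unwrap();
    index.add_documents(documents!([{ "id": 0, "doggo": 0 }])).unwrap();

    // 1. Replacing a document hard-deletes the old version instead of
    //    parking its docid in `soft_deleted_documents_ids`.
    index.index_documents_config.disable_soft_deletion = true;
    index.add_documents(documents!([{ "id": 0, "doggo": 1 }])).unwrap();

    // 2. An explicit deletion can opt out of soft deletion in the same way.
    let mut wtxn = index.write_txn().unwrap();
    let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
    builder.disable_soft_deletion(true);
    builder.delete_external_id("0").unwrap();
    builder.execute().unwrap();
    wtxn.commit().unwrap();
}
```

Internally, replacement-triggered deletions go through the same `DeleteDocuments` builder, so `IndexDocuments` simply forwards its config flag to it (see the change in `milli/src/update/index_documents/mod.rs` below).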

For the new tests, I have:
- Improved the insta-snapshot format of the `external_documents_ids` structure (see the example right after this list)
- Added more tests for the facet DB indexing, deletion, and search algorithms, making sure to test them when the facet DB contains strings (instead of numbers) as well.
- Added more tests for the incremental indexing of the prefix proximity databases. For example, to see if documents are replaced correctly and if common prefixes are deleted correctly.
- Added tests that mix soft deletion and hard deletion, including when processing batches of document updates. 
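
For reference, the reworked `external_documents_ids` snapshot lists the soft and hard maps as readable external-id to internal-docid pairs instead of a hex dump of the FST bytes. A typical assertion, taken from the first new test below (after indexing documents with ids 0 to 3, whitespace as rendered on this page), looks like this:

```rust
db_snap!(index, external_documents_ids, 1, @r###"
soft:
hard:
0 0
1 1
2 2
3 3
"###);
```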


Co-authored-by: Loïc Lecrenier <loic.lecrenier@me.com>
Commit 46e26ab550, authored by bors[bot] on 2022-12-05 18:26:01 +00:00, committed by GitHub.
91 changed files with 2724 additions and 64 deletions


@ -1190,8 +1190,10 @@ pub(crate) mod tests {
use crate::documents::DocumentsBatchReader;
use crate::error::{Error, InternalError};
use crate::index::{DEFAULT_MIN_WORD_LEN_ONE_TYPO, DEFAULT_MIN_WORD_LEN_TWO_TYPOS};
use crate::update::{
self, DeleteDocuments, IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings,
};
use crate::{db_snap, obkv_to_json, Index};
pub(crate) struct TempIndex {
pub inner: Index,
@ -1477,4 +1479,545 @@ pub(crate) mod tests {
let user_defined = index.user_defined_searchable_fields(&rtxn).unwrap().unwrap();
assert_eq!(user_defined, &["doggo", "name"]);
}
#[test]
fn replace_documents_external_ids_and_soft_deletion_check() {
use big_s::S;
use maplit::hashset;
let mut index = TempIndex::new();
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_filterable_fields(hashset! { S("doggo") });
})
.unwrap();
let mut docs = vec![];
for i in 0..4 {
docs.push(serde_json::json!(
{ "id": i, "doggo": i }
));
}
index.add_documents(documents!(docs)).unwrap();
db_snap!(index, documents_ids, @"[0, 1, 2, 3, ]");
db_snap!(index, external_documents_ids, 1, @r###"
soft:
hard:
0 0
1 1
2 2
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 1, @"[]");
db_snap!(index, facet_id_f64_docids, 1, @r###"
1 0 0 1 [0, ]
1 0 1 1 [1, ]
1 0 2 1 [2, ]
1 0 3 1 [3, ]
"###);
let mut docs = vec![];
for i in 0..3 {
docs.push(serde_json::json!(
{ "id": i, "doggo": i + 1 }
));
}
index.add_documents(documents!(docs)).unwrap();
db_snap!(index, documents_ids, @"[3, 4, 5, 6, ]");
db_snap!(index, external_documents_ids, 2, @r###"
soft:
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 2, @"[0, 1, 2, ]");
db_snap!(index, facet_id_f64_docids, 2, @r###"
1 0 0 1 [0, ]
1 0 1 1 [1, 4, ]
1 0 2 1 [2, 5, ]
1 0 3 1 [3, 6, ]
"###);
index.index_documents_config.disable_soft_deletion = false;
index
.add_documents(documents!([{ "id": 3, "doggo": 4 }, { "id": 3, "doggo": 5 },{ "id": 3, "doggo": 4 }]))
.unwrap();
db_snap!(index, documents_ids, @"[4, 5, 6, 7, ]");
db_snap!(index, external_documents_ids, 3, @r###"
soft:
3 7
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 3, @"[0, 1, 2, 3, ]");
db_snap!(index, facet_id_f64_docids, 3, @r###"
1 0 0 1 [0, ]
1 0 1 1 [1, 4, ]
1 0 2 1 [2, 5, ]
1 0 3 1 [3, 6, ]
1 0 4 1 [7, ]
"###);
index.index_documents_config.disable_soft_deletion = false;
index
.update_settings(|settings| {
settings.set_distinct_field("id".to_owned());
})
.unwrap();
db_snap!(index, documents_ids, @"[4, 5, 6, 7, ]");
db_snap!(index, external_documents_ids, 3, @r###"
soft:
3 7
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 3, @"[]");
db_snap!(index, facet_id_f64_docids, 3, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [6, ]
0 0 3 1 [7, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [7, ]
"###);
index.index_documents_config.disable_soft_deletion = true;
index.add_documents(documents!([{ "id": 3, "doggo": 4 }])).unwrap();
db_snap!(index, external_documents_ids, 3, @r###"
soft:
3 7
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 3, @"[]");
db_snap!(index, facet_id_f64_docids, 3, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [6, ]
0 0 3 1 [7, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [7, ]
"###);
let mut wtxn = index.write_txn().unwrap();
let mut delete = DeleteDocuments::new(&mut wtxn, &index).unwrap();
let docid = delete.delete_external_id("3").unwrap();
insta::assert_snapshot!(format!("{docid}"), @"7");
delete.execute().unwrap();
wtxn.commit().unwrap();
db_snap!(index, documents_ids, @"[4, 5, 6, ]");
db_snap!(index, external_documents_ids, 4, @r###"
soft:
3 7
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 4, @"[7, ]");
db_snap!(index, facet_id_f64_docids, 4, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [6, ]
0 0 3 1 [7, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [7, ]
"###);
index.index_documents_config.disable_soft_deletion = false;
index.add_documents(documents!([{ "id": 3, "doggo": 4 }])).unwrap();
db_snap!(index, external_documents_ids, 4, @r###"
soft:
3 0
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 4, @"[7, ]");
db_snap!(index, facet_id_f64_docids, 4, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [6, ]
0 0 3 1 [0, 7, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [0, 7, ]
"###);
index.index_documents_config.disable_soft_deletion = false;
index.add_documents(documents!([{ "id": 3, "doggo": 5 }])).unwrap();
db_snap!(index, external_documents_ids, 4, @r###"
soft:
3 1
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 4, @"[0, 7, ]");
db_snap!(index, facet_id_f64_docids, 4, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [6, ]
0 0 3 1 [0, 1, 7, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [0, 7, ]
1 0 5 1 [1, ]
"###);
index.index_documents_config.disable_soft_deletion = false;
index.add_documents(documents!([{ "id": 3, "doggo": 5, "id": 2, "doggo": 4 }])).unwrap();
db_snap!(index, external_documents_ids, 4, @r###"
soft:
hard:
0 4
1 5
2 2
3 1
"###);
db_snap!(index, soft_deleted_documents_ids, 4, @"[0, 6, 7, ]");
db_snap!(index, facet_id_f64_docids, 4, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [2, 6, ]
0 0 3 1 [0, 1, 7, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [0, 2, 7, ]
1 0 5 1 [1, ]
"###);
index.index_documents_config.disable_soft_deletion = false;
index
.add_documents(documents!([{ "id": 4, "doggo": 5 }, { "id": 3, "doggo": 5 }]))
.unwrap();
db_snap!(index, external_documents_ids, 4, @r###"
soft:
4 3
hard:
0 4
1 5
2 2
3 1
"###);
db_snap!(index, soft_deleted_documents_ids, 4, @"[0, 6, 7, ]");
db_snap!(index, facet_id_f64_docids, 4, @r###"
0 0 0 1 [4, ]
0 0 1 1 [5, ]
0 0 2 1 [2, 6, ]
0 0 3 1 [0, 1, 7, ]
0 0 4 1 [3, ]
1 0 1 1 [4, ]
1 0 2 1 [5, ]
1 0 3 1 [6, ]
1 0 4 1 [0, 2, 7, ]
1 0 5 1 [1, 3, ]
"###);
}
#[test]
fn replace_documents_in_batches_external_ids_and_soft_deletion_check() {
use big_s::S;
use maplit::hashset;
let mut index = TempIndex::new();
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_filterable_fields(hashset! { S("doggo") });
})
.unwrap();
let add_documents = |index: &TempIndex, docs: Vec<Vec<serde_json::Value>>| {
let mut wtxn = index.write_txn().unwrap();
let mut builder = IndexDocuments::new(
&mut wtxn,
index,
&index.indexer_config,
index.index_documents_config.clone(),
|_| (),
|| false,
)
.unwrap();
for docs in docs {
(builder, _) = builder.add_documents(documents!(docs)).unwrap();
}
builder.execute().unwrap();
wtxn.commit().unwrap();
};
// First Batch
{
let mut docs1 = vec![];
for i in 0..4 {
docs1.push(serde_json::json!(
{ "id": i, "doggo": i }
));
}
add_documents(&index, vec![docs1]);
db_snap!(index, documents_ids, @"[0, 1, 2, 3, ]");
db_snap!(index, external_documents_ids, 1, @r###"
soft:
hard:
0 0
1 1
2 2
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 1, @"[]");
db_snap!(index, facet_id_f64_docids, 1, @r###"
1 0 0 1 [0, ]
1 0 1 1 [1, ]
1 0 2 1 [2, ]
1 0 3 1 [3, ]
"###);
}
// Second Batch: replace the documents with soft-deletion
{
index.index_documents_config.disable_soft_deletion = false;
let mut docs1 = vec![];
for i in 0..3 {
docs1.push(serde_json::json!(
{ "id": i, "doggo": i+1 }
));
}
let mut docs2 = vec![];
for i in 0..3 {
docs2.push(serde_json::json!(
{ "id": i, "doggo": i }
));
}
add_documents(&index, vec![docs1, docs2]);
db_snap!(index, documents_ids, @"[3, 4, 5, 6, ]");
db_snap!(index, external_documents_ids, 1, @r###"
soft:
hard:
0 4
1 5
2 6
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 1, @"[0, 1, 2, ]");
db_snap!(index, facet_id_f64_docids, 1, @r###"
1 0 0 1 [0, 4, ]
1 0 1 1 [1, 5, ]
1 0 2 1 [2, 6, ]
1 0 3 1 [3, ]
"###);
}
let rtxn = index.read_txn().unwrap();
let (_docid, obkv) = index.documents(&rtxn, [3]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(3),
"doggo": Number(3),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [4]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(0),
"doggo": Number(0),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [5]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(1),
"doggo": Number(1),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [6]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(2),
"doggo": Number(2),
}
"###);
drop(rtxn);
// Third Batch: replace the documents with soft-deletion again
{
index.index_documents_config.disable_soft_deletion = false;
let mut docs1 = vec![];
for i in 0..3 {
docs1.push(serde_json::json!(
{ "id": i, "doggo": i+1 }
));
}
let mut docs2 = vec![];
for i in 0..4 {
docs2.push(serde_json::json!(
{ "id": i, "doggo": i }
));
}
add_documents(&index, vec![docs1, docs2]);
db_snap!(index, documents_ids, @"[3, 7, 8, 9, ]");
db_snap!(index, external_documents_ids, 1, @r###"
soft:
hard:
0 7
1 8
2 9
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 1, @"[0, 1, 2, 4, 5, 6, ]");
db_snap!(index, facet_id_f64_docids, 1, @r###"
1 0 0 1 [0, 4, 7, ]
1 0 1 1 [1, 5, 8, ]
1 0 2 1 [2, 6, 9, ]
1 0 3 1 [3, ]
"###);
}
let rtxn = index.read_txn().unwrap();
let (_docid, obkv) = index.documents(&rtxn, [3]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(3),
"doggo": Number(3),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [7]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(0),
"doggo": Number(0),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [8]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(1),
"doggo": Number(1),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [9]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(2),
"doggo": Number(2),
}
"###);
drop(rtxn);
// Fourth Batch: replace the documents without soft-deletion
{
index.index_documents_config.disable_soft_deletion = true;
let mut docs1 = vec![];
for i in 0..3 {
docs1.push(serde_json::json!(
{ "id": i, "doggo": i+2 }
));
}
let mut docs2 = vec![];
for i in 0..1 {
docs2.push(serde_json::json!(
{ "id": i, "doggo": i }
));
}
add_documents(&index, vec![docs1, docs2]);
db_snap!(index, documents_ids, @"[3, 10, 11, 12, ]");
db_snap!(index, external_documents_ids, 1, @r###"
soft:
hard:
0 10
1 11
2 12
3 3
"###);
db_snap!(index, soft_deleted_documents_ids, 1, @"[]");
db_snap!(index, facet_id_f64_docids, 1, @r###"
1 0 0 1 [10, ]
1 0 3 1 [3, 11, ]
1 0 4 1 [12, ]
"###);
let rtxn = index.read_txn().unwrap();
let (_docid, obkv) = index.documents(&rtxn, [3]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(3),
"doggo": Number(3),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [10]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(0),
"doggo": Number(0),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [11]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(1),
"doggo": Number(3),
}
"###);
let (_docid, obkv) = index.documents(&rtxn, [12]).unwrap()[0];
let json = obkv_to_json(&[0, 1], &index.fields_ids_map(&rtxn).unwrap(), obkv).unwrap();
insta::assert_debug_snapshot!(json, @r###"
{
"id": Number(2),
"doggo": Number(4),
}
"###);
drop(rtxn);
}
}
}


@ -80,6 +80,8 @@ impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
// that we found all the documents in the sub level iterations already,
// we can pop this level iterator.
if documents_ids.is_empty() {
// break out of the for loop into the end of the 'outer loop, which
// pops the stack
break;
}
@ -113,11 +115,14 @@ mod tests {
use crate::milli_snap;
use crate::search::facet::facet_sort_ascending::ascending_facet_sort;
use crate::search::facet::tests::{
get_random_looking_index, get_random_looking_string_index_with_multiple_field_ids,
get_simple_index, get_simple_string_index_with_multiple_field_ids,
};
use crate::snapshot_tests::display_bitmap;
#[test]
fn filter_sort_ascending() {
let indexes = [get_simple_index(), get_random_looking_index()];
for (i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
@ -134,4 +139,88 @@ mod tests {
txn.commit().unwrap();
}
}
#[test]
fn filter_sort_ascending_multiple_field_ids() {
let indexes = [
get_simple_string_index_with_multiple_field_ids(),
get_random_looking_string_index_with_multiple_field_ids(),
];
for (i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = (200..=300).into_iter().collect::<RoaringBitmap>();
let mut results = String::new();
let iter = ascending_facet_sort(&txn, index.content, 0, candidates.clone()).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
milli_snap!(results, format!("{i}-0"));
let mut results = String::new();
let iter = ascending_facet_sort(&txn, index.content, 1, candidates).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
milli_snap!(results, format!("{i}-1"));
txn.commit().unwrap();
}
}
#[test]
fn filter_sort_ascending_with_no_candidates() {
let indexes = [
get_simple_string_index_with_multiple_field_ids(),
get_random_looking_string_index_with_multiple_field_ids(),
];
for (_i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = RoaringBitmap::new();
let mut results = String::new();
let iter = ascending_facet_sort(&txn, index.content, 0, candidates.clone()).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
assert!(results.is_empty());
let mut results = String::new();
let iter = ascending_facet_sort(&txn, index.content, 1, candidates).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
assert!(results.is_empty());
txn.commit().unwrap();
}
}
#[test]
fn filter_sort_ascending_with_inexisting_field_id() {
let indexes = [
get_simple_string_index_with_multiple_field_ids(),
get_random_looking_string_index_with_multiple_field_ids(),
];
for (_i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = RoaringBitmap::new();
let mut results = String::new();
let iter = ascending_facet_sort(&txn, index.content, 3, candidates.clone()).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
assert!(results.is_empty());
txn.commit().unwrap();
}
}
}


@ -125,12 +125,20 @@ mod tests {
use crate::heed_codec::ByteSliceRefCodec;
use crate::milli_snap;
use crate::search::facet::facet_sort_descending::descending_facet_sort;
use crate::search::facet::tests::{
get_random_looking_index, get_random_looking_string_index_with_multiple_field_ids,
get_simple_index, get_simple_index_with_multiple_field_ids,
get_simple_string_index_with_multiple_field_ids,
};
use crate::snapshot_tests::display_bitmap;
#[test]
fn filter_sort_descending() {
let indexes = [
get_simple_index(),
get_random_looking_index(),
get_simple_index_with_multiple_field_ids(),
];
for (i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = (200..=300).into_iter().collect::<RoaringBitmap>();
@ -147,4 +155,89 @@ mod tests {
txn.commit().unwrap();
}
}
#[test]
fn filter_sort_descending_multiple_field_ids() {
let indexes = [
get_simple_string_index_with_multiple_field_ids(),
get_random_looking_string_index_with_multiple_field_ids(),
];
for (i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = (200..=300).into_iter().collect::<RoaringBitmap>();
let mut results = String::new();
let db = index.content.remap_key_type::<FacetGroupKeyCodec<ByteSliceRefCodec>>();
let iter = descending_facet_sort(&txn, db, 0, candidates.clone()).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
milli_snap!(results, format!("{i}-0"));
let mut results = String::new();
let iter = descending_facet_sort(&txn, db, 1, candidates).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
milli_snap!(results, format!("{i}-1"));
txn.commit().unwrap();
}
}
#[test]
fn filter_sort_ascending_with_no_candidates() {
let indexes = [
get_simple_string_index_with_multiple_field_ids(),
get_random_looking_string_index_with_multiple_field_ids(),
];
for (_i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = RoaringBitmap::new();
let mut results = String::new();
let iter = descending_facet_sort(&txn, index.content, 0, candidates.clone()).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
assert!(results.is_empty());
let mut results = String::new();
let iter = descending_facet_sort(&txn, index.content, 1, candidates).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
assert!(results.is_empty());
txn.commit().unwrap();
}
}
#[test]
fn filter_sort_ascending_with_inexisting_field_id() {
let indexes = [
get_simple_string_index_with_multiple_field_ids(),
get_random_looking_string_index_with_multiple_field_ids(),
];
for (_i, index) in indexes.iter().enumerate() {
let txn = index.env.read_txn().unwrap();
let candidates = RoaringBitmap::new();
let mut results = String::new();
let iter = descending_facet_sort(&txn, index.content, 3, candidates.clone()).unwrap();
for el in iter {
let docids = el.unwrap();
results.push_str(&display_bitmap(&docids));
results.push('\n');
}
assert!(results.is_empty());
txn.commit().unwrap();
}
}
}


@ -89,7 +89,8 @@ pub(crate) mod tests {
use roaring::RoaringBitmap;
use crate::heed_codec::facet::OrderedF64Codec;
use crate::heed_codec::StrRefCodec;
use crate::update::facet::test_helpers::FacetIndex;
pub fn get_simple_index() -> FacetIndex<OrderedF64Codec> {
let index = FacetIndex::<OrderedF64Codec>::new(4, 8, 5);
@ -147,4 +148,43 @@ pub(crate) mod tests {
txn.commit().unwrap();
index
}
pub fn get_simple_string_index_with_multiple_field_ids() -> FacetIndex<StrRefCodec> {
let index = FacetIndex::<StrRefCodec>::new(4, 8, 5);
let mut txn = index.env.write_txn().unwrap();
for fid in 0..2 {
for i in 0..256u16 {
let mut bitmap = RoaringBitmap::new();
bitmap.insert(i as u32);
if i % 2 == 0 {
index.insert(&mut txn, fid, &format!("{i}").as_str(), &bitmap);
} else {
index.insert(&mut txn, fid, &"", &bitmap);
}
}
}
txn.commit().unwrap();
index
}
pub fn get_random_looking_string_index_with_multiple_field_ids() -> FacetIndex<StrRefCodec> {
let index = FacetIndex::<StrRefCodec>::new(4, 8, 5);
let mut txn = index.env.write_txn().unwrap();
let mut rng = rand::rngs::SmallRng::from_seed([0; 32]);
let keys =
std::iter::from_fn(|| Some(rng.gen_range(0..256))).take(128).collect::<Vec<u32>>();
for fid in 0..2 {
for (_i, &key) in keys.iter().enumerate() {
let mut bitmap = RoaringBitmap::new();
bitmap.insert(key);
bitmap.insert(key + 100);
if key % 2 == 0 {
index.insert(&mut txn, fid, &format!("{key}").as_str(), &bitmap);
} else {
index.insert(&mut txn, fid, &"", &bitmap);
}
}
}
txn.commit().unwrap();
index
}
}


@ -0,0 +1,33 @@
---
source: milli/src/search/facet/facet_sort_ascending.rs
---
[201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, ]
[200, ]
[202, ]
[204, ]
[206, ]
[208, ]
[210, ]
[212, ]
[214, ]
[216, ]
[218, ]
[220, ]
[222, ]
[224, ]
[226, ]
[228, ]
[230, ]
[232, ]
[234, ]
[236, ]
[238, ]
[240, ]
[242, ]
[244, ]
[246, ]
[248, ]
[250, ]
[252, ]
[254, ]


@ -0,0 +1,33 @@
---
source: milli/src/search/facet/facet_sort_ascending.rs
---
[201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, ]
[200, ]
[202, ]
[204, ]
[206, ]
[208, ]
[210, ]
[212, ]
[214, ]
[216, ]
[218, ]
[220, ]
[222, ]
[224, ]
[226, ]
[228, ]
[230, ]
[232, ]
[234, ]
[236, ]
[238, ]
[240, ]
[242, ]
[244, ]
[246, ]
[248, ]
[250, ]
[252, ]
[254, ]


@ -0,0 +1,27 @@
---
source: milli/src/search/facet/facet_sort_ascending.rs
---
[201, 203, 205, 207, 209, 211, 215, 219, 223, 231, 233, 235, 237, 239, 241, 243, 247, 263, 267, 269, 273, 277, 279, 281, 289, 293, 295, 297, ]
[202, ]
[224, ]
[230, ]
[236, ]
[244, ]
[250, ]
[256, ]
[258, ]
[260, ]
[262, ]
[264, ]
[278, ]
[282, ]
[286, ]
[292, ]
[206, ]
[208, ]
[210, ]
[216, ]
[220, ]
[226, ]
[238, ]


@ -0,0 +1,27 @@
---
source: milli/src/search/facet/facet_sort_ascending.rs
---
[201, 203, 205, 207, 209, 211, 215, 219, 223, 231, 233, 235, 237, 239, 241, 243, 247, 263, 267, 269, 273, 277, 279, 281, 289, 293, 295, 297, ]
[202, ]
[224, ]
[230, ]
[236, ]
[244, ]
[250, ]
[256, ]
[258, ]
[260, ]
[262, ]
[264, ]
[278, ]
[282, ]
[286, ]
[292, ]
[206, ]
[208, ]
[210, ]
[216, ]
[220, ]
[226, ]
[238, ]


@ -0,0 +1,60 @@
---
source: milli/src/search/facet/facet_sort_descending.rs
---
[255, ]
[254, ]
[253, ]
[252, ]
[251, ]
[250, ]
[249, ]
[248, ]
[247, ]
[246, ]
[245, ]
[244, ]
[243, ]
[242, ]
[241, ]
[240, ]
[239, ]
[238, ]
[237, ]
[236, ]
[235, ]
[234, ]
[233, ]
[232, ]
[231, ]
[230, ]
[229, ]
[228, ]
[227, ]
[226, ]
[225, ]
[224, ]
[223, ]
[222, ]
[221, ]
[220, ]
[219, ]
[218, ]
[217, ]
[216, ]
[215, ]
[214, ]
[213, ]
[212, ]
[211, ]
[210, ]
[209, ]
[208, ]
[207, ]
[206, ]
[205, ]
[204, ]
[203, ]
[202, ]
[201, ]
[200, ]


@ -0,0 +1,33 @@
---
source: milli/src/search/facet/facet_sort_descending.rs
---
[254, ]
[252, ]
[250, ]
[248, ]
[246, ]
[244, ]
[242, ]
[240, ]
[238, ]
[236, ]
[234, ]
[232, ]
[230, ]
[228, ]
[226, ]
[224, ]
[222, ]
[220, ]
[218, ]
[216, ]
[214, ]
[212, ]
[210, ]
[208, ]
[206, ]
[204, ]
[202, ]
[200, ]
[201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, ]


@ -0,0 +1,33 @@
---
source: milli/src/search/facet/facet_sort_descending.rs
---
[254, ]
[252, ]
[250, ]
[248, ]
[246, ]
[244, ]
[242, ]
[240, ]
[238, ]
[236, ]
[234, ]
[232, ]
[230, ]
[228, ]
[226, ]
[224, ]
[222, ]
[220, ]
[218, ]
[216, ]
[214, ]
[212, ]
[210, ]
[208, ]
[206, ]
[204, ]
[202, ]
[200, ]
[201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, ]


@ -0,0 +1,27 @@
---
source: milli/src/search/facet/facet_sort_descending.rs
---
[238, ]
[236, ]
[226, ]
[220, ]
[216, ]
[210, ]
[208, ]
[206, ]
[292, ]
[286, ]
[282, ]
[278, ]
[264, ]
[262, ]
[260, ]
[258, ]
[256, ]
[250, ]
[244, ]
[230, ]
[224, ]
[202, ]
[201, 203, 205, 207, 209, 211, 215, 219, 223, 231, 233, 235, 237, 239, 241, 243, 247, 263, 267, 269, 273, 277, 279, 281, 289, 293, 295, 297, ]


@ -0,0 +1,27 @@
---
source: milli/src/search/facet/facet_sort_descending.rs
---
[238, ]
[236, ]
[226, ]
[220, ]
[216, ]
[210, ]
[208, ]
[206, ]
[292, ]
[286, ]
[282, ]
[278, ]
[264, ]
[262, ]
[260, ]
[258, ]
[256, ]
[250, ]
[244, ]
[230, ]
[224, ]
[202, ]
[201, 203, 205, 207, 209, 211, 215, 219, 223, 231, 233, 235, 237, 239, 241, 243, 247, 263, 267, 269, 273, 277, 279, 281, 289, 293, 295, 297, ]


@ -356,19 +356,22 @@ pub fn snap_geo_faceted_documents_ids(index: &Index) -> String {
pub fn snap_external_documents_ids(index: &Index) -> String {
let rtxn = index.read_txn().unwrap();
let ExternalDocumentsIds { soft, hard, .. } = index.external_documents_ids(&rtxn).unwrap();
let mut snap = String::new();
writeln!(&mut snap, "soft:").unwrap();
let stream_soft = soft.stream();
let soft_external_ids = stream_soft.into_str_vec().unwrap();
for (key, id) in soft_external_ids {
writeln!(&mut snap, "{key:<24} {id}").unwrap();
}
writeln!(&mut snap, "hard:").unwrap();
let stream_hard = hard.stream();
let hard_external_ids = stream_hard.into_str_vec().unwrap();
for (key, id) in hard_external_ids {
writeln!(&mut snap, "{key:<24} {id}").unwrap();
}
snap
}
pub fn snap_number_faceted_documents_ids(index: &Index) -> String {


@ -26,7 +26,6 @@ pub struct DeleteDocuments<'t, 'u, 'i> {
index: &'i Index,
external_documents_ids: ExternalDocumentsIds<'static>,
to_delete_docids: RoaringBitmap,
disable_soft_deletion: bool,
}
@ -48,12 +47,10 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
index,
external_documents_ids,
to_delete_docids: RoaringBitmap::new(),
disable_soft_deletion: false,
})
}
pub fn disable_soft_deletion(&mut self, disable: bool) {
self.disable_soft_deletion = disable;
}
@ -156,17 +153,8 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
// We run the deletion.
// - With 100Go of disk and 50Go used including 15Go of soft-deleted documents
// We run the deletion.
if !self.disable_soft_deletion
&& percentage_available > 10
&& percentage_used_by_soft_deleted_documents < 10
{


@ -365,8 +365,9 @@ mod tests {
use crate::documents::documents_batch_reader_from_objects;
use crate::heed_codec::facet::OrderedF64Codec;
use crate::heed_codec::StrRefCodec;
use crate::index::tests::TempIndex;
use crate::update::facet::test_helpers::{ordered_string, FacetIndex};
use crate::{db_snap, milli_snap};
#[test]
@ -491,4 +492,37 @@ mod tests {
db_snap!(index, facet_id_f64_docids, "initial", @"c34f499261f3510d862fa0283bbe843a");
db_snap!(index, number_faceted_documents_ids, "initial", @"01594fecbb316798ce3651d6730a4521");
}
#[test]
fn insert_string() {
let test = |name: &str, group_size: u8, min_level_size: u8| {
let index = FacetIndex::<StrRefCodec>::new(group_size, 0 /*NA*/, min_level_size);
let strings = (0..1_000).map(|i| ordered_string(i as usize)).collect::<Vec<_>>();
let mut elements = Vec::<((u16, &str), RoaringBitmap)>::new();
for i in 0..1_000u32 {
// field id = 0, left_bound = i, docids = [i]
elements.push(((0, &strings[i as usize]), once(i).collect()));
}
for i in 0..100u32 {
// field id = 1, left_bound = i, docids = [i]
elements.push(((1, &strings[i as usize]), once(i).collect()));
}
let mut wtxn = index.env.write_txn().unwrap();
index.bulk_insert(&mut wtxn, &[0, 1], elements.iter());
index.verify_structure_validity(&wtxn, 0);
index.verify_structure_validity(&wtxn, 1);
wtxn.commit().unwrap();
milli_snap!(format!("{index}"), name);
};
test("default", 4, 5);
test("small_group_small_min_level", 2, 2);
test("small_group_large_min_level", 2, 128);
test("large_group_small_min_level", 16, 2);
test("odd_group_odd_min_level", 7, 3);
}
}


@ -114,11 +114,14 @@ mod tests {
use big_s::S;
use maplit::hashset;
use rand::seq::SliceRandom;
use rand::SeedableRng;
use roaring::RoaringBitmap;
use crate::db_snap;
use crate::documents::documents_batch_reader_from_objects;
use crate::index::tests::TempIndex;
use crate::update::facet::test_helpers::ordered_string;
use crate::update::DeleteDocuments;
#[test]
@ -156,8 +159,8 @@ mod tests {
let documents = documents_batch_reader_from_objects(documents);
index.add_documents(documents).unwrap();
db_snap!(index, facet_id_f64_docids, 1, @"550cd138d6fe31ccdd42cd5392fbd576");
db_snap!(index, number_faceted_documents_ids, 1, @"9a0ea88e7c9dcf6dc0ef0b601736ffcf");
let mut wtxn = index.env.write_txn().unwrap();
@ -174,8 +177,126 @@ mod tests {
wtxn.commit().unwrap();
db_snap!(index, soft_deleted_documents_ids, @"[]");
db_snap!(index, facet_id_f64_docids, 2, @"d4d5f14e7f1e1f09b86821a0b6defcc6");
db_snap!(index, number_faceted_documents_ids, 2, @"3570e0ac0fdb21be9ebe433f59264b56");
}
// Same test as above but working with string values for the facets
#[test]
fn delete_mixed_incremental_and_bulk_string() {
// The point of this test is to create an index populated with documents
// containing different filterable attributes. Then, we delete a bunch of documents
// such that a mix of the incremental and bulk indexer is used (depending on the field id)
let index = TempIndex::new_with_map_size(4096 * 1000 * 100);
index
.update_settings(|settings| {
settings.set_filterable_fields(
hashset! { S("id"), S("label"), S("timestamp"), S("colour") },
);
})
.unwrap();
let mut documents = vec![];
for i in 0..1000 {
documents.push(
serde_json::json! {
{
"id": i,
"label": ordered_string(i / 10),
"colour": ordered_string(i / 100),
"timestamp": ordered_string(i / 2),
}
}
.as_object()
.unwrap()
.clone(),
);
}
let documents = documents_batch_reader_from_objects(documents);
index.add_documents(documents).unwrap();
// Note that empty strings are not stored in the facet db due to commit 4860fd452965 (comment written on 29 Nov 2022)
db_snap!(index, facet_id_string_docids, 1, @"5fd1bd0724c65a6dc1aafb6db93c7503");
db_snap!(index, string_faceted_documents_ids, 1, @"54bc15494fa81d93339f43c08fd9d8f5");
let mut wtxn = index.env.write_txn().unwrap();
let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
builder.disable_soft_deletion(true);
builder.delete_documents(&RoaringBitmap::from_iter(0..100));
// by deleting the first 100 documents, we expect that:
// - the "id" part of the DB will be updated in bulk, since #affected_facet_value = 100 which is > database_len / 150 (= 13)
// - the "label" part will be updated incrementally, since #affected_facet_value = 10 which is < 13
// - the "colour" part will also be updated incrementally, since #affected_values = 1 which is < 13
// - the "timestamp" part will be updated in bulk, since #affected_values = 50 which is > 13
// This has to be verified manually by inserting breakpoint/adding print statements to the code when running the test
builder.execute().unwrap();
wtxn.commit().unwrap();
db_snap!(index, soft_deleted_documents_ids, @"[]");
db_snap!(index, facet_id_string_docids, 2, @"7f9c00b29e04d58c1821202a5dda0ebc");
db_snap!(index, string_faceted_documents_ids, 2, @"504152afa5c94fd4e515dcdfa4c7161f");
}
#[test]
fn delete_almost_all_incrementally_string() {
let index = TempIndex::new_with_map_size(4096 * 1000 * 100);
index
.update_settings(|settings| {
settings.set_filterable_fields(
hashset! { S("id"), S("label"), S("timestamp"), S("colour") },
);
})
.unwrap();
let mut documents = vec![];
for i in 0..1000 {
documents.push(
serde_json::json! {
{
"id": i,
"label": ordered_string(i / 10),
"colour": ordered_string(i / 100),
"timestamp": ordered_string(i / 2),
}
}
.as_object()
.unwrap()
.clone(),
);
}
let documents = documents_batch_reader_from_objects(documents);
index.add_documents(documents).unwrap();
// Note that empty strings are not stored in the facet db due to commit 4860fd452965 (comment written on 29 Nov 2022)
db_snap!(index, facet_id_string_docids, 1, @"5fd1bd0724c65a6dc1aafb6db93c7503");
db_snap!(index, string_faceted_documents_ids, 1, @"54bc15494fa81d93339f43c08fd9d8f5");
let mut rng = rand::rngs::SmallRng::from_seed([0; 32]);
let mut docids_to_delete = (0..1000).collect::<Vec<_>>();
docids_to_delete.shuffle(&mut rng);
for docid in docids_to_delete.into_iter().take(990) {
let mut wtxn = index.env.write_txn().unwrap();
let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
builder.disable_soft_deletion(true);
builder.delete_documents(&RoaringBitmap::from_iter([docid]));
builder.execute().unwrap();
wtxn.commit().unwrap();
}
db_snap!(index, soft_deleted_documents_ids, @"[]");
db_snap!(index, facet_id_string_docids, 2, @"ece56086e76d50e661fb2b58475b9f7d");
db_snap!(index, string_faceted_documents_ids, 2, @r###"
0 []
1 [11, 20, 73, 292, 324, 358, 381, 493, 839, 852, ]
2 [292, 324, 358, 381, 493, 839, 852, ]
3 [11, 20, 73, 292, 324, 358, 381, 493, 839, 852, ]
"###);
}
}
@ -188,7 +309,7 @@ mod comparison_bench {
use roaring::RoaringBitmap;
use crate::heed_codec::facet::OrderedF64Codec;
use crate::update::facet::test_helpers::FacetIndex;
// This is a simple test to get an intuition on the relative speed
// of the incremental vs. bulk indexer.


@ -648,7 +648,7 @@ mod tests {
use crate::heed_codec::facet::OrderedF64Codec;
use crate::heed_codec::StrRefCodec;
use crate::milli_snap;
use crate::update::facet::test_helpers::FacetIndex;
#[test]
fn append() {
@ -1053,7 +1053,7 @@ mod fuzz {
use tempfile::TempDir;
use super::*;
use crate::update::facet::test_helpers::FacetIndex;
#[derive(Default)]
pub struct TrivialDatabase<T> {
pub elements: BTreeMap<u16, BTreeMap<T, RoaringBitmap>>,


@ -162,7 +162,7 @@ impl<'i> FacetsUpdate<'i> {
}
#[cfg(test)]
pub(crate) mod test_helpers {
use std::cell::Cell;
use std::fmt::Display;
use std::iter::FromIterator;
@ -183,6 +183,23 @@ pub(crate) mod tests {
use crate::update::FacetsUpdateIncrementalInner;
use crate::CboRoaringBitmapCodec;
/// Utility function to generate a string whose position in a lexicographically
/// ordered list is `i`.
pub fn ordered_string(mut i: usize) -> String {
// The first string is empty
if i == 0 {
return String::new();
}
// The others are 5 char long, each between 'a' and 'z'
let mut s = String::new();
for _ in 0..5 {
let (digit, next) = (i % 26, i / 26);
s.insert(0, char::from_u32('a' as u32 + digit as u32).unwrap());
i = next;
}
s
}
/// A dummy index that only contains the facet database, used for testing
pub struct FacetIndex<BoundCodec>
where
@ -438,6 +455,98 @@ pub(crate) mod tests {
}
}
#[cfg(test)]
mod tests {
use big_s::S;
use maplit::hashset;
use crate::db_snap;
use crate::documents::documents_batch_reader_from_objects;
use crate::index::tests::TempIndex;
#[test]
fn replace_all_identical_soft_deletion_then_hard_deletion() {
let mut index = TempIndex::new_with_map_size(4096 * 1000 * 100);
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_filterable_fields(hashset! { S("size") });
})
.unwrap();
let mut documents = vec![];
for i in 0..1000 {
documents.push(
serde_json::json! {
{
"id": i,
"size": i % 250,
}
}
.as_object()
.unwrap()
.clone(),
);
}
let documents = documents_batch_reader_from_objects(documents);
index.add_documents(documents).unwrap();
db_snap!(index, facet_id_f64_docids, "initial", @"777e0e221d778764b472c512617eeb3b");
db_snap!(index, number_faceted_documents_ids, "initial", @"bd916ef32b05fd5c3c4c518708f431a9");
db_snap!(index, soft_deleted_documents_ids, "initial", @"[]");
let mut documents = vec![];
for i in 0..999 {
documents.push(
serde_json::json! {
{
"id": i,
"size": i % 250,
"other": 0,
}
}
.as_object()
.unwrap()
.clone(),
);
}
let documents = documents_batch_reader_from_objects(documents);
index.add_documents(documents).unwrap();
db_snap!(index, facet_id_f64_docids, "replaced_1_soft", @"abba175d7bed727d0efadaef85a4388f");
db_snap!(index, number_faceted_documents_ids, "replaced_1_soft", @"de76488bd05ad94c6452d725acf1bd06");
db_snap!(index, soft_deleted_documents_ids, "replaced_1_soft", @"6c975deb900f286d2f6456d2d5c3a123");
// Then replace the last document while disabling soft_deletion
index.index_documents_config.disable_soft_deletion = true;
let mut documents = vec![];
for i in 999..1000 {
documents.push(
serde_json::json! {
{
"id": i,
"size": i % 250,
"other": 0,
}
}
.as_object()
.unwrap()
.clone(),
);
}
let documents = documents_batch_reader_from_objects(documents);
index.add_documents(documents).unwrap();
db_snap!(index, facet_id_f64_docids, "replaced_2_hard", @"029e27a46d09c574ae949aa4289b45e6");
db_snap!(index, number_faceted_documents_ids, "replaced_2_hard", @"60b19824f136affe6b240a7200779028");
db_snap!(index, soft_deleted_documents_ids, "replaced_2_hard", @"[]");
}
}
#[allow(unused)]
#[cfg(test)]
mod comparison_bench {
@ -446,7 +555,7 @@ mod comparison_bench {
use rand::Rng;
use roaring::RoaringBitmap;
use super::test_helpers::FacetIndex;
use crate::heed_codec::facet::OrderedF64Codec;
// This is a simple test to get an intuition on the relative speed


@ -0,0 +1,4 @@
---
source: milli/src/update/facet/bulk.rs
---
353d70f52eea66e5031dca989ea8a037


@ -0,0 +1,4 @@
---
source: milli/src/update/facet/bulk.rs
---
52a093c909133d84023a4a7b83864808


@ -0,0 +1,4 @@
---
source: milli/src/update/facet/bulk.rs
---
9d86c72ddb241d0aeca2995d61a3648a


@ -0,0 +1,4 @@
---
source: milli/src/update/facet/bulk.rs
---
c0943177594534bfe5527cbf40fe388e


@ -0,0 +1,4 @@
---
source: milli/src/update/facet/bulk.rs
---
6ed86f234028ae3df5881bee5512f11e


@ -1,4 +0,0 @@
---
source: milli/src/update/facet/delete.rs
---
550cd138d6fe31ccdd42cd5392fbd576


@ -1,4 +0,0 @@
---
source: milli/src/update/facet/delete.rs
---
9a0ea88e7c9dcf6dc0ef0b601736ffcf


@ -1,4 +0,0 @@
---
source: milli/src/update/facet/delete.rs
---
d4d5f14e7f1e1f09b86821a0b6defcc6


@ -1,4 +0,0 @@
---
source: milli/src/update/facet/delete.rs
---
3570e0ac0fdb21be9ebe433f59264b56


@ -88,6 +88,7 @@ pub struct IndexDocumentsConfig {
pub words_positions_level_group_size: Option<NonZeroU32>,
pub words_positions_min_level_size: Option<NonZeroU32>,
pub update_method: IndexDocumentsMethod,
pub disable_soft_deletion: bool,
pub autogenerate_docids: bool,
}
@ -331,6 +332,7 @@ where
// able to simply insert all the documents even if they already exist in the database.
if !replaced_documents_ids.is_empty() {
let mut deletion_builder = update::DeleteDocuments::new(self.wtxn, self.index)?;
deletion_builder.disable_soft_deletion(self.config.disable_soft_deletion);
debug!("documents to delete {:?}", replaced_documents_ids);
deletion_builder.delete_documents(&replaced_documents_ids);
let deleted_documents_count = deletion_builder.execute()?;
@ -906,6 +908,8 @@ mod tests {
{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "_geo": { "lat": 35, "lng": 23 } }
])).unwrap();
db_snap!(index, word_docids, "initial");
index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments;
index
@ -928,6 +932,9 @@ mod tests {
let count = index.all_documents(&rtxn).unwrap().count();
assert_eq!(count, 6);
db_snap!(index, word_docids, "updated");
db_snap!(index, soft_deleted_documents_ids, "updated", @"[0, 1, 4, ]");
drop(rtxn);
}


@ -0,0 +1,54 @@
---
source: milli/src/update/index_documents/mod.rs
---
1 [2, ]
10.0 [1, ]
12 [0, ]
1344 [3, ]
2 [0, ]
23 [5, ]
25.99 [2, ]
3.5 [0, ]
35 [5, ]
4 [4, ]
42 [0, 5, ]
456 [1, ]
adams [5, ]
adventure [1, ]
alice [2, ]
and [0, 4, ]
antoine [1, ]
austin [0, ]
blood [4, ]
carroll [2, ]
de [1, ]
douglas [5, ]
exupery [1, ]
fantasy [2, 3, 4, ]
galaxy [5, ]
guide [5, ]
half [4, ]
harry [4, ]
hitchhiker' [5, ]
hobbit [3, ]
in [2, ]
j [3, 4, ]
jane [0, ]
k [4, ]
le [1, ]
lewis [2, ]
petit [1, ]
potter [4, ]
prejudice [0, ]
pride [0, ]
prince [1, 4, ]
r [3, ]
romance [0, ]
rowling [4, ]
s [5, ]
saint [1, ]
the [3, 4, 5, ]
to [5, ]
tolkien [3, ]
wonderland [2, ]


@ -0,0 +1,4 @@
---
source: milli/src/update/index_documents/mod.rs
---
[0, 1, 4, ]


@ -0,0 +1,58 @@
---
source: milli/src/update/index_documents/mod.rs
---
1 [2, ]
10.0 [1, 7, ]
12 [0, 8, ]
1344 [3, ]
1813 [8, ]
2 [0, 8, ]
23 [5, ]
25.99 [2, ]
3.5 [0, 8, ]
35 [5, ]
4 [4, 6, ]
42 [0, 5, 8, ]
456 [1, 7, ]
adams [5, ]
adventure [1, 7, ]
alice [2, ]
and [0, 4, 6, 8, ]
antoine [1, 7, ]
austen [8, ]
austin [0, ]
blood [4, 6, ]
carroll [2, ]
de [1, 7, ]
douglas [5, ]
exupery [1, 7, ]
fantasy [2, 3, 4, 6, ]
galaxy [5, ]
guide [5, ]
half [4, 6, ]
harry [4, 6, ]
hitchhiker' [5, ]
hobbit [3, ]
in [2, ]
j [3, 4, 6, 8, ]
jane [0, ]
k [4, 6, ]
le [1, ]
lewis [2, ]
little [7, ]
petit [1, ]
potter [4, 6, ]
prejudice [0, 8, ]
pride [0, 8, ]
prince [1, 4, 7, ]
princess [6, ]
r [3, ]
romance [0, 8, ]
rowling [4, 6, ]
s [5, ]
saint [1, 7, ]
the [3, 4, 5, 6, 7, ]
to [5, ]
tolkien [3, ]
wonderland [2, ]


@ -156,30 +156,40 @@ pub fn write_into_lmdb_database_without_merging(
#[cfg(test)]
mod tests {
use std::io::Cursor;
use std::iter::FromIterator;
use roaring::RoaringBitmap;
use crate::db_snap;
use crate::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use crate::index::tests::TempIndex;
use crate::update::{DeleteDocuments, IndexDocumentsMethod};
fn documents_with_enough_different_words_for_prefixes(
prefixes: &[&str],
start_id: usize,
) -> Vec<crate::Object> {
let mut documents = Vec::new();
let mut id = start_id;
for prefix in prefixes {
for i in 0..50 {
documents.push(
serde_json::json!({
"id": id,
"text": format!("{prefix}{i:x}"),
})
.as_object()
.unwrap()
.clone(),
);
id += 1;
}
}
documents
}
#[test]
fn add_new_documents() {
let mut index = TempIndex::new();
index.index_documents_config.words_prefix_threshold = Some(50);
index.index_documents_config.autogenerate_docids = true;
@ -198,10 +208,11 @@ mod tests {
DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
};
let mut documents = documents_with_enough_different_words_for_prefixes(&["a", "be"], 0);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
documents.push(
serde_json::json!({
"id": "9000",
"text": "At an amazing and beautiful house"
})
.as_object()
@ -210,6 +221,7 @@ mod tests {
);
documents.push(
serde_json::json!({
"id": "9001",
"text": "The bell rings at 5 am"
})
.as_object()
@ -221,10 +233,12 @@ mod tests {
index.add_documents(documents).unwrap();
db_snap!(index, word_prefix_pair_proximity_docids, "initial");
db_snap!(index, prefix_word_pair_proximity_docids, "initial");
let mut documents = documents_with_enough_different_words_for_prefixes(&["am", "an"], 100);
documents.push(
serde_json::json!({
"id": "9002",
"text": "At an extraordinary house"
})
.as_object()
@ -239,7 +253,7 @@ mod tests {
db_snap!(index, prefix_word_pair_proximity_docids, "update");
}
#[test]
fn batch_bug_3043() {
// https://github.com/meilisearch/meilisearch/issues/3043
let mut index = TempIndex::new();
index.index_documents_config.words_prefix_threshold = Some(50);
@ -259,7 +273,7 @@ mod tests {
DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
};
let mut documents = documents_with_enough_different_words_for_prefixes(&["y"], 0);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
documents.push(
serde_json::json!({
@ -285,4 +299,291 @@ mod tests {
db_snap!(index, word_prefix_pair_proximity_docids);
db_snap!(index, prefix_word_pair_proximity_docids);
}
#[test]
fn hard_delete_and_reupdate() {
let mut index = TempIndex::new();
index.index_documents_config.words_prefix_threshold = Some(50);
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_searchable_fields(vec!["text".to_owned()]);
})
.unwrap();
let batch_reader_from_documents = |documents| {
let mut builder = DocumentsBatchBuilder::new(Vec::new());
for object in documents {
builder.append_json_object(&object).unwrap();
}
DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
};
let mut documents = documents_with_enough_different_words_for_prefixes(&["a"], 0);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
documents.push(
serde_json::json!({
"id": 9000,
"text": "At an amazing and beautiful house"
})
.as_object()
.unwrap()
.clone(),
);
documents.push(
serde_json::json!({
"id": 9001,
"text": "The bell rings at 5 am"
})
.as_object()
.unwrap()
.clone(),
);
let documents = batch_reader_from_documents(documents);
index.add_documents(documents).unwrap();
db_snap!(index, documents_ids, "initial");
db_snap!(index, word_docids, "initial");
db_snap!(index, word_prefix_pair_proximity_docids, "initial");
db_snap!(index, prefix_word_pair_proximity_docids, "initial");
let mut wtxn = index.write_txn().unwrap();
let mut delete = DeleteDocuments::new(&mut wtxn, &index).unwrap();
delete.disable_soft_deletion(true);
delete.delete_documents(&RoaringBitmap::from_iter([50]));
delete.execute().unwrap();
wtxn.commit().unwrap();
db_snap!(index, documents_ids, "first_delete");
db_snap!(index, word_docids, "first_delete");
db_snap!(index, word_prefix_pair_proximity_docids, "first_delete");
db_snap!(index, prefix_word_pair_proximity_docids, "first_delete");
let mut wtxn = index.write_txn().unwrap();
let mut delete = DeleteDocuments::new(&mut wtxn, &index).unwrap();
delete.disable_soft_deletion(true);
delete.delete_documents(&RoaringBitmap::from_iter(0..50));
delete.execute().unwrap();
wtxn.commit().unwrap();
db_snap!(index, documents_ids, "second_delete");
db_snap!(index, word_docids, "second_delete");
db_snap!(index, word_prefix_pair_proximity_docids, "second_delete");
db_snap!(index, prefix_word_pair_proximity_docids, "second_delete");
let documents = documents_with_enough_different_words_for_prefixes(&["b"], 1000);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
index.add_documents(batch_reader_from_documents(documents)).unwrap();
db_snap!(index, documents_ids, "reupdate");
db_snap!(index, word_docids, "reupdate");
db_snap!(index, word_prefix_pair_proximity_docids, "reupdate");
db_snap!(index, prefix_word_pair_proximity_docids, "reupdate");
}
#[test]
fn soft_delete_and_reupdate() {
let mut index = TempIndex::new();
index.index_documents_config.words_prefix_threshold = Some(50);
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_searchable_fields(vec!["text".to_owned()]);
})
.unwrap();
let batch_reader_from_documents = |documents| {
let mut builder = DocumentsBatchBuilder::new(Vec::new());
for object in documents {
builder.append_json_object(&object).unwrap();
}
DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
};
let mut documents = documents_with_enough_different_words_for_prefixes(&["a"], 0);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
documents.push(
serde_json::json!({
"id": 9000,
"text": "At an amazing and beautiful house"
})
.as_object()
.unwrap()
.clone(),
);
documents.push(
serde_json::json!({
"id": 9001,
"text": "The bell rings at 5 am"
})
.as_object()
.unwrap()
.clone(),
);
let documents = batch_reader_from_documents(documents);
index.add_documents(documents).unwrap();
db_snap!(index, documents_ids, "initial");
db_snap!(index, word_docids, "initial");
db_snap!(index, word_prefix_pair_proximity_docids, "initial");
db_snap!(index, prefix_word_pair_proximity_docids, "initial");
let mut wtxn = index.write_txn().unwrap();
let mut delete = DeleteDocuments::new(&mut wtxn, &index).unwrap();
delete.delete_documents(&RoaringBitmap::from_iter([50]));
delete.execute().unwrap();
wtxn.commit().unwrap();
db_snap!(index, documents_ids, "first_delete");
db_snap!(index, word_docids, "first_delete");
db_snap!(index, word_prefix_pair_proximity_docids, "first_delete");
db_snap!(index, prefix_word_pair_proximity_docids, "first_delete");
let mut wtxn = index.write_txn().unwrap();
let mut delete = DeleteDocuments::new(&mut wtxn, &index).unwrap();
delete.delete_documents(&RoaringBitmap::from_iter(0..50));
delete.execute().unwrap();
wtxn.commit().unwrap();
db_snap!(index, documents_ids, "second_delete");
db_snap!(index, word_docids, "second_delete");
db_snap!(index, word_prefix_pair_proximity_docids, "second_delete");
db_snap!(index, prefix_word_pair_proximity_docids, "second_delete");
let documents = documents_with_enough_different_words_for_prefixes(&["b"], 1000);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
index.add_documents(batch_reader_from_documents(documents)).unwrap();
db_snap!(index, documents_ids, "reupdate");
db_snap!(index, word_docids, "reupdate");
db_snap!(index, word_prefix_pair_proximity_docids, "reupdate");
db_snap!(index, prefix_word_pair_proximity_docids, "reupdate");
}
#[test]
fn replace_soft_deletion() {
let mut index = TempIndex::new();
index.index_documents_config.words_prefix_threshold = Some(50);
index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments;
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_searchable_fields(vec!["text".to_owned()]);
})
.unwrap();
let batch_reader_from_documents = |documents| {
let mut builder = DocumentsBatchBuilder::new(Vec::new());
for object in documents {
builder.append_json_object(&object).unwrap();
}
DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
};
let mut documents = documents_with_enough_different_words_for_prefixes(&["a"], 0);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
documents.push(
serde_json::json!({
"id": 9000,
"text": "At an amazing house"
})
.as_object()
.unwrap()
.clone(),
);
documents.push(
serde_json::json!({
"id": 9001,
"text": "The bell rings"
})
.as_object()
.unwrap()
.clone(),
);
let documents = batch_reader_from_documents(documents);
index.add_documents(documents).unwrap();
db_snap!(index, documents_ids, "initial");
db_snap!(index, word_docids, "initial");
db_snap!(index, word_prefix_pair_proximity_docids, "initial");
db_snap!(index, prefix_word_pair_proximity_docids, "initial");
let documents = documents_with_enough_different_words_for_prefixes(&["b"], 0);
index.add_documents(batch_reader_from_documents(documents)).unwrap();
db_snap!(index, documents_ids, "replaced");
db_snap!(index, word_docids, "replaced");
db_snap!(index, word_prefix_pair_proximity_docids, "replaced");
db_snap!(index, prefix_word_pair_proximity_docids, "replaced");
db_snap!(index, soft_deleted_documents_ids, "replaced", @"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, ]");
}
#[test]
fn replace_hard_deletion() {
let mut index = TempIndex::new();
index.index_documents_config.words_prefix_threshold = Some(50);
index.index_documents_config.disable_soft_deletion = true;
index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments;
index
.update_settings(|settings| {
settings.set_primary_key("id".to_owned());
settings.set_searchable_fields(vec!["text".to_owned()]);
})
.unwrap();
let batch_reader_from_documents = |documents| {
let mut builder = DocumentsBatchBuilder::new(Vec::new());
for object in documents {
builder.append_json_object(&object).unwrap();
}
DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
};
let mut documents = documents_with_enough_different_words_for_prefixes(&["a"], 0);
// now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
documents.push(
serde_json::json!({
"id": 9000,
"text": "At an amazing house"
})
.as_object()
.unwrap()
.clone(),
);
documents.push(
serde_json::json!({
"id": 9001,
"text": "The bell rings"
})
.as_object()
.unwrap()
.clone(),
);
let documents = batch_reader_from_documents(documents);
index.add_documents(documents).unwrap();
db_snap!(index, documents_ids, "initial");
db_snap!(index, word_docids, "initial");
db_snap!(index, word_prefix_pair_proximity_docids, "initial");
db_snap!(index, prefix_word_pair_proximity_docids, "initial");
let documents = documents_with_enough_different_words_for_prefixes(&["b"], 0);
index.add_documents(batch_reader_from_documents(documents)).unwrap();
db_snap!(index, documents_ids, "replaced");
db_snap!(index, word_docids, "replaced");
db_snap!(index, word_prefix_pair_proximity_docids, "replaced");
db_snap!(index, prefix_word_pair_proximity_docids, "replaced");
db_snap!(index, soft_deleted_documents_ids, "replaced", @"[]");
}
}
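The snapshot files below capture the database state after each step of these tests. For quick reference, here is a minimal sketch of the two ways the tests opt into hard deletion, restating calls already present above (given a `TempIndex` like the ones used in these tests, not new API surface):

```rust
// Hard deletion when deleting documents explicitly, via the DeleteDocuments builder:
let mut wtxn = index.write_txn().unwrap();
let mut delete = DeleteDocuments::new(&mut wtxn, &index).unwrap();
// Skip `soft_deleted_documents_ids` and purge the documents from every database right away.
delete.disable_soft_deletion(true);
delete.delete_documents(&RoaringBitmap::from_iter([0]));
delete.execute().unwrap();
wtxn.commit().unwrap();

// Hard deletion for documents replaced during indexing, via the indexing configuration
// (this is the setup used by `replace_hard_deletion` above):
index.index_documents_config.disable_soft_deletion = true;
index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments;
```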

View File

@ -0,0 +1,20 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [101, ]
1 a amazing [100, ]
1 a an [100, ]
1 a and [100, ]
1 a beautiful [100, ]
1 b house [100, ]
1 b rings [101, ]
1 be house [100, ]
1 be rings [101, ]
2 a am [101, ]
2 a amazing [100, ]
2 a and [100, ]
2 a beautiful [100, ]
2 a house [100, ]
2 b at [101, ]
2 be at [101, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 51, ]

View File

@ -0,0 +1,6 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
2 a am [51, ]

View File

@ -0,0 +1,60 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
am [51, ]
at [51, ]
bell [51, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,11 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 rings a [51, ]
2 at a [51, ]
2 bell a [51, ]
3 rings a [51, ]
3 the a [51, ]
4 bell a [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, ]

View File

@ -0,0 +1,14 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
1 a amazing [50, ]
1 a an [50, ]
1 a and [50, ]
1 a beautiful [50, ]
2 a am [51, ]
2 a amazing [50, ]
2 a and [50, ]
2 a beautiful [50, ]
2 a house [50, ]

View File

@ -0,0 +1,65 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
am [51, ]
amazing [50, ]
an [50, ]
and [50, ]
at [50, 51, ]
beautiful [50, ]
bell [51, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,16 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 amazing a [50, ]
1 an a [50, ]
1 at a [50, ]
1 rings a [51, ]
2 an a [50, ]
2 at a [50, 51, ]
2 bell a [51, ]
3 at a [50, ]
3 rings a [51, ]
3 the a [51, ]
4 bell a [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 51, ]

View File

@ -0,0 +1,6 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 b rings [51, ]
2 b at [51, ]

View File

@ -0,0 +1,60 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
am [51, ]
at [51, ]
b0 [0, ]
b1 [1, ]
b10 [16, ]
b11 [17, ]
b12 [18, ]
b13 [19, ]
b14 [20, ]
b15 [21, ]
b16 [22, ]
b17 [23, ]
b18 [24, ]
b19 [25, ]
b1a [26, ]
b1b [27, ]
b1c [28, ]
b1d [29, ]
b1e [30, ]
b1f [31, ]
b2 [2, ]
b20 [32, ]
b21 [33, ]
b22 [34, ]
b23 [35, ]
b24 [36, ]
b25 [37, ]
b26 [38, ]
b27 [39, ]
b28 [40, ]
b29 [41, ]
b2a [42, ]
b2b [43, ]
b2c [44, ]
b2d [45, ]
b2e [46, ]
b2f [47, ]
b3 [3, ]
b30 [48, ]
b31 [49, ]
b4 [4, ]
b5 [5, ]
b6 [6, ]
b7 [7, ]
b8 [8, ]
b9 [9, ]
ba [10, ]
bb [11, ]
bc [12, ]
bd [13, ]
be [14, ]
bell [51, ]
bf [15, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,5 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 the b [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[51, ]

View File

@ -0,0 +1,6 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
2 a am [51, ]

View File

@ -0,0 +1,10 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
am [51, ]
at [51, ]
bell [51, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,11 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 rings a [51, ]
2 at a [51, ]
2 bell a [51, ]
3 rings a [51, ]
3 the a [51, ]
4 bell a [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, ]

View File

@ -0,0 +1,9 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a amazing [50, ]
1 a an [50, ]
1 a house [50, ]
2 a amazing [50, ]
2 a house [50, ]

View File

@ -0,0 +1,61 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
amazing [50, ]
an [50, ]
at [50, ]
bell [51, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,7 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 an a [50, ]
1 at a [50, ]
2 at a [50, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, ]

View File

@ -0,0 +1,5 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 b rings [51, ]

View File

@ -0,0 +1,61 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
amazing [50, ]
an [50, ]
at [50, ]
b0 [52, ]
b1 [53, ]
b10 [68, ]
b11 [69, ]
b12 [70, ]
b13 [71, ]
b14 [72, ]
b15 [73, ]
b16 [74, ]
b17 [75, ]
b18 [76, ]
b19 [77, ]
b1a [78, ]
b1b [79, ]
b1c [80, ]
b1d [81, ]
b1e [82, ]
b1f [83, ]
b2 [54, ]
b20 [84, ]
b21 [85, ]
b22 [86, ]
b23 [87, ]
b24 [88, ]
b25 [89, ]
b26 [90, ]
b27 [91, ]
b28 [92, ]
b29 [93, ]
b2a [94, ]
b2b [95, ]
b2c [96, ]
b2d [97, ]
b2e [98, ]
b2f [99, ]
b3 [55, ]
b30 [100, ]
b31 [101, ]
b4 [56, ]
b5 [57, ]
b6 [58, ]
b7 [59, ]
b8 [60, ]
b9 [61, ]
ba [62, ]
bb [63, ]
bc [64, ]
bd [65, ]
be [66, ]
bell [51, ]
bf [67, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,5 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 the b [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, ]

View File

@ -0,0 +1,9 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a amazing [50, ]
1 a an [50, ]
1 a house [50, ]
2 a amazing [50, ]
2 a house [50, ]

View File

@ -0,0 +1,61 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
amazing [50, ]
an [50, ]
at [50, ]
bell [51, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,7 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 an a [50, ]
1 at a [50, ]
2 at a [50, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, ]

View File

@ -0,0 +1,10 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a amazing [50, ]
1 a an [50, ]
1 a house [50, ]
1 b rings [51, ]
2 a amazing [50, ]
2 a house [50, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5f6443e54fae188aa96d4f27fce28939

View File

@ -0,0 +1,8 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 an a [50, ]
1 at a [50, ]
1 the b [51, ]
2 at a [50, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 51, ]

View File

@ -0,0 +1,14 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
1 a amazing [50, ]
1 a an [50, ]
1 a and [50, ]
1 a beautiful [50, ]
2 a am [51, ]
2 a amazing [50, ]
2 a and [50, ]
2 a beautiful [50, ]
2 a house [50, ]

View File

@ -0,0 +1,65 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
am [51, ]
amazing [50, ]
an [50, ]
and [50, ]
at [50, 51, ]
beautiful [50, ]
bell [51, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,16 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 amazing a [50, ]
1 an a [50, ]
1 at a [50, ]
1 rings a [51, ]
2 an a [50, ]
2 at a [50, 51, ]
2 bell a [51, ]
3 at a [50, ]
3 rings a [51, ]
3 the a [51, ]
4 bell a [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, ]

View File

@ -0,0 +1,14 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
1 a amazing [50, ]
1 a an [50, ]
1 a and [50, ]
1 a beautiful [50, ]
2 a am [51, ]
2 a amazing [50, ]
2 a and [50, ]
2 a beautiful [50, ]
2 a house [50, ]

View File

@ -0,0 +1,65 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
am [51, ]
amazing [50, ]
an [50, ]
and [50, ]
at [50, 51, ]
beautiful [50, ]
bell [51, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,16 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 amazing a [50, ]
1 an a [50, ]
1 at a [50, ]
1 rings a [51, ]
2 an a [50, ]
2 at a [50, 51, ]
2 bell a [51, ]
3 at a [50, ]
3 rings a [51, ]
3 the a [51, ]
4 bell a [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, ]

View File

@ -0,0 +1,17 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
1 a amazing [50, ]
1 a an [50, ]
1 a and [50, ]
1 a beautiful [50, ]
1 b house [50, ]
1 b rings [51, ]
2 a am [51, ]
2 a amazing [50, ]
2 a and [50, ]
2 a beautiful [50, ]
2 a house [50, ]
2 b at [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
9f4866b80177e321a33ce434992022b5

View File

@ -0,0 +1,21 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 amazing a [50, ]
1 an a [50, ]
1 and b [50, ]
1 at a [50, ]
1 rings a [51, ]
1 the b [51, ]
2 amazing b [50, ]
2 an a [50, ]
2 at a [50, 51, ]
2 bell a [51, ]
3 an b [50, ]
3 at a [50, ]
3 rings a [51, ]
3 the a [51, ]
4 at b [50, ]
4 bell a [51, ]

View File

@ -0,0 +1,4 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
[51, ]

View File

@ -0,0 +1,14 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 a 5 [51, ]
1 a amazing [50, ]
1 a an [50, ]
1 a and [50, ]
1 a beautiful [50, ]
2 a am [51, ]
2 a amazing [50, ]
2 a and [50, ]
2 a beautiful [50, ]
2 a house [50, ]

View File

@ -0,0 +1,65 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
5 [51, ]
a0 [0, ]
a1 [1, ]
a10 [16, ]
a11 [17, ]
a12 [18, ]
a13 [19, ]
a14 [20, ]
a15 [21, ]
a16 [22, ]
a17 [23, ]
a18 [24, ]
a19 [25, ]
a1a [26, ]
a1b [27, ]
a1c [28, ]
a1d [29, ]
a1e [30, ]
a1f [31, ]
a2 [2, ]
a20 [32, ]
a21 [33, ]
a22 [34, ]
a23 [35, ]
a24 [36, ]
a25 [37, ]
a26 [38, ]
a27 [39, ]
a28 [40, ]
a29 [41, ]
a2a [42, ]
a2b [43, ]
a2c [44, ]
a2d [45, ]
a2e [46, ]
a2f [47, ]
a3 [3, ]
a30 [48, ]
a31 [49, ]
a4 [4, ]
a5 [5, ]
a6 [6, ]
a7 [7, ]
a8 [8, ]
a9 [9, ]
aa [10, ]
ab [11, ]
ac [12, ]
ad [13, ]
ae [14, ]
af [15, ]
am [51, ]
amazing [50, ]
an [50, ]
and [50, ]
at [50, 51, ]
beautiful [50, ]
bell [51, ]
house [50, ]
rings [51, ]
the [51, ]

View File

@ -0,0 +1,16 @@
---
source: milli/src/update/prefix_word_pairs/mod.rs
---
1 5 a [51, ]
1 amazing a [50, ]
1 an a [50, ]
1 at a [50, ]
1 rings a [51, ]
2 an a [50, ]
2 at a [50, 51, ]
2 bell a [51, ]
3 at a [50, ]
3 rings a [51, ]
3 the a [51, ]
4 bell a [51, ]