Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-11-04 20:18:55 +01:00
remove tests on soft-deleted
This commit is contained in:
parent f19332466e
commit b1d1355b69
@@ -558,91 +558,6 @@ pub(crate) mod test_helpers {
     }
 }
 
-#[cfg(test)]
-mod tests {
-    use big_s::S;
-    use maplit::hashset;
-
-    use crate::db_snap;
-    use crate::documents::documents_batch_reader_from_objects;
-    use crate::index::tests::TempIndex;
-
-    #[test]
-    fn replace_all_identical_soft_deletion_then_hard_deletion() {
-        let index = TempIndex::new_with_map_size(4096 * 1000 * 100);
-
-        index
-            .update_settings(|settings| {
-                settings.set_primary_key("id".to_owned());
-                settings.set_filterable_fields(hashset! { S("size") });
-            })
-            .unwrap();
-
-        let mut documents = vec![];
-        for i in 0..1000 {
-            documents.push(
-                serde_json::json! {
-                    {
-                        "id": i,
-                        "size": i % 250,
-                    }
-                }
-                .as_object()
-                .unwrap()
-                .clone(),
-            );
-        }
-
-        let documents = documents_batch_reader_from_objects(documents);
-        index.add_documents(documents).unwrap();
-
-        db_snap!(index, facet_id_f64_docids, "initial", @"777e0e221d778764b472c512617eeb3b");
-
-        let mut documents = vec![];
-        for i in 0..999 {
-            documents.push(
-                serde_json::json! {
-                    {
-                        "id": i,
-                        "size": i % 250,
-                        "other": 0,
-                    }
-                }
-                .as_object()
-                .unwrap()
-                .clone(),
-            );
-        }
-
-        let documents = documents_batch_reader_from_objects(documents);
-        index.add_documents(documents).unwrap();
-
-        db_snap!(index, facet_id_f64_docids, "replaced_1_soft", @"abba175d7bed727d0efadaef85a4388f");
-
-        // Then replace the last document while disabling soft_deletion
-        let mut documents = vec![];
-        for i in 999..1000 {
-            documents.push(
-                serde_json::json! {
-                    {
-                        "id": i,
-                        "size": i % 250,
-                        "other": 0,
-                    }
-                }
-                .as_object()
-                .unwrap()
-                .clone(),
-            );
-        }
-
-        let documents = documents_batch_reader_from_objects(documents);
-        index.add_documents(documents).unwrap();
-
-        db_snap!(index, facet_id_f64_docids, "replaced_2_hard", @"029e27a46d09c574ae949aa4289b45e6");
-    }
-}
-
 #[allow(unused)]
 #[cfg(test)]
 mod comparison_bench {
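The removed test builds its batches from plain serde_json objects that reuse the same "id" primary key, so each later batch replaces documents from the earlier one. Below is a minimal standalone sketch of that batch-building step; only serde_json is used, the milli-specific pieces (TempIndex, documents_batch_reader_from_objects, db_snap!) are left out, and the `build_documents` helper name is invented for the example.

```rust
// Minimal standalone sketch of the batch-building pattern used by the removed
// test. Only serde_json is required; the milli-specific pieces (TempIndex,
// documents_batch_reader_from_objects, db_snap!) are intentionally left out.
use serde_json::{json, Map, Value};

// Hypothetical helper: build documents shaped like the ones in the removed
// test. Reusing the same "id" values in a later batch is what makes the
// second batch replace the first when it is indexed.
fn build_documents(range: std::ops::Range<i64>, with_other: bool) -> Vec<Map<String, Value>> {
    range
        .map(|i| {
            let mut doc = json!({ "id": i, "size": i % 250 });
            if with_other {
                doc["other"] = json!(0);
            }
            doc.as_object().unwrap().clone()
        })
        .collect()
}

fn main() {
    let initial = build_documents(0..1000, false); // first batch: ids 0..999
    let replaced = build_documents(0..999, true); // second batch replaces 999 of them
    assert_eq!(initial.len(), 1000);
    assert_eq!(replaced.len(), 999);
    assert_eq!(replaced[0]["id"], initial[0]["id"]); // same primary key => replacement
}
```

The removed test feeds three such batches to the index and snapshots facet_id_f64_docids after each one ("initial", "replaced_1_soft", "replaced_2_hard").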
@@ -763,11 +763,10 @@ mod tests {
         assert_eq!(count, 1);
 
         // Check that we get only one document from the database.
-        // Since the document has been deleted and re-inserted, its internal docid has been incremented to 1
-        let docs = index.documents(&rtxn, Some(1)).unwrap();
+        let docs = index.documents(&rtxn, Some(0)).unwrap();
         assert_eq!(docs.len(), 1);
         let (id, doc) = docs[0];
-        assert_eq!(id, 1);
+        assert_eq!(id, 0);
 
         // Check that this document is equal to the last one sent.
         let mut doc_iter = doc.iter();
@@ -828,7 +827,7 @@ mod tests {
         assert_eq!(count, 3);
 
         // the document 0 has been deleted and reinserted with the id 3
-        let docs = index.documents(&rtxn, vec![1, 2, 3]).unwrap();
+        let docs = index.documents(&rtxn, vec![1, 2, 0]).unwrap();
         let kevin_position =
             docs.iter().position(|(_, d)| d.get(0).unwrap() == br#""updated kevin""#).unwrap();
         assert_eq!(kevin_position, 2);
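Both assertion changes above follow from the same behavioural shift. With soft deletion, the previous version of a replaced document kept its internal docid reserved, so the re-inserted document received a fresh docid (hence fetching with `Some(1)` and `vec![1, 2, 3]`); with hard deletion, the old entry is removed and its docid becomes available again (hence `Some(0)` and `vec![1, 2, 0]`). The sketch below is a simplified, standalone model of that bookkeeping written for illustration only; the type and method names are invented here and this is not milli's implementation.

```rust
// Simplified, standalone model of internal docid bookkeeping, written only to
// illustrate why the assertions above changed. This is NOT milli's
// implementation; all names here are invented for the example.
use std::collections::BTreeSet;

#[derive(Default)]
struct ToyIndex {
    live: BTreeSet<u32>,         // docids of visible documents
    soft_deleted: BTreeSet<u32>, // docids kept around but hidden (old behaviour)
}

impl ToyIndex {
    /// Smallest docid that is referenced nowhere.
    fn first_free_id(&self) -> u32 {
        (0u32..)
            .find(|id| !self.live.contains(id) && !self.soft_deleted.contains(id))
            .unwrap()
    }

    /// Old behaviour: replacing a document soft-deletes the previous version,
    /// so its docid stays reserved and the new version gets a fresh one.
    fn replace_soft(&mut self, old_id: u32) -> u32 {
        self.live.remove(&old_id);
        self.soft_deleted.insert(old_id);
        let new_id = self.first_free_id();
        self.live.insert(new_id);
        new_id
    }

    /// New behaviour: replacing a document hard-deletes the previous version,
    /// freeing its docid for immediate reuse.
    fn replace_hard(&mut self, old_id: u32) -> u32 {
        self.live.remove(&old_id);
        let new_id = self.first_free_id();
        self.live.insert(new_id);
        new_id
    }
}

fn main() {
    // One document indexed as docid 0, then replaced.
    let mut soft = ToyIndex::default();
    soft.live.insert(0);
    assert_eq!(soft.replace_soft(0), 1); // matches the removed `assert_eq!(id, 1)`

    let mut hard = ToyIndex::default();
    hard.live.insert(0);
    assert_eq!(hard.replace_hard(0), 0); // matches the new `assert_eq!(id, 0)`
}
```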
@@ -357,139 +357,6 @@ mod tests {
         db_snap!(index, prefix_word_pair_proximity_docids, "reupdate");
     }
 
-    #[test]
-    fn soft_delete_and_reupdate() {
-        let mut index = TempIndex::new();
-        index.index_documents_config.words_prefix_threshold = Some(50);
-
-        index
-            .update_settings(|settings| {
-                settings.set_primary_key("id".to_owned());
-                settings.set_searchable_fields(vec!["text".to_owned()]);
-            })
-            .unwrap();
-
-        let batch_reader_from_documents = |documents| {
-            let mut builder = DocumentsBatchBuilder::new(Vec::new());
-            for object in documents {
-                builder.append_json_object(&object).unwrap();
-            }
-            DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
-        };
-
-        let mut documents = documents_with_enough_different_words_for_prefixes(&["a"], 0);
-        // now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
-        documents.push(
-            serde_json::json!({
-                "id": 9000,
-                "text": "At an amazing and beautiful house"
-            })
-            .as_object()
-            .unwrap()
-            .clone(),
-        );
-        documents.push(
-            serde_json::json!({
-                "id": 9001,
-                "text": "The bell rings at 5 am"
-            })
-            .as_object()
-            .unwrap()
-            .clone(),
-        );
-
-        let documents = batch_reader_from_documents(documents);
-        index.add_documents(documents).unwrap();
-
-        db_snap!(index, documents_ids, "initial");
-        db_snap!(index, word_docids, "initial");
-        db_snap!(index, word_prefix_pair_proximity_docids, "initial");
-        db_snap!(index, prefix_word_pair_proximity_docids, "initial");
-
-        index.delete_document("9000");
-
-        db_snap!(index, documents_ids, "first_delete");
-        db_snap!(index, word_docids, "first_delete");
-        db_snap!(index, word_prefix_pair_proximity_docids, "first_delete");
-        db_snap!(index, prefix_word_pair_proximity_docids, "first_delete");
-
-        index.delete_documents((0..50).map(|id| id.to_string()).collect());
-
-        db_snap!(index, documents_ids, "second_delete");
-        db_snap!(index, word_docids, "second_delete");
-        db_snap!(index, word_prefix_pair_proximity_docids, "second_delete");
-        db_snap!(index, prefix_word_pair_proximity_docids, "second_delete");
-
-        let documents = documents_with_enough_different_words_for_prefixes(&["b"], 1000);
-        // now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
-
-        index.add_documents(batch_reader_from_documents(documents)).unwrap();
-
-        db_snap!(index, documents_ids, "reupdate");
-        db_snap!(index, word_docids, "reupdate");
-        db_snap!(index, word_prefix_pair_proximity_docids, "reupdate");
-        db_snap!(index, prefix_word_pair_proximity_docids, "reupdate");
-    }
-
-    #[test]
-    fn replace_soft_deletion() {
-        let mut index = TempIndex::new();
-        index.index_documents_config.words_prefix_threshold = Some(50);
-        index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments;
-
-        index
-            .update_settings(|settings| {
-                settings.set_primary_key("id".to_owned());
-                settings.set_searchable_fields(vec!["text".to_owned()]);
-            })
-            .unwrap();
-
-        let batch_reader_from_documents = |documents| {
-            let mut builder = DocumentsBatchBuilder::new(Vec::new());
-            for object in documents {
-                builder.append_json_object(&object).unwrap();
-            }
-            DocumentsBatchReader::from_reader(Cursor::new(builder.into_inner().unwrap())).unwrap()
-        };
-
-        let mut documents = documents_with_enough_different_words_for_prefixes(&["a"], 0);
-        // now we add some documents where the text should populate the word_prefix_pair_proximity_docids database
-        documents.push(
-            serde_json::json!({
-                "id": 9000,
-                "text": "At an amazing house"
-            })
-            .as_object()
-            .unwrap()
-            .clone(),
-        );
-        documents.push(
-            serde_json::json!({
-                "id": 9001,
-                "text": "The bell rings"
-            })
-            .as_object()
-            .unwrap()
-            .clone(),
-        );
-
-        let documents = batch_reader_from_documents(documents);
-        index.add_documents(documents).unwrap();
-
-        db_snap!(index, documents_ids, "initial");
-        db_snap!(index, word_docids, "initial");
-        db_snap!(index, word_prefix_pair_proximity_docids, "initial");
-        db_snap!(index, prefix_word_pair_proximity_docids, "initial");
-
-        let documents = documents_with_enough_different_words_for_prefixes(&["b"], 0);
-        index.add_documents(batch_reader_from_documents(documents)).unwrap();
-
-        db_snap!(index, documents_ids, "replaced");
-        db_snap!(index, word_docids, "replaced");
-        db_snap!(index, word_prefix_pair_proximity_docids, "replaced");
-        db_snap!(index, prefix_word_pair_proximity_docids, "replaced");
-    }
-
     #[test]
     fn replace_hard_deletion() {
         let mut index = TempIndex::new();
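The two removed tests above both call a file-local helper, `documents_with_enough_different_words_for_prefixes`, whose body is not part of this diff. Judging from its name and from `words_prefix_threshold` being set to `Some(50)`, it presumably generates documents that together contain at least 50 different words sharing each requested one-letter prefix, which is what makes entries appear in the prefix databases being snapshotted. The following is a hypothetical stand-in written only to illustrate that idea; it is not the real helper and deliberately uses a different name.

```rust
// Hypothetical stand-in for the file-local helper the removed tests call; the
// real `documents_with_enough_different_words_for_prefixes` is not part of
// this diff, so this only illustrates the idea: emit documents that together
// contain at least 50 distinct words per requested prefix, enough to cross
// the `words_prefix_threshold` of 50 set by the tests.
use serde_json::{json, Map, Value};

fn documents_with_shared_prefixes(prefixes: &[&str], start_id: usize) -> Vec<Map<String, Value>> {
    let mut documents = Vec::new();
    for (p, prefix) in prefixes.iter().enumerate() {
        // 50 distinct words per prefix, e.g. "a0", "a1", ..., "a49".
        for i in 0..50 {
            let id = start_id + p * 50 + i;
            documents.push(
                json!({
                    "id": id,
                    "text": format!("{prefix}{i} lives in a house"),
                })
                .as_object()
                .unwrap()
                .clone(),
            );
        }
    }
    documents
}

fn main() {
    let docs = documents_with_shared_prefixes(&["a"], 0);
    assert_eq!(docs.len(), 50);
    assert_eq!(docs[0]["text"], "a0 lives in a house");
}
```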