Clean up the tests

Kerollmops 2024-12-18 15:06:29 +01:00
parent 6cdae51626
commit bfaebb50b2
6 changed files with 9 additions and 132 deletions

@@ -1370,7 +1370,7 @@ impl Index {
             let (_docid, compressed_obkv) = entry?;
             let obkv = compressed_obkv
                 .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())?;
-            match primary_key.document_id(&obkv, &fields)? {
+            match primary_key.document_id(obkv, &fields)? {
                 Ok(document_id) => Ok(document_id),
                 Err(_) => Err(InternalError::DocumentsError(
                     crate::documents::Error::InvalidDocumentFormat,
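
This call-site change drops a needless borrow: `obkv` is already a reference, so `&obkv` only adds a second layer of indirection that the compiler auto-dereferences (the shape clippy reports as `needless_borrow`). The same one-character fix recurs in several hunks below. A minimal standalone sketch of the pattern, using illustrative names that are not from the commit:

    fn first_byte(bytes: &[u8]) -> Option<u8> {
        bytes.first().copied()
    }

    fn main() {
        let data: &[u8] = &[1, 2, 3];
        // `&data` is a `&&[u8]` that only compiles thanks to auto-deref;
        // clippy flags the extra borrow as `needless_borrow`.
        assert_eq!(first_byte(&data), Some(1));
        // Passing the existing reference directly is the idiomatic form.
        assert_eq!(first_byte(data), Some(1));
    }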

@@ -948,128 +948,6 @@ mod tests {
         drop(rtxn);
     }
-
-    #[test]
-    fn not_auto_generated_documents_ids() {
-        let index = TempIndex::new();
-        let result = index.add_documents(documents!([
-            { "name": "kevin" },
-            { "name": "kevina" },
-            { "name": "benoit" }
-        ]));
-        assert!(result.is_err());
-
-        // Check that there is no document.
-        let rtxn = index.read_txn().unwrap();
-        let count = index.number_of_documents(&rtxn).unwrap();
-        assert_eq!(count, 0);
-        drop(rtxn);
-    }
-
-    #[test]
-    fn simple_auto_generated_documents_ids() {
-        let mut index = TempIndex::new();
-        let mut buffer = Vec::new();
-        index.index_documents_config.autogenerate_docids = true;
-
-        // First we send 3 documents with ids from 1 to 3.
-        index
-            .add_documents(documents!([
-                { "name": "kevin" },
-                { "name": "kevina" },
-                { "name": "benoit" }
-            ]))
-            .unwrap();
-
-        // Check that there is 3 documents now.
-        let rtxn = index.read_txn().unwrap();
-        let dictionary = index.document_decompression_dictionary(&rtxn).unwrap();
-        let count = index.number_of_documents(&rtxn).unwrap();
-        assert_eq!(count, 3);
-        let compressed_docs = index.compressed_documents(&rtxn, vec![0, 1, 2]).unwrap();
-        let (_id, compressed_obkv) = compressed_docs
-            .iter()
-            .find(|(_id, compressed_doc)| {
-                let doc = compressed_doc
-                    .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())
-                    .unwrap();
-                doc.get(0) == Some(br#""kevin""#)
-            })
-            .unwrap();
-        let obkv = compressed_obkv
-            .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())
-            .unwrap();
-        let kevin_uuid: String = serde_json::from_slice(obkv.get(1).unwrap()).unwrap();
-        drop(dictionary);
-        drop(rtxn);
-
-        // Second we send 1 document with the generated uuid, to erase the previous ones.
-        index.add_documents(documents!([ { "name": "updated kevin", "id": kevin_uuid } ])).unwrap();
-
-        // Check that there is **always** 3 documents.
-        let rtxn = index.read_txn().unwrap();
-        let dictionary = index.document_decompression_dictionary(&rtxn).unwrap();
-        let count = index.number_of_documents(&rtxn).unwrap();
-        assert_eq!(count, 3);
-
-        // the document 0 has been deleted and reinserted with the id 3
-        let mut compressed_docs = index.compressed_documents(&rtxn, vec![1, 2, 0]).unwrap();
-        let kevin_position = compressed_docs
-            .iter()
-            .position(|(_, compressed_doc)| {
-                let doc = compressed_doc
-                    .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())
-                    .unwrap();
-                doc.get(0).unwrap() == br#""updated kevin""#
-            })
-            .unwrap();
-        assert_eq!(kevin_position, 2);
-        let (_, compressed_doc) = compressed_docs.remove(kevin_position);
-        let doc = compressed_doc
-            .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())
-            .unwrap();
-
-        // Check that this document is equal to the last
-        // one sent and that an UUID has been generated.
-        assert_eq!(doc.get(0), Some(&br#""updated kevin""#[..]));
-        // This is an UUID, it must be 36 bytes long plus the 2 surrounding string quotes (").
-        assert_eq!(doc.get(1).unwrap().len(), 36 + 2);
-        drop(dictionary);
-        drop(rtxn);
-    }
-
-    #[test]
-    fn reordered_auto_generated_documents_ids() {
-        let mut index = TempIndex::new();
-
-        // First we send 3 documents with ids from 1 to 3.
-        index
-            .add_documents(documents!([
-                { "id": 1, "name": "kevin" },
-                { "id": 2, "name": "kevina" },
-                { "id": 3, "name": "benoit" }
-            ]))
-            .unwrap();
-
-        // Check that there is 3 documents now.
-        let rtxn = index.read_txn().unwrap();
-        let count = index.number_of_documents(&rtxn).unwrap();
-        assert_eq!(count, 3);
-        drop(rtxn);
-
-        // Second we send 1 document without specifying the id.
-        index.index_documents_config.autogenerate_docids = true;
-        index.add_documents(documents!([ { "name": "new kevin" } ])).unwrap();
-
-        // Check that there is 4 documents now.
-        let rtxn = index.read_txn().unwrap();
-        let count = index.number_of_documents(&rtxn).unwrap();
-        assert_eq!(count, 4);
-        drop(rtxn);
-    }
 
     #[test]
     fn empty_update() {
         let index = TempIndex::new();

@@ -214,7 +214,7 @@ pub(crate) fn write_typed_chunk_into_index(
             match dictionary.as_ref() {
                 Some(dictionary) => {
                     let doc = KvReaderU16::from_slice(&uncompressed_document_bytes);
-                    let compressed = CompressedObkvU16::with_dictionary(&doc, dictionary)?;
+                    let compressed = CompressedObkvU16::with_dictionary(doc, dictionary)?;
                     db.put(wtxn, &docid, compressed.as_bytes())?
                 }
                 None => db.put(wtxn, &docid, &uncompressed_document_bytes)?,

@@ -137,7 +137,7 @@ impl<'t, Mapper: FieldIdMapper> DocumentFromDb<'t, Mapper> {
         match index.compressed_document(rtxn, docid)? {
             Some(compressed) => {
                 let content = match db_document_decompression_dictionary {
-                    Some(dictionary) => compressed.decompress_into_bump(doc_alloc, &dictionary)?,
+                    Some(dictionary) => compressed.decompress_into_bump(doc_alloc, dictionary)?,
                    None => compressed.as_non_compressed(),
                };
                Ok(Some(Self { fields_ids_map: db_fields_ids_map, content }))

@@ -66,7 +66,7 @@ impl<'pl> DocumentChanges<'pl> for DocumentDeletionChanges<'pl> {
     {
         let compressed = context.index.compressed_document(&context.rtxn, *docid)?.unwrap();
         let current = match context.db_document_decompression_dictionary {
-            Some(dict) => compressed.decompress_into_bump(&context.doc_alloc, &dict)?,
+            Some(dict) => compressed.decompress_into_bump(&context.doc_alloc, dict)?,
             None => compressed.as_non_compressed(),
         };
@@ -151,11 +151,10 @@ mod test {
         let fields_ids_map =
             RwLock::new(FieldIdMapWithMetadata::new(db_fields_ids_map.clone(), metadata_builder));
-        let db_document_decompression_dictionary =
-            match index.document_compression_raw_dictionary(&rtxn).unwrap() {
-                Some(dictionary) => Some(zstd::dict::DecoderDictionary::copy(dictionary)),
-                None => None,
-            };
+        let db_document_decompression_dictionary = index
+            .document_compression_raw_dictionary(&rtxn)
+            .unwrap()
+            .map(zstd::dict::DecoderDictionary::copy);
         let fields_ids_map_store = ThreadLocal::new();
         let mut extractor_allocs = ThreadLocal::new();

@@ -108,7 +108,7 @@ impl<'index> DocumentChanges<'index> for UpdateByFunctionChanges<'index> {
         // their IDs comes from the list of documents ids.
         let compressed_document = index.compressed_document(txn, docid)?.unwrap();
         let document = match db_document_decompression_dictionary {
-            Some(dictionary) => compressed_document.decompress_into_bump(doc_alloc, &dictionary)?,
+            Some(dictionary) => compressed_document.decompress_into_bump(doc_alloc, dictionary)?,
             None => compressed_document.as_non_compressed(),
         };
         let rhai_document = obkv_to_rhaimap(document, db_fields_ids_map)?;