Mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-07-04 20:37:15 +02:00
Prefer using a stable rather than a random hash builder
This commit is contained in: parent 6b269795d2, commit a751972c57
8 changed files with 40 additions and 23 deletions
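
For context on the title: the default hash builder used by Rust hash maps (RandomState) is randomly seeded per process, while rustc-hash's FxBuildHasher is fixed and deterministic, which is what makes hashing reproducible across runs. A minimal standalone sketch of that difference, not part of the diff; it assumes rustc-hash 2.x, where FxBuildHasher is a unit struct usable as a value, as the hunks below also do.

use std::collections::hash_map::RandomState;
use std::hash::BuildHasher;

use rustc_hash::FxBuildHasher;

fn main() {
    // std's default RandomState seeds its hashers randomly per process,
    // so the same key hashes to a different value on every run.
    let random = RandomState::new();
    println!("randomly seeded: {}", random.hash_one("doc-42"));

    // FxBuildHasher is unseeded: the same key always hashes to the same
    // value, across runs and machines, keeping map behaviour reproducible.
    let stable = FxBuildHasher;
    println!("stable:          {}", stable.hash_one("doc-42"));
}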
(changed file, name not shown)

@@ -2,6 +2,7 @@ use std::ops::ControlFlow;

 use bumpalo::Bump;
 use bumparaw_collections::RawVec;
+use rustc_hash::FxBuildHasher;
 use serde::de::{DeserializeSeed, Deserializer as _, Visitor};
 use serde_json::value::RawValue;

@@ -394,7 +395,7 @@ impl<'a> Iterator for DeserrRawVecIter<'a> {
 }

 pub struct DeserrRawMap<'a> {
-    map: bumparaw_collections::RawMap<'a>,
+    map: bumparaw_collections::RawMap<'a, FxBuildHasher>,
     alloc: &'a Bump,
 }
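The hunk above makes DeserrRawMap name its hash builder in the type (RawMap<'a, FxBuildHasher>) instead of relying on a default, randomly seeded one. A rough sketch of the same pattern using a hypothetical StableMap wrapper around std's HashMap (StableMap is not from the codebase; rustc-hash 2.x assumed):

use std::collections::HashMap;
use std::hash::BuildHasher;

use rustc_hash::FxBuildHasher;

// Hypothetical wrapper mirroring how DeserrRawMap now carries a concrete
// FxBuildHasher in its type instead of the map's default hasher.
struct StableMap<'a, S: BuildHasher = FxBuildHasher> {
    inner: HashMap<&'a str, &'a str, S>,
}

impl<'a> StableMap<'a, FxBuildHasher> {
    fn new() -> Self {
        // The unit-struct hash builder is passed by value; every instance
        // hashes identically, so the map layout is stable between runs.
        StableMap { inner: HashMap::with_hasher(FxBuildHasher) }
    }
}

fn main() {
    let mut doc = StableMap::new();
    doc.inner.insert("title", "stable hashing");
    assert!(doc.inner.contains_key("title"));
}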
(changed file, name not shown)

@@ -5,6 +5,7 @@ use hashbrown::hash_map::Entry;
 use heed::RoTxn;
 use memmap2::Mmap;
 use rayon::slice::ParallelSlice;
+use rustc_hash::FxBuildHasher;
 use serde_json::value::RawValue;
 use serde_json::Deserializer;

@@ -166,8 +167,9 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(

         // Only guess the primary key if it is the first document
         let retrieved_primary_key = if previous_offset == 0 {
-            let doc =
-                RawMap::from_raw_value(doc, indexer).map(Some).map_err(UserError::SerdeJson)?;
+            let doc = RawMap::from_raw_value_and_hasher(doc, FxBuildHasher, indexer)
+                .map(Some)
+                .map_err(UserError::SerdeJson)?;

             let result = retrieve_or_guess_primary_key(
                 rtxn,

@@ -546,7 +548,8 @@ impl MergeChanges for MergeDocumentForReplacement {
             Some(InnerDocOp::Addition(DocumentOffset { content })) => {
                 let document = serde_json::from_slice(content).unwrap();
                 let document =
-                    RawMap::from_raw_value(document, doc_alloc).map_err(UserError::SerdeJson)?;
+                    RawMap::from_raw_value_and_hasher(document, FxBuildHasher, doc_alloc)
+                        .map_err(UserError::SerdeJson)?;

                 if is_new {
                     Ok(Some(DocumentChange::Insertion(Insertion::create(

@@ -633,7 +636,8 @@ impl MergeChanges for MergeDocumentForUpdates {
                 };
                 let document = serde_json::from_slice(content).unwrap();
                 let document =
-                    RawMap::from_raw_value(document, doc_alloc).map_err(UserError::SerdeJson)?;
+                    RawMap::from_raw_value_and_hasher(document, FxBuildHasher, doc_alloc)
+                        .map_err(UserError::SerdeJson)?;

                 Some(Versions::single(document))
             }

@@ -647,8 +651,9 @@ impl MergeChanges for MergeDocumentForUpdates {
                     };

                     let document = serde_json::from_slice(content).unwrap();
-                    let document = RawMap::from_raw_value(document, doc_alloc)
-                        .map_err(UserError::SerdeJson)?;
+                    let document =
+                        RawMap::from_raw_value_and_hasher(document, FxBuildHasher, doc_alloc)
+                            .map_err(UserError::SerdeJson)?;
                     Ok(document)
                 });
                 Versions::multiple(versions)?
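The call sites above, and those in the files that follow, all switch from RawMap::from_raw_value(value, alloc) to RawMap::from_raw_value_and_hasher(value, FxBuildHasher, alloc). A self-contained sketch of that call shape; parse_document is a hypothetical helper, and the return and error types are assumptions inferred from the .map_err(...SerdeJson) handling in the diff, not a documented API.

use bumpalo::Bump;
use bumparaw_collections::RawMap;
use rustc_hash::FxBuildHasher;
use serde_json::value::RawValue;

// Assumed shape: a raw JSON value plus an explicit hash builder and a bump
// allocator, returning the parsed map or a serde_json error.
fn parse_document<'b>(
    raw: &'b RawValue,
    bump: &'b Bump,
) -> Result<RawMap<'b, FxBuildHasher>, serde_json::Error> {
    RawMap::from_raw_value_and_hasher(raw, FxBuildHasher, bump)
}

fn main() -> Result<(), serde_json::Error> {
    let bump = Bump::new();
    let raw: &RawValue = serde_json::from_str(r#"{"id":1,"title":"stable hashing"}"#)?;
    let _doc = parse_document(raw, &bump)?;
    Ok(())
}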
(changed file, name not shown)

@@ -14,6 +14,7 @@ use heed::{RoTxn, RwTxn};
 use itertools::{merge_join_by, EitherOrBoth};
 pub use partial_dump::PartialDump;
 use rand::SeedableRng as _;
+use rustc_hash::FxBuildHasher;
 use time::OffsetDateTime;
 pub use update_by_function::UpdateByFunction;

@@ -776,7 +777,7 @@ pub fn retrieve_or_guess_primary_key<'a>(
     index: &Index,
     new_fields_ids_map: &mut FieldsIdsMap,
     primary_key_from_op: Option<&'a str>,
-    first_document: Option<RawMap<'a>>,
+    first_document: Option<RawMap<'a, FxBuildHasher>>,
 ) -> Result<StdResult<(PrimaryKey<'a>, bool), UserError>> {
     // make sure that we have a declared primary key, either fetching it from the index or attempting to guess it.
(changed file, name not shown)

@@ -2,6 +2,7 @@ use std::ops::DerefMut;

 use bumparaw_collections::RawMap;
 use rayon::iter::IndexedParallelIterator;
+use rustc_hash::FxBuildHasher;
 use serde_json::value::RawValue;

 use super::document_changes::{DocumentChangeContext, DocumentChanges};

@@ -76,8 +77,8 @@ where
             self.primary_key.extract_fields_and_docid(document, fields_ids_map, doc_alloc)?;
         let external_document_id = external_document_id.to_de();

-        let document =
-            RawMap::from_raw_value(document, doc_alloc).map_err(InternalError::SerdeJson)?;
+        let document = RawMap::from_raw_value_and_hasher(document, FxBuildHasher, doc_alloc)
+            .map_err(InternalError::SerdeJson)?;

         let insertion = Insertion::create(docid, external_document_id, Versions::single(document));
         Ok(Some(DocumentChange::Insertion(insertion)))
(changed file, name not shown)

@@ -3,6 +3,7 @@ use rayon::iter::IndexedParallelIterator;
 use rayon::slice::ParallelSlice as _;
 use rhai::{Dynamic, Engine, OptimizationLevel, Scope, AST};
 use roaring::RoaringBitmap;
+use rustc_hash::FxBuildHasher;

 use super::document_changes::DocumentChangeContext;
 use super::DocumentChanges;

@@ -160,8 +161,12 @@ impl<'index> DocumentChanges<'index> for UpdateByFunctionChanges<'index> {
                     if document_id != new_document_id {
                         Err(Error::UserError(UserError::DocumentEditionCannotModifyPrimaryKey))
                     } else {
-                        let raw_new_doc = RawMap::from_raw_value(raw_new_doc, doc_alloc)
-                            .map_err(InternalError::SerdeJson)?;
+                        let raw_new_doc = RawMap::from_raw_value_and_hasher(
+                            raw_new_doc,
+                            FxBuildHasher,
+                            doc_alloc,
+                        )
+                        .map_err(InternalError::SerdeJson)?;

                         Ok(Some(DocumentChange::Update(Update::create(
                             docid,