Make milli use edition 2021 (#4770)
* Make milli use edition 2021
* Add lifetime annotations to milli.
* Run cargo fmt
parent aac15f6719
commit 0a40a98bb6
73 changed files with 406 additions and 347 deletions
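Nearly every hunk below is the same mechanical change: a type that carries a lifetime parameter (`obkv::KvReader`, `heed::RwTxn`/`RoTxn`, `Tokenizer`, `Cow`, `arroy::Reader`, ...) was previously written with that lifetime silently elided, and is now spelled with an explicit `'_`, or tied to an already-named lifetime such as `'t`. This is exactly what the `elided_lifetimes_in_paths` lint flags (allow-by-default, part of the `rust_2018_idioms` group); that milli enables it alongside the edition bump is an assumption here, since the manifest and lint-configuration hunks are not part of this excerpt. A minimal, self-contained sketch of the rule, using a hypothetical `Reader` type as a stand-in:

```rust
// Deny the lint this PR's annotations satisfy (assumed enabled in milli).
#![deny(elided_lifetimes_in_paths)]

// Stand-in for types like obkv::KvReader<'a, K> or heed::RoTxn<'a>.
struct Reader<'a> {
    bytes: &'a [u8],
}

// Rejected under the lint above, because `Reader` hides a lifetime parameter:
//     fn first(reader: Reader) -> Option<&u8> { reader.bytes.first() }

// Accepted: the anonymous lifetime `'_` makes the elision explicit.
fn first(reader: Reader<'_>) -> Option<&u8> {
    reader.bytes.first()
}

fn main() {
    let data = [1u8, 2, 3];
    assert_eq!(first(Reader { bytes: &data }), Some(&1));
}
```

The `'_` spelling changes nothing about borrow checking; it only makes a signature honest about the fact that a borrow is involved, which is why this diff is large but behavior-neutral.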
@@ -36,7 +36,7 @@ impl Key for DelAdd {
 /// Addition: put all the values under DelAdd::Addition,
 /// DeletionAndAddition: put all the values under DelAdd::Deletion and DelAdd::Addition,
 pub fn into_del_add_obkv<K: obkv::Key + PartialOrd>(
-    reader: obkv::KvReader<K>,
+    reader: obkv::KvReader<'_, K>,
     operation: DelAddOperation,
     buffer: &mut Vec<u8>,
 ) -> Result<(), std::io::Error> {
@@ -46,7 +46,7 @@ pub fn into_del_add_obkv<K: obkv::Key + PartialOrd>(
 /// Akin to the [into_del_add_obkv] function but lets you
 /// conditionally define the `DelAdd` variant based on the obkv key.
 pub fn into_del_add_obkv_conditional_operation<K, F>(
-    reader: obkv::KvReader<K>,
+    reader: obkv::KvReader<'_, K>,
     buffer: &mut Vec<u8>,
     operation: F,
 ) -> std::io::Result<()>
@@ -86,8 +86,8 @@ pub enum DelAddOperation {
 /// putting each deletion obkv's keys under an DelAdd::Deletion
 /// and putting each addition obkv's keys under an DelAdd::Addition
 pub fn del_add_from_two_obkvs<K: obkv::Key + PartialOrd + Ord>(
-    deletion: &obkv::KvReader<K>,
-    addition: &obkv::KvReader<K>,
+    deletion: &obkv::KvReader<'_, K>,
+    addition: &obkv::KvReader<'_, K>,
     buffer: &mut Vec<u8>,
 ) -> Result<(), std::io::Error> {
     use itertools::merge_join_by;
@@ -121,7 +121,7 @@ pub fn del_add_from_two_obkvs<K: obkv::Key + PartialOrd + Ord>(
     writer.finish()
 }
 
-pub fn is_noop_del_add_obkv(del_add: KvReaderDelAdd) -> bool {
+pub fn is_noop_del_add_obkv(del_add: KvReaderDelAdd<'_>) -> bool {
     del_add.get(DelAdd::Deletion) == del_add.get(DelAdd::Addition)
 }
 
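The `use itertools::merge_join_by` context line above hints at how `del_add_from_two_obkvs` does its work: both obkvs are key-sorted, so a single merge pass can classify every key as deletion-only, addition-only, or present in both. A simplified sketch of that merge pattern, with plain tuples standing in for milli's obkv readers and writers (not the actual implementation; requires the `itertools` crate):

```rust
use itertools::{merge_join_by, EitherOrBoth};

fn main() {
    // Two key-sorted "obkvs": (key, value) pairs.
    let deletion = [("age", 1), ("name", 2)];
    let addition = [("city", 3), ("name", 4)];

    // One merge pass over both sorted sequences classifies each key.
    for entry in merge_join_by(deletion, addition, |(dk, _), (ak, _)| dk.cmp(ak)) {
        match entry {
            EitherOrBoth::Left((k, v)) => println!("{k}: Deletion({v})"),
            EitherOrBoth::Right((k, v)) => println!("{k}: Addition({v})"),
            EitherOrBoth::Both((k, dv), (_, av)) => {
                println!("{k}: Deletion({dv}) + Addition({av})")
            }
        }
    }
}
```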
@@ -67,7 +67,7 @@ impl<'i> FacetsUpdateBulk<'i> {
     }
 
     #[tracing::instrument(level = "trace", skip_all, target = "indexing::facets::bulk")]
-    pub fn execute(self, wtxn: &mut heed::RwTxn) -> Result<()> {
+    pub fn execute(self, wtxn: &mut heed::RwTxn<'_>) -> Result<()> {
         let Self { index, field_ids, group_size, min_level_size, facet_type, delta_data } = self;
 
         let db = match facet_type {
@@ -95,7 +95,7 @@ pub(crate) struct FacetsUpdateBulkInner<R: std::io::Read + std::io::Seek> {
     pub min_level_size: u8,
 }
 impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
-    pub fn update(mut self, wtxn: &mut RwTxn, field_ids: &[u16]) -> Result<()> {
+    pub fn update(mut self, wtxn: &mut RwTxn<'_>, field_ids: &[u16]) -> Result<()> {
         self.update_level0(wtxn)?;
         for &field_id in field_ids.iter() {
             self.clear_levels(wtxn, field_id)?;
@@ -114,7 +114,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
         Ok(())
     }
 
-    fn clear_levels(&self, wtxn: &mut heed::RwTxn, field_id: FieldId) -> Result<()> {
+    fn clear_levels(&self, wtxn: &mut heed::RwTxn<'_>, field_id: FieldId) -> Result<()> {
         let left = FacetGroupKey::<&[u8]> { field_id, level: 1, left_bound: &[] };
         let right = FacetGroupKey::<&[u8]> { field_id, level: u8::MAX, left_bound: &[] };
         let range = left..=right;
@@ -122,7 +122,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
         Ok(())
     }
 
-    fn update_level0(&mut self, wtxn: &mut RwTxn) -> Result<()> {
+    fn update_level0(&mut self, wtxn: &mut RwTxn<'_>) -> Result<()> {
         let delta_data = match self.delta_data.take() {
             Some(x) => x,
             None => return Ok(()),
@@ -198,7 +198,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
     fn compute_levels_for_field_id(
         &self,
         field_id: FieldId,
-        txn: &RoTxn,
+        txn: &RoTxn<'_>,
     ) -> Result<Vec<grenad::Reader<BufReader<File>>>> {
         let subwriters = self.compute_higher_levels(txn, field_id, 32, &mut |_, _| Ok(()))?;
 
@@ -207,7 +207,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
     #[allow(clippy::type_complexity)]
     fn read_level_0<'t>(
         &self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
         field_id: u16,
         handle_group: &mut dyn FnMut(&[RoaringBitmap], &'t [u8]) -> Result<()>,
     ) -> Result<()> {
@@ -261,7 +261,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
     #[allow(clippy::type_complexity)]
     fn compute_higher_levels<'t>(
         &self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
         field_id: u16,
         level: u8,
         handle_group: &mut dyn FnMut(&[RoaringBitmap], &'t [u8]) -> Result<()>,
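Two of the signatures above (`read_level_0` and `compute_higher_levels`) do more than insert `'_`: the transaction's formerly elided lifetime parameter is bound to the already-declared `'t`, the same lifetime as the `&'t [u8]` slices passed to `handle_group`. That makes it explicit that those slices borrow from the transaction and stay valid only as long as it does. A toy model of the pattern, with a hypothetical `RoTxn` standing in for heed's:

```rust
// Stand-in: the real heed::RoTxn<'e> borrows an LMDB environment.
struct RoTxn<'e> {
    data: &'e [u8],
}

// `&'t RoTxn<'t>` ties the transaction to the slices the callback receives,
// so the caller may keep them around for as long as the transaction lives.
fn read_level_0<'t>(rtxn: &'t RoTxn<'t>, handle_group: &mut dyn FnMut(&'t [u8])) {
    handle_group(rtxn.data);
}

fn main() {
    let env = vec![1u8, 2, 3];
    let rtxn = RoTxn { data: &env };
    let mut seen: Vec<&[u8]> = Vec::new();
    read_level_0(&rtxn, &mut |bytes| seen.push(bytes));
    assert_eq!(seen, [&[1u8, 2, 3][..]]);
}
```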
@@ -88,7 +88,7 @@ impl FacetsUpdateIncremental {
     }
 
     #[tracing::instrument(level = "trace", skip_all, target = "indexing::facets::incremental")]
-    pub fn execute(self, wtxn: &mut RwTxn) -> crate::Result<()> {
+    pub fn execute(self, wtxn: &mut RwTxn<'_>) -> crate::Result<()> {
         let mut current_field_id = None;
         let mut facet_level_may_be_updated = false;
         let mut iter = self.delta_data.into_stream_merger_iter()?;
@@ -172,7 +172,7 @@ impl FacetsUpdateIncrementalInner {
         field_id: u16,
         level: u8,
        facet_value: &[u8],
-        txn: &RoTxn,
+        txn: &RoTxn<'_>,
     ) -> Result<(FacetGroupKey<Vec<u8>>, FacetGroupValue)> {
         assert!(level > 0);
         match self.db.get_lower_than_or_equal_to(
@@ -215,7 +215,7 @@ impl FacetsUpdateIncrementalInner {
     /// See documentation of `insert_in_level`
     fn modify_in_level_0(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         field_id: u16,
         facet_value: &[u8],
         add_docids: Option<&RoaringBitmap>,
@@ -277,7 +277,7 @@ impl FacetsUpdateIncrementalInner {
     /// Returns `ModificationResult::Insert` if the split is successful.
     fn split_group(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         field_id: u16,
         level: u8,
         insertion_key: FacetGroupKey<Vec<u8>>,
@@ -346,7 +346,7 @@ impl FacetsUpdateIncrementalInner {
     /// This process is needed to avoid removing docids from a group node where the docid is present in several sub-nodes.
     fn trim_del_docids<'a>(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         field_id: u16,
         level: u8,
         insertion_key: &FacetGroupKey<Vec<u8>>,
@@ -383,7 +383,7 @@ impl FacetsUpdateIncrementalInner {
     ///
     fn modify_in_level(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         field_id: u16,
         level: u8,
         facet_value: &[u8],
@@ -523,7 +523,7 @@ impl FacetsUpdateIncrementalInner {
     /// Otherwise returns `false` if the tree-nodes have been modified in place.
     pub fn modify(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         field_id: u16,
         facet_value: &[u8],
         add_docids: Option<&RoaringBitmap>,
@@ -558,7 +558,7 @@ impl FacetsUpdateIncrementalInner {
     /// If it has, we must build an addition level above it.
     /// Then check whether the highest level is under `min_level_size`.
     /// If it has, we must remove the complete level.
-    pub(crate) fn add_or_delete_level(&self, txn: &mut RwTxn, field_id: u16) -> Result<()> {
+    pub(crate) fn add_or_delete_level(&self, txn: &mut RwTxn<'_>, field_id: u16) -> Result<()> {
         let highest_level = get_highest_level(txn, self.db, field_id)?;
         let mut highest_level_prefix = vec![];
         highest_level_prefix.extend_from_slice(&field_id.to_be_bytes());
@@ -577,7 +577,7 @@ impl FacetsUpdateIncrementalInner {
     }
 
     /// Delete a level.
-    fn delete_level(&self, txn: &mut RwTxn, highest_level_prefix: &[u8]) -> Result<()> {
+    fn delete_level(&self, txn: &mut RwTxn<'_>, highest_level_prefix: &[u8]) -> Result<()> {
         let mut to_delete = vec![];
         let mut iter =
             self.db.remap_types::<Bytes, Bytes>().prefix_iter(txn, highest_level_prefix)?;
@@ -599,7 +599,7 @@ impl FacetsUpdateIncrementalInner {
     /// Build an additional level for the field id.
     fn add_level(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         field_id: u16,
         highest_level: u8,
         highest_level_prefix: &[u8],
@@ -141,7 +141,7 @@ impl<'i> FacetsUpdate<'i> {
         }
     }
 
-    pub fn execute(self, wtxn: &mut heed::RwTxn) -> Result<()> {
+    pub fn execute(self, wtxn: &mut heed::RwTxn<'_>) -> Result<()> {
         if self.data_size == 0 {
             return Ok(());
         }
@@ -181,7 +181,7 @@ impl<'i> FacetsUpdate<'i> {
 }
 
 fn index_facet_search(
-    wtxn: &mut heed::RwTxn,
+    wtxn: &mut heed::RwTxn<'_>,
     normalized_delta_data: Merger<BufReader<File>, MergeFn>,
     index: &Index,
 ) -> Result<()> {
@@ -417,7 +417,7 @@ pub(crate) mod test_helpers {
 
         pub fn insert<'a>(
             &self,
-            wtxn: &'a mut RwTxn,
+            wtxn: &'a mut RwTxn<'_>,
             field_id: u16,
             key: &'a <BoundCodec as BytesEncode<'a>>::EItem,
             docids: &RoaringBitmap,
@@ -434,7 +434,7 @@ pub(crate) mod test_helpers {
         }
         pub fn delete_single_docid<'a>(
             &self,
-            wtxn: &'a mut RwTxn,
+            wtxn: &'a mut RwTxn<'_>,
             field_id: u16,
             key: &'a <BoundCodec as BytesEncode<'a>>::EItem,
             docid: u32,
@@ -444,7 +444,7 @@ pub(crate) mod test_helpers {
 
         pub fn delete<'a>(
             &self,
-            wtxn: &'a mut RwTxn,
+            wtxn: &'a mut RwTxn<'_>,
             field_id: u16,
             key: &'a <BoundCodec as BytesEncode<'a>>::EItem,
             docids: &RoaringBitmap,
@@ -462,7 +462,7 @@ pub(crate) mod test_helpers {
 
         pub fn bulk_insert<'a, 'b>(
             &self,
-            wtxn: &'a mut RwTxn,
+            wtxn: &'a mut RwTxn<'_>,
             field_ids: &[u16],
             els: impl IntoIterator<
                 Item = &'a ((u16, <BoundCodec as BytesEncode<'a>>::EItem), RoaringBitmap),
@@ -498,7 +498,7 @@ pub(crate) mod test_helpers {
             update.update(wtxn, field_ids).unwrap();
         }
 
-        pub fn verify_structure_validity(&self, txn: &RoTxn, field_id: u16) {
+        pub fn verify_structure_validity(&self, txn: &RoTxn<'_>, field_id: u16) {
             let mut field_id_prefix = vec![];
             field_id_prefix.extend_from_slice(&field_id.to_be_bytes());
 
@@ -24,7 +24,7 @@ use crate::{FieldId, Index, Result};
 /// - if reader.is_empty(), this function may panic in some cases
 #[tracing::instrument(level = "trace", skip_all, target = "indexing::documents")]
 pub fn enrich_documents_batch<R: Read + Seek>(
-    rtxn: &heed::RoTxn,
+    rtxn: &heed::RoTxn<'_>,
     index: &Index,
     autogenerate_docids: bool,
     reader: DocumentsBatchReader<R>,
@@ -145,9 +145,9 @@ pub fn enrich_documents_batch<R: Read + Seek>(
 #[tracing::instrument(level = "trace", skip(uuid_buffer, documents_batch_index, document)
 target = "indexing::documents")]
 fn fetch_or_generate_document_id(
-    document: &obkv::KvReader<FieldId>,
+    document: &obkv::KvReader<'_, FieldId>,
     documents_batch_index: &DocumentsBatchIndex,
-    primary_key: PrimaryKey,
+    primary_key: PrimaryKey<'_>,
     autogenerate_docids: bool,
     uuid_buffer: &mut [u8; uuid::fmt::Hyphenated::LENGTH],
     count: u32,
@@ -179,7 +179,7 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
 
 /// Check if any searchable fields of a document changed.
 fn searchable_fields_changed(
-    obkv: &KvReader<FieldId>,
+    obkv: &KvReader<'_, FieldId>,
     settings_diff: &InnerIndexSettingsDiff,
 ) -> bool {
     let searchable_fields = &settings_diff.new.searchable_fields_ids;
@@ -228,9 +228,9 @@ fn tokenizer_builder<'a>(
 /// Extract words mapped with their positions of a document,
 /// ensuring no Language detection mistakes was made.
 fn lang_safe_tokens_from_document<'a>(
-    obkv: &KvReader<FieldId>,
+    obkv: &KvReader<'_, FieldId>,
     settings: &InnerIndexSettings,
-    tokenizer: &Tokenizer,
+    tokenizer: &Tokenizer<'_>,
     max_positions_per_attributes: u32,
     del_add: DelAdd,
     buffers: &'a mut Buffers,
@@ -295,9 +295,9 @@ fn lang_safe_tokens_from_document<'a>(
 
 /// Extract words mapped with their positions of a document.
 fn tokens_from_document<'a>(
-    obkv: &KvReader<FieldId>,
+    obkv: &KvReader<'a, FieldId>,
     searchable_fields: &[FieldId],
-    tokenizer: &Tokenizer,
+    tokenizer: &Tokenizer<'_>,
     max_positions_per_attributes: u32,
     del_add: DelAdd,
     buffers: &'a mut Buffers,
@@ -68,7 +68,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
 
 /// Extract the finite floats lat and lng from two bytes slices.
 fn extract_lat_lng(
-    document: &obkv::KvReader<FieldId>,
+    document: &obkv::KvReader<'_, FieldId>,
     settings: &InnerIndexSettings,
     deladd: DelAdd,
     document_id: impl Fn() -> Value,
@@ -172,7 +172,7 @@ pub fn grenad_obkv_into_chunks<R: io::Read + io::Seek>(
 pub fn write_sorter_into_database<K, V, FS, FM>(
     sorter: Sorter<MergeFn>,
     database: &heed::Database<K, V>,
-    wtxn: &mut heed::RwTxn,
+    wtxn: &mut heed::RwTxn<'_>,
     index_is_empty: bool,
     serialize_value: FS,
     merge_values: FM,
@@ -45,8 +45,8 @@ pub fn keep_latest_obkv<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result<Cow<
 }
 
 pub fn merge_two_del_add_obkvs(
-    base: obkv::KvReaderU16,
-    update: obkv::KvReaderU16,
+    base: obkv::KvReaderU16<'_>,
+    update: obkv::KvReaderU16<'_>,
     merge_additions: bool,
     buffer: &mut Vec<u8>,
 ) {
@@ -758,7 +758,7 @@ where
     name = "index_documents_word_prefix_docids"
 )]
 fn execute_word_prefix_docids(
-    txn: &mut heed::RwTxn,
+    txn: &mut heed::RwTxn<'_>,
     merger: Merger<CursorClonableMmap, MergeFn>,
     word_docids_db: Database<Str, CboRoaringBitmapCodec>,
     word_prefix_docids_db: Database<Str, CboRoaringBitmapCodec>,
@@ -102,7 +102,7 @@ fn create_fields_mapping(
 
 impl<'a, 'i> Transform<'a, 'i> {
     pub fn new(
-        wtxn: &mut heed::RwTxn,
+        wtxn: &mut heed::RwTxn<'_>,
         index: &'i Index,
         indexer_settings: &'a IndexerConfig,
         index_documents_method: IndexDocumentsMethod,
@@ -155,7 +155,7 @@ impl<'a, 'i> Transform<'a, 'i> {
     pub fn read_documents<R, FP, FA>(
         &mut self,
         reader: EnrichedDocumentsBatchReader<R>,
-        wtxn: &mut heed::RwTxn,
+        wtxn: &mut heed::RwTxn<'_>,
         progress_callback: FP,
         should_abort: FA,
     ) -> Result<usize>
@@ -177,7 +177,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         let mut document_sorter_key_buffer = Vec::new();
         let mut documents_count = 0;
         let mut docid_buffer: Vec<u8> = Vec::new();
-        let mut field_buffer: Vec<(u16, Cow<[u8]>)> = Vec::new();
+        let mut field_buffer: Vec<(u16, Cow<'_, [u8]>)> = Vec::new();
         while let Some(enriched_document) = cursor.next_enriched_document()? {
             let EnrichedDocument { document, document_id } = enriched_document;
 
@@ -370,7 +370,7 @@ impl<'a, 'i> Transform<'a, 'i> {
     pub fn remove_documents<FA>(
         &mut self,
         mut to_remove: Vec<String>,
-        wtxn: &mut heed::RwTxn,
+        wtxn: &mut heed::RwTxn<'_>,
         should_abort: FA,
     ) -> Result<usize>
     where
@@ -459,7 +459,7 @@ impl<'a, 'i> Transform<'a, 'i> {
     pub fn remove_documents_from_db_no_batch<FA>(
         &mut self,
         to_remove: &RoaringBitmap,
-        wtxn: &mut heed::RwTxn,
+        wtxn: &mut heed::RwTxn<'_>,
         should_abort: FA,
     ) -> Result<usize>
     where
@@ -493,7 +493,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         &mut self,
         internal_docid: u32,
         external_docid: String,
-        txn: &heed::RoTxn,
+        txn: &heed::RoTxn<'_>,
         document_sorter_key_buffer: &mut Vec<u8>,
         document_sorter_value_buffer: &mut Vec<u8>,
     ) -> Result<()> {
@@ -552,7 +552,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         target = "indexing::transform"
     )]
     fn flatten_from_fields_ids_map(
-        obkv: &KvReader<FieldId>,
+        obkv: &KvReader<'_, FieldId>,
         fields_ids_map: &mut FieldsIdsMap,
     ) -> Result<Option<Vec<u8>>> {
         if obkv
@@ -566,7 +566,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         // We first extract all the key+value out of the obkv. If a value is not nested
         // we keep a reference on its value. If the value is nested we'll get its value
         // as an owned `Vec<u8>` after flattening it.
-        let mut key_value: Vec<(FieldId, Cow<[u8]>)> = Vec::new();
+        let mut key_value: Vec<(FieldId, Cow<'_, [u8]>)> = Vec::new();
 
         // the object we're going to use to store the fields that need to be flattened.
         let mut doc = serde_json::Map::new();
@@ -609,7 +609,7 @@ impl<'a, 'i> Transform<'a, 'i> {
 
     /// Generate an obkv from a slice of key / value sorted by key.
     fn create_obkv_from_key_value(
-        key_value: &mut [(FieldId, Cow<[u8]>)],
+        key_value: &mut [(FieldId, Cow<'_, [u8]>)],
         output_buffer: &mut Vec<u8>,
     ) -> Result<()> {
         debug_assert!(
@@ -677,7 +677,7 @@ impl<'a, 'i> Transform<'a, 'i> {
     #[tracing::instrument(level = "trace", skip_all, target = "indexing::transform")]
     pub(crate) fn output_from_sorter<F>(
         self,
-        wtxn: &mut heed::RwTxn,
+        wtxn: &mut heed::RwTxn<'_>,
         progress_callback: F,
     ) -> Result<TransformOutput>
     where
@@ -837,7 +837,7 @@ impl<'a, 'i> Transform<'a, 'i> {
     /// then fill the provided buffers with delta documents using KvWritterDelAdd.
     #[allow(clippy::too_many_arguments)] // need the vectors + fid, feel free to create a struct xo xo
     fn rebind_existing_document(
-        old_obkv: KvReader<FieldId>,
+        old_obkv: KvReader<'_, FieldId>,
         settings_diff: &InnerIndexSettingsDiff,
         modified_faceted_fields: &HashSet<String>,
         mut injected_vectors: serde_json::Map<String, serde_json::Value>,
@@ -990,7 +990,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         };
 
         let readers: Result<
-            BTreeMap<&str, (Vec<arroy::Reader<arroy::distances::Angular>>, &RoaringBitmap)>,
+            BTreeMap<&str, (Vec<arroy::Reader<'_, arroy::distances::Angular>>, &RoaringBitmap)>,
         > = settings_diff
             .embedding_config_updates
             .iter()
@@ -128,7 +128,7 @@ impl TypedChunk {
 /// Return new documents seen.
 #[tracing::instrument(level = "trace", skip_all, target = "indexing::write_db")]
 pub(crate) fn write_typed_chunk_into_index(
-    wtxn: &mut RwTxn,
+    wtxn: &mut RwTxn<'_>,
     index: &Index,
     settings_diff: &InnerIndexSettingsDiff,
     typed_chunks: Vec<TypedChunk>,
@@ -165,7 +165,7 @@ pub(crate) fn write_typed_chunk_into_index(
             let mut vectors_buffer = Vec::new();
             while let Some((key, reader)) = iter.next()? {
                 let mut writer: KvWriter<_, FieldId> = KvWriter::memory();
-                let reader: KvReader<FieldId> = KvReader::new(reader);
+                let reader: KvReader<'_, FieldId> = KvReader::new(reader);
 
                 let (document_id_bytes, external_id_bytes) = try_split_array_at(key)
                     .ok_or(SerializationError::Decoding { db_name: Some(DOCUMENTS) })?;
@@ -835,7 +835,7 @@ fn merge_word_docids_reader_into_fst(
 fn write_entries_into_database<R, K, V, FS, FM>(
     merger: Merger<R, MergeFn>,
     database: &heed::Database<K, V>,
-    wtxn: &mut RwTxn,
+    wtxn: &mut RwTxn<'_>,
     serialize_value: FS,
     merge_values: FM,
 ) -> Result<()>
@@ -872,7 +872,7 @@ where
 fn write_proximity_entries_into_database_additional_searchables<R>(
     merger: Merger<R, MergeFn>,
     database: &heed::Database<U8StrStrCodec, CboRoaringBitmapCodec>,
-    wtxn: &mut RwTxn,
+    wtxn: &mut RwTxn<'_>,
 ) -> Result<()>
 where
     R: io::Read + io::Seek,
@@ -44,7 +44,7 @@ where
 {
     fn deserialize_from_value<V: deserr::IntoValue>(
         value: deserr::Value<V>,
-        location: deserr::ValuePointerRef,
+        location: deserr::ValuePointerRef<'_>,
     ) -> std::result::Result<Self, E> {
         match value {
             deserr::Value::Null => Ok(Setting::Reset),
@@ -617,7 +617,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
     fn update_synonyms(&mut self) -> Result<bool> {
         match self.synonyms {
             Setting::Set(ref user_synonyms) => {
-                fn normalize(tokenizer: &Tokenizer, text: &str) -> Vec<String> {
+                fn normalize(tokenizer: &Tokenizer<'_>, text: &str) -> Vec<String> {
                     tokenizer
                         .tokenize(text)
                         .filter_map(|token| {
@@ -838,7 +838,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
     fn update_exact_words(&mut self) -> Result<()> {
         match self.exact_words {
             Setting::Set(ref mut words) => {
-                fn normalize(tokenizer: &Tokenizer, text: &str) -> String {
+                fn normalize(tokenizer: &Tokenizer<'_>, text: &str) -> String {
                     tokenizer.tokenize(text).map(|token| token.lemma().to_string()).collect()
                 }
 
@@ -1344,7 +1344,7 @@ pub(crate) struct InnerIndexSettings {
 }
 
 impl InnerIndexSettings {
-    pub fn from_index(index: &Index, rtxn: &heed::RoTxn) -> Result<Self> {
+    pub fn from_index(index: &Index, rtxn: &heed::RoTxn<'_>) -> Result<Self> {
         let stop_words = index.stop_words(rtxn)?;
         let stop_words = stop_words.map(|sw| sw.map_data(Vec::from).unwrap());
         let allowed_separators = index.allowed_separators(rtxn)?;
@@ -1407,7 +1407,7 @@ impl InnerIndexSettings {
     }
 
     // find and insert the new field ids
-    pub fn recompute_facets(&mut self, wtxn: &mut heed::RwTxn, index: &Index) -> Result<()> {
+    pub fn recompute_facets(&mut self, wtxn: &mut heed::RwTxn<'_>, index: &Index) -> Result<()> {
         let new_facets = self
             .fields_ids_map
             .iter()
@@ -1422,7 +1422,11 @@ impl InnerIndexSettings {
     }
 
     // find and insert the new field ids
-    pub fn recompute_searchables(&mut self, wtxn: &mut heed::RwTxn, index: &Index) -> Result<()> {
+    pub fn recompute_searchables(
+        &mut self,
+        wtxn: &mut heed::RwTxn<'_>,
+        index: &Index,
+    ) -> Result<()> {
         let searchable_fields = self
             .user_defined_searchable_fields
             .as_ref()