775: Fix clippy for Rust 1.67, allow `uninlined_format_args` r=dureuill a=dureuill

# Pull Request

This is the milli part of https://github.com/meilisearch/meilisearch/pull/3437.

Co-authored-by: Louis Dureuil <louis@meilisearch.com>
This commit is contained in:
bors[bot] 2023-01-31 10:29:24 +00:00 committed by GitHub
commit 33f61d2cd4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 43 additions and 42 deletions

View File

@ -65,6 +65,7 @@ jobs:
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with:
command: clippy command: clippy
args: -- --allow clippy::uninlined_format_args
fmt: fmt:
name: Run Rustfmt name: Run Rustfmt

View File

@ -348,10 +348,10 @@ impl Index {
/* external documents ids */ /* external documents ids */
/// Writes the external documents ids and internal ids (i.e. `u32`). /// Writes the external documents ids and internal ids (i.e. `u32`).
pub(crate) fn put_external_documents_ids<'a>( pub(crate) fn put_external_documents_ids(
&self, &self,
wtxn: &mut RwTxn, wtxn: &mut RwTxn,
external_documents_ids: &ExternalDocumentsIds<'a>, external_documents_ids: &ExternalDocumentsIds<'_>,
) -> heed::Result<()> { ) -> heed::Result<()> {
let ExternalDocumentsIds { hard, soft, .. } = external_documents_ids; let ExternalDocumentsIds { hard, soft, .. } = external_documents_ids;
let hard = hard.as_fst().as_bytes(); let hard = hard.as_fst().as_bytes();
@ -426,7 +426,7 @@ impl Index {
} }
/// Returns the `rtree` which associates coordinates to documents ids. /// Returns the `rtree` which associates coordinates to documents ids.
pub fn geo_rtree<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<RTree<GeoPoint>>> { pub fn geo_rtree(&self, rtxn: &RoTxn) -> Result<Option<RTree<GeoPoint>>> {
match self match self
.main .main
.get::<_, Str, SerdeBincode<RTree<GeoPoint>>>(rtxn, main_key::GEO_RTREE_KEY)? .get::<_, Str, SerdeBincode<RTree<GeoPoint>>>(rtxn, main_key::GEO_RTREE_KEY)?

View File

@ -182,15 +182,15 @@ impl<'t> Criterion for Proximity<'t> {
} }
} }
fn resolve_candidates<'t>( fn resolve_candidates(
ctx: &'t dyn Context, ctx: &dyn Context,
query_tree: &Operation, query_tree: &Operation,
proximity: u8, proximity: u8,
cache: &mut Cache, cache: &mut Cache,
wdcache: &mut WordDerivationsCache, wdcache: &mut WordDerivationsCache,
) -> Result<RoaringBitmap> { ) -> Result<RoaringBitmap> {
fn resolve_operation<'t>( fn resolve_operation(
ctx: &'t dyn Context, ctx: &dyn Context,
query_tree: &Operation, query_tree: &Operation,
proximity: u8, proximity: u8,
cache: &mut Cache, cache: &mut Cache,
@ -243,8 +243,8 @@ fn resolve_candidates<'t>(
Ok(result) Ok(result)
} }
fn mdfs_pair<'t>( fn mdfs_pair(
ctx: &'t dyn Context, ctx: &dyn Context,
left: &Operation, left: &Operation,
right: &Operation, right: &Operation,
proximity: u8, proximity: u8,
@ -298,8 +298,8 @@ fn resolve_candidates<'t>(
Ok(output) Ok(output)
} }
fn mdfs<'t>( fn mdfs(
ctx: &'t dyn Context, ctx: &dyn Context,
branches: &[Operation], branches: &[Operation],
proximity: u8, proximity: u8,
cache: &mut Cache, cache: &mut Cache,

View File

@ -239,15 +239,15 @@ fn alterate_query_tree(
Ok(query_tree) Ok(query_tree)
} }
fn resolve_candidates<'t>( fn resolve_candidates(
ctx: &'t dyn Context, ctx: &dyn Context,
query_tree: &Operation, query_tree: &Operation,
number_typos: u8, number_typos: u8,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
wdcache: &mut WordDerivationsCache, wdcache: &mut WordDerivationsCache,
) -> Result<RoaringBitmap> { ) -> Result<RoaringBitmap> {
fn resolve_operation<'t>( fn resolve_operation(
ctx: &'t dyn Context, ctx: &dyn Context,
query_tree: &Operation, query_tree: &Operation,
number_typos: u8, number_typos: u8,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
@ -276,8 +276,8 @@ fn resolve_candidates<'t>(
} }
} }
fn mdfs<'t>( fn mdfs(
ctx: &'t dyn Context, ctx: &dyn Context,
branches: &[Operation], branches: &[Operation],
mana: u8, mana: u8,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,

View File

@ -574,9 +574,9 @@ fn remove_from_word_docids(
Ok(()) Ok(())
} }
fn remove_docids_from_field_id_docid_facet_value<'i, 'a>( fn remove_docids_from_field_id_docid_facet_value(
index: &'i Index, index: &Index,
wtxn: &'a mut heed::RwTxn, wtxn: &mut heed::RwTxn,
facet_type: FacetType, facet_type: FacetType,
field_id: FieldId, field_id: FieldId,
to_remove: &RoaringBitmap, to_remove: &RoaringBitmap,

View File

@ -157,9 +157,9 @@ impl FacetsUpdateIncrementalInner {
/// ///
/// ## Return /// ## Return
/// See documentation of `insert_in_level` /// See documentation of `insert_in_level`
fn insert_in_level_0<'t>( fn insert_in_level_0(
&self, &self,
txn: &'t mut RwTxn, txn: &mut RwTxn,
field_id: u16, field_id: u16,
facet_value: &[u8], facet_value: &[u8],
docids: &RoaringBitmap, docids: &RoaringBitmap,
@ -211,9 +211,9 @@ impl FacetsUpdateIncrementalInner {
/// - `InsertionResult::Insert` means that inserting the `facet_value` into the `level` resulted /// - `InsertionResult::Insert` means that inserting the `facet_value` into the `level` resulted
/// in the addition of a new key in that level, and that therefore the number of children /// in the addition of a new key in that level, and that therefore the number of children
/// of the parent node should be incremented. /// of the parent node should be incremented.
fn insert_in_level<'t>( fn insert_in_level(
&self, &self,
txn: &'t mut RwTxn, txn: &mut RwTxn,
field_id: u16, field_id: u16,
level: u8, level: u8,
facet_value: &[u8], facet_value: &[u8],
@ -348,9 +348,9 @@ impl FacetsUpdateIncrementalInner {
} }
/// Insert the given facet value and corresponding document ids in the database. /// Insert the given facet value and corresponding document ids in the database.
pub fn insert<'t>( pub fn insert(
&self, &self,
txn: &'t mut RwTxn, txn: &mut RwTxn,
field_id: u16, field_id: u16,
facet_value: &[u8], facet_value: &[u8],
docids: &RoaringBitmap, docids: &RoaringBitmap,
@ -470,9 +470,9 @@ impl FacetsUpdateIncrementalInner {
/// in level 1, the key with the left bound `3` had to be changed to the next facet value (e.g. 4). /// in level 1, the key with the left bound `3` had to be changed to the next facet value (e.g. 4).
/// In that case `DeletionResult::Reduce` is returned. The parent of the reduced key may need to adjust /// In that case `DeletionResult::Reduce` is returned. The parent of the reduced key may need to adjust
/// its left bound as well. /// its left bound as well.
fn delete_in_level<'t>( fn delete_in_level(
&self, &self,
txn: &'t mut RwTxn, txn: &mut RwTxn,
field_id: u16, field_id: u16,
level: u8, level: u8,
facet_value: &[u8], facet_value: &[u8],
@ -529,9 +529,9 @@ impl FacetsUpdateIncrementalInner {
} }
} }
fn delete_in_level_0<'t>( fn delete_in_level_0(
&self, &self,
txn: &'t mut RwTxn, txn: &mut RwTxn,
field_id: u16, field_id: u16,
facet_value: &[u8], facet_value: &[u8],
docids: &RoaringBitmap, docids: &RoaringBitmap,
@ -557,9 +557,9 @@ impl FacetsUpdateIncrementalInner {
} }
} }
pub fn delete<'t>( pub fn delete(
&self, &self,
txn: &'t mut RwTxn, txn: &mut RwTxn,
field_id: u16, field_id: u16,
facet_value: &[u8], facet_value: &[u8],
docids: &RoaringBitmap, docids: &RoaringBitmap,

View File

@ -1,6 +1,6 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::fs::File; use std::fs::File;
use std::io::{self, Seek, SeekFrom}; use std::io::{self, Seek};
use std::time::Instant; use std::time::Instant;
use grenad::{CompressionType, Sorter}; use grenad::{CompressionType, Sorter};
@ -66,7 +66,7 @@ pub fn sorter_into_reader(
pub fn writer_into_reader(writer: grenad::Writer<File>) -> Result<grenad::Reader<File>> { pub fn writer_into_reader(writer: grenad::Writer<File>) -> Result<grenad::Reader<File>> {
let mut file = writer.into_inner()?; let mut file = writer.into_inner()?;
file.seek(SeekFrom::Start(0))?; file.rewind()?;
grenad::Reader::new(file).map_err(Into::into) grenad::Reader::new(file).map_err(Into::into)
} }

View File

@ -2,7 +2,7 @@ use std::borrow::Cow;
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::fs::File; use std::fs::File;
use std::io::{Read, Seek, SeekFrom}; use std::io::{Read, Seek};
use fxhash::FxHashMap; use fxhash::FxHashMap;
use heed::RoTxn; use heed::RoTxn;
@ -510,7 +510,7 @@ impl<'a, 'i> Transform<'a, 'i> {
let mut original_documents = writer.into_inner()?; let mut original_documents = writer.into_inner()?;
// We then extract the file and reset the seek to be able to read it again. // We then extract the file and reset the seek to be able to read it again.
original_documents.seek(SeekFrom::Start(0))?; original_documents.rewind()?;
// We create a final writer to write the new documents in order from the sorter. // We create a final writer to write the new documents in order from the sorter.
let mut writer = create_writer( let mut writer = create_writer(
@ -522,7 +522,7 @@ impl<'a, 'i> Transform<'a, 'i> {
// into this writer, extract the file and reset the seek to be able to read it again. // into this writer, extract the file and reset the seek to be able to read it again.
self.flattened_sorter.write_into_stream_writer(&mut writer)?; self.flattened_sorter.write_into_stream_writer(&mut writer)?;
let mut flattened_documents = writer.into_inner()?; let mut flattened_documents = writer.into_inner()?;
flattened_documents.seek(SeekFrom::Start(0))?; flattened_documents.rewind()?;
let mut new_external_documents_ids_builder: Vec<_> = let mut new_external_documents_ids_builder: Vec<_> =
self.new_external_documents_ids_builder.into_iter().collect(); self.new_external_documents_ids_builder.into_iter().collect();
@ -650,10 +650,10 @@ impl<'a, 'i> Transform<'a, 'i> {
// Once we have written all the documents, we extract // Once we have written all the documents, we extract
// the file and reset the seek to be able to read it again. // the file and reset the seek to be able to read it again.
let mut original_documents = original_writer.into_inner()?; let mut original_documents = original_writer.into_inner()?;
original_documents.seek(SeekFrom::Start(0))?; original_documents.rewind()?;
let mut flattened_documents = flattened_writer.into_inner()?; let mut flattened_documents = flattened_writer.into_inner()?;
flattened_documents.seek(SeekFrom::Start(0))?; flattened_documents.rewind()?;
let output = TransformOutput { let output = TransformOutput {
primary_key, primary_key,

View File

@ -7,15 +7,15 @@ fn set_stop_words(index: &Index, stop_words: &[&str]) {
let mut wtxn = index.write_txn().unwrap(); let mut wtxn = index.write_txn().unwrap();
let config = IndexerConfig::default(); let config = IndexerConfig::default();
let mut builder = Settings::new(&mut wtxn, &index, &config); let mut builder = Settings::new(&mut wtxn, index, &config);
let stop_words = stop_words.into_iter().map(|s| s.to_string()).collect(); let stop_words = stop_words.iter().map(|s| s.to_string()).collect();
builder.set_stop_words(stop_words); builder.set_stop_words(stop_words);
builder.execute(|_| (), || false).unwrap(); builder.execute(|_| (), || false).unwrap();
wtxn.commit().unwrap(); wtxn.commit().unwrap();
} }
fn test_phrase_search_with_stop_words_given_criteria(criteria: &[Criterion]) { fn test_phrase_search_with_stop_words_given_criteria(criteria: &[Criterion]) {
let index = super::setup_search_index_with_criteria(&criteria); let index = super::setup_search_index_with_criteria(criteria);
// Add stop_words // Add stop_words
set_stop_words(&index, &["a", "an", "the", "of"]); set_stop_words(&index, &["a", "an", "the", "of"]);