Make cargo fmt happy

Clément Renault 2025-04-01 11:27:41 +02:00
parent 64477aac60
commit a0bfcf8872
13 changed files with 29 additions and 38 deletions


@@ -169,7 +169,8 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
                 )
             })
         })
-        .transpose()?.is_some_and(|s| s.to_lowercase() == "true"))
+        .transpose()?
+        .is_some_and(|s| s.to_lowercase() == "true"))
 }
 
 #[derive(Debug, Serialize, ToSchema)]
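
Note: every hunk in this commit is a formatting-only rewrite; the commit message suggests it was produced by running `cargo fmt` across the workspace (the exact invocation is an assumption). Most hunks simply re-wrap chains ending in `Option::is_some_and` or `Option::is_none_or`, stabilized in Rust 1.70 and 1.82 respectively. A minimal, self-contained sketch of what those two combinators do, with made-up values rather than anything from the diff:

    fn main() {
        // is_some_and: None => false, Some(v) => predicate(v).
        let header: Option<&str> = Some("TRUE");
        assert!(header.is_some_and(|s| s.to_lowercase() == "true"));
        assert!(!None::<&str>.is_some_and(|s| s.to_lowercase() == "true"));

        // is_none_or: None => true, Some(v) => predicate(v).
        assert!(None::<u32>.is_none_or(|n| n > 10));
        assert!(!Some(3).is_none_or(|n| n > 10));
    }

Behaviorally, `opt.is_some_and(f)` is the modern spelling of `opt.map_or(false, f)` and `opt.is_none_or(f)` of `opt.map_or(true, f)`; rustfmt only decides where the chain breaks, never what it computes.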


@@ -236,7 +236,8 @@ impl<'a> Filter<'a> {
         let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
         for fid in self.condition.fids(MAX_FILTER_DEPTH) {
             let attribute = fid.value();
-            if matching_features(attribute, &filterable_attributes_rules).is_some_and(|(_, features)| features.is_filterable())
+            if matching_features(attribute, &filterable_attributes_rules)
+                .is_some_and(|(_, features)| features.is_filterable())
             {
                 continue;
             }


@@ -191,8 +191,7 @@ impl<'a> Search<'a> {
             let filterable_fields = ctx.index.filterable_attributes_rules(ctx.txn)?;
             // check if the distinct field is in the filterable fields
             let matched_rule = matching_features(distinct, &filterable_fields);
-            let is_filterable =
-                matched_rule.is_some_and(|(_, features)| features.is_filterable());
+            let is_filterable = matched_rule.is_some_and(|(_, features)| features.is_filterable());
             if !is_filterable {
                 // if not, remove the hidden fields from the filterable fields to generate the error message


@@ -71,7 +71,8 @@ pub fn find_best_match_interval(matches: &[Match], crop_size: usize) -> [&Match;
     let mut save_best_interval = |interval_first, interval_last| {
         let interval_score = get_interval_score(&matches[interval_first..=interval_last]);
         let is_interval_score_better = &best_interval
-            .as_ref().is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);
+            .as_ref()
+            .is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);
         if *is_interval_score_better {
             best_interval = Some(MatchIntervalWithScore {


@@ -101,8 +101,7 @@ impl FacetsUpdateIncremental {
             let key = FacetGroupKeyCodec::<BytesRefCodec>::bytes_decode(key)
                 .map_err(heed::Error::Encoding)?;
-            if facet_level_may_be_updated
-                && current_field_id.is_some_and(|fid| fid != key.field_id)
+            if facet_level_may_be_updated && current_field_id.is_some_and(|fid| fid != key.field_id)
             {
                 // Only add or remove a level after making all the field modifications.
                 self.inner.add_or_delete_level(wtxn, current_field_id.unwrap())?;


@@ -159,10 +159,12 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
             let del_geo_support = settings_diff
                 .old
-                .geo_fields_ids.is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
+                .geo_fields_ids
+                .is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
             let add_geo_support = settings_diff
                 .new
-                .geo_fields_ids.is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
+                .geo_fields_ids
+                .is_some_and(|(lat, lng)| field_id == lat || field_id == lng);
             let del_filterable_values =
                 del_value.map(|value| extract_facet_values(&value, del_geo_support));
             let add_filterable_values =


@@ -512,10 +512,8 @@ where
                 InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
             )?;
             let embedder_config = settings_diff.embedding_config_updates.get(&embedder_name);
-            let was_quantized = settings_diff
-                .old
-                .embedding_configs
-                .get(&embedder_name).is_some_and(|conf| conf.2);
+            let was_quantized =
+                settings_diff.old.embedding_configs.get(&embedder_name).is_some_and(|conf| conf.2);
             let is_quantizing = embedder_config.is_some_and(|action| action.is_being_quantized);
             pool.install(|| {


@@ -664,10 +664,8 @@ pub(crate) fn write_typed_chunk_into_index(
             let embedder_index = index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
                 InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
             )?;
-            let binary_quantized = settings_diff
-                .old
-                .embedding_configs
-                .get(&embedder_name).is_some_and(|conf| conf.2);
+            let binary_quantized =
+                settings_diff.old.embedding_configs.get(&embedder_name).is_some_and(|conf| conf.2);
             // FIXME: allow customizing distance
             let writer = ArroyWrapper::new(index.vector_arroy, embedder_index, binary_quantized);


@@ -269,7 +269,8 @@ fn process_document_tokens<'doc>(
         }
         // drain the proximity window until the head word is considered close to the word we are inserting.
         while word_positions
-            .front().is_some_and(|(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
+            .front()
+            .is_some_and(|(_w, p)| index_proximity(*p as u32, pos as u32) >= MAX_DISTANCE)
         {
             word_positions_into_word_pair_proximity(word_positions, word_pair_proximity);
         }


@@ -149,13 +149,11 @@ pub struct IndexingContext<
     pub grenad_parameters: &'indexer GrenadParameters,
 }
 
-impl<
-        MSP,
-    > Copy
+impl<MSP> Copy
     for IndexingContext<
         '_, // invariant lifetime of fields ids map
         '_, // covariant lifetime of objects that are borrowed during the entire indexing operation
         '_, // covariant lifetime of the index
         MSP,
     >
 where
@@ -163,13 +161,11 @@ where
 {
 }
 
-impl<
-        MSP,
-    > Clone
+impl<MSP> Clone
     for IndexingContext<
         '_, // invariant lifetime of fields ids map
        '_, // covariant lifetime of objects that are borrowed during the entire indexing operation
         '_, // covariant lifetime of the index
         MSP,
     >
 where
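
A note on the `'_` arguments above: lifetimes that an impl never needs to name can be elided with `'_` in the implementing type, which is what lets rustfmt collapse the generics list down to a one-line `impl<MSP>`. A standalone sketch with a hypothetical two-lifetime struct (not the real IndexingContext):

    // Hypothetical type; only the shape mirrors the impls above.
    struct Ctx<'fid, 'index, T> {
        fields: &'fid str,
        index: &'index str,
        progress: T,
    }

    impl<T: Copy> Copy for Ctx<'_, '_, T> {}

    impl<T: Copy> Clone for Ctx<'_, '_, T> {
        fn clone(&self) -> Self {
            *self
        }
    }

    fn main() {
        let ctx = Ctx { fields: "fid", index: "idx", progress: 42u32 };
        let copy = ctx; // Copy semantics: `ctx` stays usable.
        assert_eq!(ctx.fields, copy.fields);
    }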


@@ -210,11 +210,8 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(
                     primary_key.as_ref().unwrap()
                 };
-                let external_id = retrieved_primary_key.extract_fields_and_docid(
-                    doc,
-                    new_fields_ids_map,
-                    indexer,
-                )?;
+                let external_id =
+                    retrieved_primary_key.extract_fields_and_docid(doc, new_fields_ids_map, indexer)?;
                 let external_id = external_id.to_de();
                 let current_offset = iter.byte_offset();


@@ -559,7 +559,8 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
                 let fst = fst::Set::from_iter(stop_words.into_iter())?;
                 // Does the new FST differ from the previous one?
-                if current.is_none_or(|current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
+                if current
+                    .is_none_or(|current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
                 {
                     // we want to re-create our FST.
                     self.index.put_stop_words(self.wtxn, &fst)?;
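
Context for the hunk above: `current` is an `Option` over the previously stored stop-words FST, so `is_none_or` reads as 'rebuild unless an identical FST already exists', and comparing `as_fst().as_bytes()` is a cheap whole-automaton equality check. A standalone sketch using the `fst` crate (the word lists are invented):

    use fst::Set;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // `Set::from_iter` requires lexicographically sorted input.
        let old_set = Set::from_iter(["a", "the"])?;
        let new_set = Set::from_iter(["a", "an", "the"])?;

        // Byte-for-byte comparison of the underlying automata.
        assert!(old_set.as_fst().as_bytes() != new_set.as_fst().as_bytes());
        Ok(())
    }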


@@ -256,10 +256,7 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
     } else if matches!(filter, "opt1 IS EMPTY" | "NOT opt1 IS NOT EMPTY") {
         id = document.opt1.as_ref().is_some_and(is_empty_value).then(|| document.id.clone());
     } else if matches!(filter, "NOT opt1 IS EMPTY" | "opt1 IS NOT EMPTY") {
-        id = document
-            .opt1
-            .as_ref().is_none_or(|v| !is_empty_value(v))
-            .then(|| document.id.clone());
+        id = document.opt1.as_ref().is_none_or(|v| !is_empty_value(v)).then(|| document.id.clone());
     } else if matches!(filter, "opt1.opt2 IS EMPTY") {
         if document.opt1opt2.as_ref().is_some_and(is_empty_value) {
             id = Some(document.id.clone());