Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-12-22 20:50:04 +01:00)
Make clippy happy
commit bf76d4a43c
parent 976075578f
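The diff below drops redundant `&` borrows of values that are already references, the pattern clippy reports as `needless_borrow`. A minimal, self-contained sketch of the before/after shape, using hypothetical names (`RoTxn` and `displayed_fields` here are stand-ins, not the Meilisearch types):

struct RoTxn;

// Hypothetical stand-in for an accessor that takes a transaction by reference.
fn displayed_fields(_txn: &RoTxn) -> Option<Vec<String>> {
    None
}

fn settings(txn: &RoTxn) -> Option<Vec<String>> {
    // `txn` is already a `&RoTxn`; writing `displayed_fields(&txn)` would pass a
    // `&&RoTxn` that the compiler immediately auto-dereferences, which is what
    // clippy's `needless_borrow` lint flags. Passing the reference through
    // directly keeps clippy happy.
    displayed_fields(txn)
}

fn main() {
    let txn = RoTxn;
    assert!(settings(&txn).is_none());
}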
@@ -101,7 +101,7 @@ impl Index {
        let index = Self::open(&dst_dir_path, size)?;
        let mut txn = index.write_txn()?;

-        let handler = UpdateHandler::new(&indexing_options)?;
+        let handler = UpdateHandler::new(indexing_options)?;

        index.update_settings_txn(&mut txn, &settings, handler.update_builder(0))?;
@@ -62,34 +62,34 @@ impl Index {

    pub fn settings_txn(&self, txn: &RoTxn) -> Result<Settings<Checked>> {
        let displayed_attributes = self
-            .displayed_fields(&txn)?
+            .displayed_fields(txn)?
            .map(|fields| fields.into_iter().map(String::from).collect());

        let searchable_attributes = self
-            .searchable_fields(&txn)?
+            .searchable_fields(txn)?
            .map(|fields| fields.into_iter().map(String::from).collect());

-        let filterable_attributes = self.filterable_fields(&txn)?.into_iter().collect();
+        let filterable_attributes = self.filterable_fields(txn)?.into_iter().collect();

        let criteria = self
-            .criteria(&txn)?
+            .criteria(txn)?
            .into_iter()
            .map(|c| c.to_string())
            .collect();

        let stop_words = self
-            .stop_words(&txn)?
+            .stop_words(txn)?
            .map(|stop_words| -> Result<BTreeSet<_>> {
                Ok(stop_words.stream().into_strs()?.into_iter().collect())
            })
            .transpose()?
            .unwrap_or_else(BTreeSet::new);
-        let distinct_field = self.distinct_field(&txn)?.map(String::from);
+        let distinct_field = self.distinct_field(txn)?.map(String::from);

        // in milli each word in the synonyms map were split on their separator. Since we lost
        // this information we are going to put space between words.
        let synonyms = self
-            .synonyms(&txn)?
+            .synonyms(txn)?
            .iter()
            .map(|(key, values)| {
                (
@@ -175,7 +175,7 @@ impl Index {
        attributes_to_retrieve: &Option<Vec<S>>,
        fields_ids_map: &milli::FieldsIdsMap,
    ) -> Result<Vec<FieldId>> {
-        let mut displayed_fields_ids = match self.displayed_fields_ids(&txn)? {
+        let mut displayed_fields_ids = match self.displayed_fields_ids(txn)? {
            Some(ids) => ids.into_iter().collect::<Vec<_>>(),
            None => fields_ids_map.iter().map(|(id, _)| id).collect(),
        };
@@ -239,7 +239,7 @@ fn compute_matches<A: AsRef<[u8]>>(

    for (key, value) in document {
        let mut infos = Vec::new();
-        compute_value_matches(&mut infos, value, matcher, &analyzer);
+        compute_value_matches(&mut infos, value, matcher, analyzer);
        if !infos.is_empty() {
            matches.insert(key.clone(), infos);
        }
@@ -329,7 +329,7 @@ fn add_highlight_to_formatted_options(
            break;
        }

-        if let Some(id) = fields_ids_map.id(&attr) {
+        if let Some(id) = fields_ids_map.id(attr) {
            if displayed_ids.contains(&id) {
                formatted_options.insert(id, new_format);
            }
@@ -366,7 +366,7 @@ fn add_crop_to_formatted_options(
            }
        }

-        if let Some(id) = fields_ids_map.id(&attr_name) {
+        if let Some(id) = fields_ids_map.id(attr_name) {
            if displayed_ids.contains(&id) {
                formatted_options
                    .entry(id)
@@ -592,7 +592,7 @@ impl<'a, A: AsRef<[u8]>> Formatter<'a, A> {
                // we highlight the complete word.
                None => {
                    out.push_str(&self.marks.0);
-                    out.push_str(&word);
+                    out.push_str(word);
                    out.push_str(&self.marks.1);
                }
            }
@@ -205,7 +205,7 @@ impl Index {

        // Set the primary key if not set already, ignore if already set.
        if let (None, Some(primary_key)) = (self.primary_key(txn)?, primary_key) {
-            let mut builder = UpdateBuilder::new(0).settings(txn, &self);
+            let mut builder = UpdateBuilder::new(0).settings(txn, self);
            builder.set_primary_key(primary_key.to_string());
            builder.execute(|_, _| ())?;
        }
@@ -98,7 +98,7 @@ fn load_index(

    let mut txn = index.write_txn()?;

-    let handler = UpdateHandler::new(&indexer_options)?;
+    let handler = UpdateHandler::new(indexer_options)?;

    index.update_settings_txn(&mut txn, &settings.check(), handler.update_builder(0))?;
@@ -40,9 +40,9 @@ impl IndexMeta {
    }

    fn new_txn(index: &Index, txn: &heed::RoTxn) -> Result<Self> {
-        let created_at = index.created_at(&txn)?;
-        let updated_at = index.updated_at(&txn)?;
-        let primary_key = index.primary_key(&txn)?.map(String::from);
+        let created_at = index.created_at(txn)?;
+        let updated_at = index.updated_at(txn)?;
+        let primary_key = index.primary_key(txn)?.map(String::from);
        Ok(Self {
            created_at,
            updated_at,
@@ -59,8 +59,8 @@ impl UpdateStore {
        let update_files_path = path.as_ref().join(super::UPDATE_DIR);
        create_dir_all(&update_files_path)?;

-        self.dump_pending(&txn, uuids, &mut dump_data_file, &path)?;
-        self.dump_completed(&txn, uuids, &mut dump_data_file)?;
+        self.dump_pending(txn, uuids, &mut dump_data_file, &path)?;
+        self.dump_completed(txn, uuids, &mut dump_data_file)?;

        Ok(())
    }
@@ -268,13 +268,13 @@ impl UpdateStore {
                self.pending_queue.remap_key_type::<PendingKeyCodec>().put(
                    wtxn,
                    &(global_id, index_uuid, enqueued.id()),
-                    &enqueued,
+                    enqueued,
                )?;
            }
            _ => {
                let _update_id = self.next_update_id_raw(wtxn, index_uuid)?;
                self.updates
-                    .put(wtxn, &(index_uuid, update.id()), &update)?;
+                    .put(wtxn, &(index_uuid, update.id()), update)?;
            }
        }
        Ok(())