Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-12-23 13:10:06 +01:00

Make clippy happy

This commit is contained in:
  parent 31de33d5ee
  commit 5d21c790ef
@@ -207,7 +207,7 @@ impl BatchKind {
 
             (BatchKind::Settings { settings_ids }, Kind::DocumentClear) => {
                 ControlFlow::Continue(BatchKind::ClearAndSettings {
-                    settings_ids: settings_ids.clone(),
+                    settings_ids: settings_ids,
                     other: vec![id],
                 })
             }
@@ -350,7 +350,7 @@ impl IndexScheduler {
         // matter.
         let index_name = task.indexes().unwrap()[0];
 
-        let _index = self.get_index(rtxn, &index_name)? & enqueued;
+        let _index = self.get_index(rtxn, index_name)? & enqueued;
 
         let enqueued = enqueued
             .into_iter()
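Note: this change (and several below) addresses clippy's `needless_borrow` lint: `index_name` is already a `&str`, so `&index_name` produces a `&&str` that the compiler immediately dereferences again. A small self-contained example with a hypothetical helper, not the scheduler's API:

```rust
// Hypothetical helper used only to demonstrate `needless_borrow`.
fn get_index(name: &str) -> usize {
    name.len()
}

fn main() {
    let index_name: &str = "movies";

    // Lint: `&index_name` is a `&&str`; the extra borrow is needless.
    let with_extra_borrow = get_index(&index_name);

    // Preferred: pass the reference that already exists.
    let direct = get_index(index_name);

    assert_eq!(with_extra_borrow, direct);
}
```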
@@ -382,7 +382,7 @@ impl IndexScheduler {
             | IndexOperation::DocumentClear { ref index_uid, .. } => {
                 // only get the index, don't create it
                 let rtxn = self.env.read_txn()?;
-                self.index_mapper.index(&rtxn, &index_uid)?
+                self.index_mapper.index(&rtxn, index_uid)?
             }
             IndexOperation::DocumentImport { ref index_uid, .. }
             | IndexOperation::Settings { ref index_uid, .. }
@@ -390,7 +390,7 @@ impl IndexScheduler {
             | IndexOperation::SettingsAndDocumentImport { ref index_uid, .. } => {
                 // create the index if it doesn't already exist
                 let mut wtxn = self.env.write_txn()?;
-                let index = self.index_mapper.index(&mut wtxn, index_uid)?;
+                let index = self.index_mapper.create_index(&mut wtxn, index_uid)?;
                 wtxn.commit()?;
                 index
             }
@@ -72,8 +72,8 @@ impl IndexMapper {
     pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
         let uuid = self
             .index_mapping
-            .get(&rtxn, name)?
-            .ok_or(Error::IndexNotFound(name.to_string()))?;
+            .get(rtxn, name)?
+            .ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
 
         // we clone here to drop the lock before entering the match
         let index = self.index_map.read().unwrap().get(&uuid).cloned();
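Note: besides the `needless_borrow` fix on `.get(rtxn, name)?`, this hunk switches `ok_or` to `ok_or_else`. Clippy's `or_fun_call` lint points out that `ok_or(expr)` evaluates `expr` (here allocating a `String` for the error) even when the `Option` is `Some`, while `ok_or_else` defers that work to the `None` path. A rough sketch under those assumptions, with a placeholder error type:

```rust
use std::collections::HashMap;

// Illustrative error type; the real code uses the scheduler's own Error enum.
#[derive(Debug)]
enum Error {
    IndexNotFound(String),
}

fn lookup(mapping: &HashMap<String, u32>, name: &str) -> Result<u32, Error> {
    mapping
        .get(name)
        .copied()
        // `ok_or(Error::IndexNotFound(name.to_string()))` would build the error
        // (and allocate its String) on every call; the closure runs only on None.
        .ok_or_else(|| Error::IndexNotFound(name.to_string()))
}

fn main() {
    let mut mapping = HashMap::new();
    mapping.insert("movies".to_string(), 1);
    println!("{:?}", lookup(&mapping, "movies"));
    println!("{:?}", lookup(&mapping, "does-not-exist"));
}
```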
@@ -109,7 +109,7 @@ impl IndexMapper {
 
     pub fn indexes(&self, rtxn: &RoTxn) -> Result<Vec<Index>> {
         self.index_mapping
-            .iter(&rtxn)?
+            .iter(rtxn)?
             .map(|ret| {
                 ret.map_err(Error::from)
                     .and_then(|(name, _)| self.index(rtxn, name))
@@ -122,11 +122,11 @@ impl IndexMapper {
         let lhs_uuid = self
             .index_mapping
             .get(wtxn, lhs)?
-            .ok_or(Error::IndexNotFound(lhs.to_string()))?;
+            .ok_or_else(|| Error::IndexNotFound(lhs.to_string()))?;
         let rhs_uuid = self
             .index_mapping
             .get(wtxn, rhs)?
-            .ok_or(Error::IndexNotFound(rhs.to_string()))?;
+            .ok_or_else(|| Error::IndexNotFound(rhs.to_string()))?;
 
         self.index_mapping.put(wtxn, lhs, &rhs_uuid)?;
         self.index_mapping.put(wtxn, rhs, &lhs_uuid)?;
@@ -189,10 +189,10 @@ impl IndexScheduler {
             processing_tasks: self.processing_tasks.clone(),
             file_store: self.file_store.clone(),
             env: self.env.clone(),
-            all_tasks: self.all_tasks.clone(),
-            status: self.status.clone(),
-            kind: self.kind.clone(),
-            index_tasks: self.index_tasks.clone(),
+            all_tasks: self.all_tasks,
+            status: self.status,
+            kind: self.kind,
+            index_tasks: self.index_tasks,
             index_mapper: self.index_mapper.clone(),
             wake_up: self.wake_up.clone(),
 
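Note: dropping `.clone()` on `all_tasks`, `status`, `kind`, and `index_tasks` matches clippy's `clone_on_copy` lint, which assumes those database handles are `Copy` (as heed's `Database` type is). A minimal sketch of the lint with placeholder types and method names:

```rust
// Placeholder stand-in for a `Copy` database handle such as heed's `Database`.
#[derive(Clone, Copy)]
struct Database {
    id: u32,
}

struct Scheduler {
    all_tasks: Database,
    status: Database,
}

impl Scheduler {
    // Returns a second handle referring to the same underlying state.
    fn private_clone(&self) -> Scheduler {
        Scheduler {
            // For `Copy` types, `.clone()` is just a more verbose copy;
            // clippy suggests using the value directly.
            all_tasks: self.all_tasks,
            status: self.status,
        }
    }
}

fn main() {
    let scheduler = Scheduler {
        all_tasks: Database { id: 1 },
        status: Database { id: 2 },
    };
    let copy = scheduler.private_clone();
    assert_eq!(copy.all_tasks.id, 1);
}
```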
@@ -279,7 +279,7 @@ impl IndexScheduler {
             .map(|task| match processing.contains(task.uid) {
                 true => TaskView {
                     status: Status::Processing,
-                    started_at: Some(started_at.clone()),
+                    started_at: Some(started_at),
                     ..task
                 },
                 false => task,
@@ -309,7 +309,9 @@ impl IndexScheduler {
 
         if let Some(indexes) = task.indexes() {
             for index in indexes {
-                self.update_index(&mut wtxn, index, |bitmap| drop(bitmap.insert(task.uid)))?;
+                self.update_index(&mut wtxn, index, |bitmap| {
+                    bitmap.insert(task.uid);
+                })?;
             }
         }
 
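Note: the closure rewrite above replaces `drop(bitmap.insert(task.uid))` with a block that discards the returned `bool` via a trailing semicolon. Calling `drop` on a `Copy` value like `bool` does nothing and triggers a clippy warning (`drop_copy`). A standalone sketch of the same idea using a plain `HashSet` in place of the bitmap:

```rust
use std::collections::HashSet;

// Stand-in for `update_index`: applies a mutation closure to a set of task ids.
fn update<F: Fn(&mut HashSet<u32>)>(tasks: &mut HashSet<u32>, f: F) {
    f(tasks);
}

fn main() {
    let mut tasks = HashSet::new();
    let task_uid = 42;

    // Before: `|set| drop(set.insert(task_uid))`, which drops a `Copy` bool.
    // After: a block whose trailing semicolon discards the bool without `drop`.
    update(&mut tasks, |set| {
        set.insert(task_uid);
    });

    assert!(tasks.contains(&task_uid));
}
```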
@@ -73,12 +73,12 @@ impl IndexScheduler {
             })?;
         }
 
-        self.all_tasks.put(wtxn, &BEU32::new(task.uid), &task)?;
+        self.all_tasks.put(wtxn, &BEU32::new(task.uid), task)?;
         Ok(())
     }
 
     pub(crate) fn get_index(&self, rtxn: &RoTxn, index: &str) -> Result<RoaringBitmap> {
-        Ok(self.index_tasks.get(&rtxn, index)?.unwrap_or_default())
+        Ok(self.index_tasks.get(rtxn, index)?.unwrap_or_default())
     }
 
     pub(crate) fn put_index(
@@ -96,7 +96,7 @@ impl IndexScheduler {
         index: &str,
         f: impl Fn(&mut RoaringBitmap),
     ) -> Result<()> {
-        let mut tasks = self.get_index(&wtxn, index)?;
+        let mut tasks = self.get_index(wtxn, index)?;
         f(&mut tasks);
         self.put_index(wtxn, index, &tasks)?;
 
@@ -104,7 +104,7 @@ impl IndexScheduler {
     }
 
     pub(crate) fn get_status(&self, rtxn: &RoTxn, status: Status) -> Result<RoaringBitmap> {
-        Ok(self.status.get(&rtxn, &status)?.unwrap_or_default())
+        Ok(self.status.get(rtxn, &status)?.unwrap_or_default())
     }
 
     pub(crate) fn put_status(
@@ -122,7 +122,7 @@ impl IndexScheduler {
         status: Status,
         f: impl Fn(&mut RoaringBitmap),
     ) -> Result<()> {
-        let mut tasks = self.get_status(&wtxn, status)?;
+        let mut tasks = self.get_status(wtxn, status)?;
         f(&mut tasks);
         self.put_status(wtxn, status, &tasks)?;
 
@@ -130,7 +130,7 @@ impl IndexScheduler {
     }
 
     pub(crate) fn get_kind(&self, rtxn: &RoTxn, kind: Kind) -> Result<RoaringBitmap> {
-        Ok(self.kind.get(&rtxn, &kind)?.unwrap_or_default())
+        Ok(self.kind.get(rtxn, &kind)?.unwrap_or_default())
     }
 
     pub(crate) fn put_kind(
@@ -148,7 +148,7 @@ impl IndexScheduler {
         kind: Kind,
         f: impl Fn(&mut RoaringBitmap),
     ) -> Result<()> {
-        let mut tasks = self.get_kind(&wtxn, kind)?;
+        let mut tasks = self.get_kind(wtxn, kind)?;
         f(&mut tasks);
         self.put_kind(wtxn, kind, &tasks)?;
 
@@ -256,10 +256,10 @@ impl Index {
         &self,
         rtxn: &'a RoTxn,
     ) -> Result<impl Iterator<Item = Result<Document>> + 'a> {
-        let fields_ids_map = self.fields_ids_map(&rtxn)?;
+        let fields_ids_map = self.fields_ids_map(rtxn)?;
         let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
 
-        Ok(self.inner.all_documents(&rtxn)?.map(move |ret| {
+        Ok(self.inner.all_documents(rtxn)?.map(move |ret| {
             ret.map_err(IndexError::from)
                 .and_then(|(_key, document)| -> Result<_> {
                     Ok(obkv_to_json(&all_fields, &fields_ids_map, document)?)
@@ -239,13 +239,12 @@ async fn document_addition(
                return Err(MeilisearchHttpError::InvalidContentType(
                    format!("{}/{}", type_, subtype),
                    ACCEPTED_CONTENT_TYPE.clone(),
-               )
-               .into())
+               ))
            }
            None => {
-               return Err(
-                   MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone()).into(),
-               )
+               return Err(MeilisearchHttpError::MissingContentType(
+                   ACCEPTED_CONTENT_TYPE.clone(),
+               ))
            }
        };
 
@@ -277,7 +276,7 @@ async fn document_addition(
         Ok(Ok(documents_count)) => documents_count,
         Ok(Err(e)) => {
             index_scheduler.delete_update_file(uuid)?;
-            return Err(e.into());
+            return Err(e);
         }
         Err(e) => {
             index_scheduler.delete_update_file(uuid)?;
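Note: the last two hunks remove `.into()` calls on values that already have the target type, which is what clippy's `useless_conversion` lint reports. A schematic example with a hypothetical error type (the real code deals in Meilisearch's HTTP error types):

```rust
// Hypothetical error type used only to illustrate `useless_conversion`.
#[derive(Debug)]
struct HttpError(String);

fn read_documents(input: &str) -> Result<u32, HttpError> {
    input.parse::<u32>().map_err(|e| HttpError(e.to_string()))
}

fn handler(input: &str) -> Result<u32, HttpError> {
    match read_documents(input) {
        Ok(count) => Ok(count),
        Err(e) => {
            // `e` is already an `HttpError`; `e.into()` would convert the type
            // into itself, so returning it directly is what clippy suggests.
            Err(e)
        }
    }
}

fn main() {
    println!("{:?}", handler("12"));
    println!("{:?}", handler("not-a-number"));
}
```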