Mirror of https://github.com/meilisearch/MeiliSearch
Merge #1007

1007: fix clippy errors r=MarinPostma a=qdequele

I fixed the clippy warnings and errors. This should keep us from running into issues later when bors tries to merge a branch.

Co-authored-by: qdequele <quentin@dequelen.me>

commit 5f59f93804
@@ -193,9 +193,9 @@ fn version_guard(path: &Path, create: bool) -> MResult<(u32, u32, u32)> {
                 Err(Error::VersionMismatch(format!("{}.{}.XX", version_major, version_minor)))
             } else {
                 Ok((
-                    version_major.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-                    version_minor.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-                    version_patch.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?
+                    version_major.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+                    version_minor.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+                    version_patch.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?
                 ))
             }
         }
@@ -212,9 +212,9 @@ fn version_guard(path: &Path, create: bool) -> MResult<(u32, u32, u32)> {
                         current_version_patch).as_bytes())?;
 
                     Ok((
-                        current_version_major.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-                        current_version_minor.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-                        current_version_patch.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?
+                        current_version_major.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+                        current_version_minor.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+                        current_version_patch.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?
                     ))
                 } else {
                     // when no version file is found and we were not told to create one, this
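
The two hunks above (like the dump and snapshot hunks further down) fix the same clippy complaint: calling `.or_else(|e| Err(...))` on a `Result` only to rewrap the error, which the `bind_instead_of_map` lint flags, where `.map_err(...)` expresses the same transformation directly. A minimal, self-contained sketch of the before/after, using a stand-in error type rather than the real MeiliSearch one:

```rust
// Standalone sketch of the change above; `Error::VersionMismatch` here is a
// stand-in enum, not the actual MeiliSearch error type.
#[derive(Debug)]
enum Error {
    VersionMismatch(String),
}

fn parse_version_component(raw: &str) -> Result<u32, Error> {
    // Before: `.or_else(|e| Err(...))` rebuilds a whole new Result just to wrap the error.
    let _before: Result<u32, Error> = raw
        .parse()
        .or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))));

    // After: `.map_err(...)` only touches the error variant, which is what clippy suggests.
    raw.parse()
        .map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))
}

fn main() {
    assert!(matches!(parse_version_component("12"), Ok(12)));
    assert!(parse_version_component("twelve").is_err());
}
```
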
@@ -112,7 +112,7 @@ fn import_index_v1(
 
     // extract `settings.json` file and import content
     let settings = settings_from_path(&index_path)?;
-    let settings = settings.to_update().or_else(|_e| Err(Error::dump_failed()))?;
+    let settings = settings.to_update().map_err(|_e| Error::dump_failed())?;
     apply_settings_update(write_txn, &index, settings)?;
 
     // create iterator over documents in `documents.jsonl` to make batch importation
@@ -286,7 +286,7 @@ fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, folder_path
     let mut offset = 0;
     loop {
         let documents = crate::routes::document::get_all_documents_sync(data, reader, index_uid, offset, dump_batch_size, None)?;
-        if documents.len() == 0 { break; } else { offset += dump_batch_size; }
+        if documents.is_empty() { break; } else { offset += dump_batch_size; }
 
         for document in documents {
             serde_json::to_writer(&file, &document)?;
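
This hunk is clippy's `len_zero` lint: `documents.len() == 0` becomes `documents.is_empty()`, which states the intent and never computes a length. A toy version of the paging loop, with made-up batch numbers and a local `fetch_page` helper standing in for `get_all_documents_sync`:

```rust
// Toy version of the paging loop above: `fetch_page` stands in for
// get_all_documents_sync and just slices a local Vec.
fn fetch_page(all: &[u32], offset: usize, batch: usize) -> Vec<u32> {
    all.iter().skip(offset).take(batch).copied().collect()
}

fn main() {
    let all: Vec<u32> = (0..10).collect();
    let batch = 4;
    let mut offset = 0;
    let mut pages = 0;

    loop {
        let documents = fetch_page(&all, offset, batch);
        // clippy::len_zero: prefer `is_empty()` over `len() == 0`.
        if documents.is_empty() { break; } else { offset += batch; }
        pages += 1;
    }

    assert_eq!(pages, 3); // 10 documents in batches of 4 -> pages 0..4, 4..8, 8..10
}
```
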
@@ -293,12 +293,18 @@ impl<'a> SearchBuilder<'a> {
     }
 }
 
-#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Serialize, Deserialize)]
+#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
 pub struct MatchPosition {
     pub start: usize,
     pub length: usize,
 }
 
+impl PartialOrd for MatchPosition {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
 impl Ord for MatchPosition {
     fn cmp(&self, other: &Self) -> Ordering {
         match self.start.cmp(&other.start) {
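
This hunk addresses clippy's `derive_ord_xor_partial_ord` lint: `MatchPosition` derived `PartialOrd` while implementing `Ord` by hand, so the two orderings could drift apart; the fix drops `PartialOrd` from the derive list and forwards `partial_cmp` to the manual `cmp`. A self-contained sketch of the same shape (the `Ord` body is a plausible simplification and the serde derives are left out to avoid the dependency):

```rust
use std::cmp::Ordering;

// Same shape as the struct in the diff, minus Serialize/Deserialize; the cmp
// body below is a simplified example, not the exact MeiliSearch implementation.
#[derive(Debug, Clone, Eq, PartialEq)]
struct MatchPosition {
    start: usize,
    length: usize,
}

// PartialOrd is written by hand and simply forwards to Ord, so the two
// orderings can never disagree (the point of derive_ord_xor_partial_ord).
impl PartialOrd for MatchPosition {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for MatchPosition {
    fn cmp(&self, other: &Self) -> Ordering {
        // Compare by start first, then by length.
        match self.start.cmp(&other.start) {
            Ordering::Equal => self.length.cmp(&other.length),
            ord => ord,
        }
    }
}

fn main() {
    let a = MatchPosition { start: 0, length: 3 };
    let b = MatchPosition { start: 2, length: 1 };
    assert!(a < b); // both < and cmp go through the single Ord implementation
}
```
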
@@ -33,7 +33,7 @@ pub fn create_snapshot(data: &Data, snapshot_path: &Path) -> Result<(), Error> {
 
     data.db.copy_and_compact_to_path(tmp_dir.path())?;
 
-    compression::to_tar_gz(tmp_dir.path(), snapshot_path).or_else(|e| Err(Error::Internal(format!("something went wrong during snapshot compression: {}", e))))
+    compression::to_tar_gz(tmp_dir.path(), snapshot_path).map_err(|e| Error::Internal(format!("something went wrong during snapshot compression: {}", e)))
 }
 
 pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
@@ -25,10 +25,7 @@ impl<T> OptionAll<T> {
     }
 
     pub fn is_all(&self) -> bool {
-        match self {
-            OptionAll::All => true,
-            _ => false,
-        }
+        matches!(self, OptionAll::All)
     }
 }
 
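
The last hunk folds a two-arm `match` that only returns `true`/`false` into the `matches!` macro, as clippy's `match_like_matches_macro` lint suggests. A tiny standalone version with a simplified `OptionAll` (not the actual MeiliSearch definition):

```rust
// Simplified stand-in for the OptionAll type touched in the diff.
enum OptionAll<T> {
    All,
    Some(T),
    None,
}

impl<T> OptionAll<T> {
    fn is_all(&self) -> bool {
        // Equivalent to `match self { OptionAll::All => true, _ => false }`,
        // which is exactly the shape match_like_matches_macro suggests folding.
        matches!(self, OptionAll::All)
    }
}

fn main() {
    assert!(OptionAll::<u32>::All.is_all());
    assert!(!OptionAll::Some(1u32).is_all());
    assert!(!OptionAll::<u32>::None.is_all());
}
```
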