mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-04 20:37:15 +02:00)

commit 1c4f0b2ccf (parent 10fc870684)

    clippy, fmt & tests

31 changed files with 196 additions and 133 deletions

@@ -1,13 +1,16 @@
-use std::{collections::HashMap, path::{Path, PathBuf}};
 use std::sync::Arc;
+use std::{
+    collections::HashMap,
+    path::{Path, PathBuf},
+};
 
 use async_stream::stream;
 use chrono::Utc;
 use futures::{lock::Mutex, stream::StreamExt};
 use log::{error, info};
+use tokio::sync::{mpsc, oneshot, RwLock};
 use update_actor::UpdateActorHandle;
 use uuid_resolver::UuidResolverHandle;
-use tokio::sync::{mpsc, oneshot, RwLock};
 
 use super::{DumpError, DumpInfo, DumpMsg, DumpResult, DumpStatus, DumpTask};
 use crate::index_controller::{update_actor, uuid_resolver};

@@ -107,7 +110,10 @@ where
             }
         };
 
-        self.dump_infos.write().await.insert(uid.clone(), info.clone());
+        self.dump_infos
+            .write()
+            .await
+            .insert(uid.clone(), info.clone());
 
         ret.send(Ok(info)).expect("Dump actor is dead");

@@ -122,11 +128,8 @@ where
 
         let task_result = tokio::task::spawn(task.run()).await;
 
-        let mut dump_infos = self.dump_infos
-            .write()
-            .await;
-        let dump_infos =
-            dump_infos
+        let mut dump_infos = self.dump_infos.write().await;
+        let dump_infos = dump_infos
             .get_mut(&uid)
             .expect("dump entry deleted while lock was acquired");

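Both hunks above reshape code around the dump actor's shared `dump_infos` map, which, judging from the `.write().await` calls, lives behind a `tokio::sync::RwLock`. A minimal sketch of that pattern, with `DumpInfo` reduced to a placeholder field (the real record is not shown in this diff):

    use std::{collections::HashMap, sync::Arc};
    use tokio::sync::RwLock;

    // Placeholder for the real DumpInfo status record seen in the diff.
    #[derive(Clone, Debug)]
    struct DumpInfo {
        uid: String,
    }

    // Shared, concurrently readable map of dump statuses.
    type DumpInfos = Arc<RwLock<HashMap<String, DumpInfo>>>;

    async fn record_dump(infos: &DumpInfos, info: DumpInfo) {
        // Hold the write lock only for the insert, so concurrent readers
        // of other dump statuses are blocked as briefly as possible.
        infos.write().await.insert(info.uid.clone(), info);
    }
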
@@ -1,7 +1,7 @@
-use std::path::Path;
-use actix_web::web::Bytes;
-use tokio::sync::{mpsc, oneshot};
 use super::{DumpActor, DumpActorHandle, DumpInfo, DumpMsg, DumpResult};
+use actix_web::web::Bytes;
+use std::path::Path;
+use tokio::sync::{mpsc, oneshot};
 
 #[derive(Clone)]
 pub struct DumpActorHandleImpl {

@@ -34,7 +34,14 @@ impl DumpActorHandleImpl {
         update_db_size: u64,
     ) -> anyhow::Result<Self> {
         let (sender, receiver) = mpsc::channel(10);
-        let actor = DumpActor::new(receiver, uuid_resolver, update, path, index_db_size, update_db_size);
+        let actor = DumpActor::new(
+            receiver,
+            uuid_resolver,
+            update,
+            path,
+            index_db_size,
+            update_db_size,
+        );
 
         tokio::task::spawn(actor.run());

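This hunk only reflows the `DumpActor::new` call, but it also illustrates the actor pattern used throughout these files: a cheaply clonable handle owns the sender half of a bounded channel and spawns the actor onto the runtime, while each request carries a `oneshot` sender for the reply. A simplified, self-contained sketch of that pattern (the message and type names here are illustrative, not the crate's exact API):

    use tokio::sync::{mpsc, oneshot};

    // Illustrative message type: each request carries a oneshot sender
    // so the actor can reply to exactly one caller.
    enum Msg {
        CreateDump { ret: oneshot::Sender<String> },
    }

    struct Actor {
        inbox: mpsc::Receiver<Msg>,
    }

    impl Actor {
        async fn run(mut self) {
            // The actor owns its state and processes messages sequentially.
            while let Some(Msg::CreateDump { ret }) = self.inbox.recv().await {
                let _ = ret.send("dump-uid".to_string());
            }
        }
    }

    #[derive(Clone)]
    struct Handle {
        sender: mpsc::Sender<Msg>,
    }

    impl Handle {
        fn new() -> Self {
            // Bounded channel, mirroring the `mpsc::channel(10)` in the diff.
            let (sender, receiver) = mpsc::channel(10);
            tokio::task::spawn(Actor { inbox: receiver }.run());
            Self { sender }
        }

        async fn create_dump(&self) -> Option<String> {
            let (ret, rx) = oneshot::channel();
            self.sender.send(Msg::CreateDump { ret }).await.ok()?;
            rx.await.ok()
        }
    }
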
@@ -1,4 +1,11 @@
-use std::{collections::{BTreeMap, BTreeSet}, fs::File, io::BufRead, marker::PhantomData, path::Path, sync::Arc};
+use std::{
+    collections::{BTreeMap, BTreeSet},
+    fs::File,
+    io::BufRead,
+    marker::PhantomData,
+    path::Path,
+    sync::Arc,
+};
 
 use heed::EnvOpenOptions;
 use log::{error, info, warn};

@@ -4,7 +4,11 @@ use chrono::{DateTime, Utc};
 use log::info;
 use serde::{Deserialize, Serialize};
 
-use crate::{index::Index, index_controller::{update_actor::UpdateStore, uuid_resolver::HeedUuidStore}, option::IndexerOpts};
+use crate::{
+    index::Index,
+    index_controller::{update_actor::UpdateStore, uuid_resolver::HeedUuidStore},
+    option::IndexerOpts,
+};
 
 #[derive(Serialize, Deserialize, Debug)]
 #[serde(rename_all = "camelCase")]

@@ -1,7 +1,6 @@
 use tokio::sync::oneshot;
 
-use super::{DumpResult, DumpInfo};
-
+use super::{DumpInfo, DumpResult};
 
 pub enum DumpMsg {
     CreateDump {

@@ -12,4 +11,3 @@ pub enum DumpMsg {
         ret: oneshot::Sender<DumpResult<DumpInfo>>,
     },
 }
-

@@ -1,13 +1,13 @@
 use std::fs::File;
 use std::path::{Path, PathBuf};
 
+use anyhow::Context;
 use chrono::{DateTime, Utc};
 use log::{error, info, warn};
 #[cfg(test)]
 use mockall::automock;
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
-use anyhow::Context;
 
 use loaders::v1::MetadataV1;
 use loaders::v2::MetadataV2;

@@ -25,7 +25,7 @@ mod handle_impl;
 mod loaders;
 mod message;
 
-const META_FILE_NAME: &'static str = "metadata.json";
+const META_FILE_NAME: &str = "metadata.json";
 
 pub type DumpResult<T> = std::result::Result<T, DumpError>;
 

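The `META_FILE_NAME` change is clippy's `redundant_static_lifetimes` lint: in `const` and `static` items the `'static` lifetime of a string literal is already implied, so spelling it out is noise. In miniature:

    // Flagged by clippy::redundant_static_lifetimes:
    const OLD: &'static str = "metadata.json";

    // Equivalent, and what the lint suggests:
    const NEW: &str = "metadata.json";
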
@@ -138,7 +138,9 @@ pub fn load_dump(
     let tmp_dst = tempfile::tempdir_in(dst_dir)?;
 
     match meta {
-        Metadata::V1(meta) => meta.load_dump(&tmp_src_path, tmp_dst.path(), index_db_size as usize)?,
+        Metadata::V1(meta) => {
+            meta.load_dump(&tmp_src_path, tmp_dst.path(), index_db_size as usize)?
+        }
         Metadata::V2(meta) => meta.load_dump(
             &tmp_src_path,
             tmp_dst.path(),

@@ -6,14 +6,15 @@ use async_stream::stream;
 use futures::stream::StreamExt;
 use heed::CompactionOption;
 use log::debug;
-use tokio::{fs, sync::mpsc};
 use tokio::task::spawn_blocking;
+use tokio::{fs, sync::mpsc};
 use uuid::Uuid;
 
-use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings, update_handler::UpdateHandler};
+use crate::index::{
+    update_handler::UpdateHandler, Checked, Document, SearchQuery, SearchResult, Settings,
+};
 use crate::index_controller::{
-    get_arc_ownership_blocking, Failed, IndexStats, Processed,
-    Processing,
+    get_arc_ownership_blocking, Failed, IndexStats, Processed, Processing,
 };
 use crate::option::IndexerOpts;

@@ -3,7 +3,10 @@ use std::path::{Path, PathBuf};
 use tokio::sync::{mpsc, oneshot};
 use uuid::Uuid;
 
-use crate::{index::Checked, index_controller::{IndexSettings, IndexStats, Processing}};
+use crate::{
+    index::Checked,
+    index_controller::{IndexSettings, IndexStats, Processing},
+};
 use crate::{
     index::{Document, SearchQuery, SearchResult, Settings},
     index_controller::{Failed, Processed},

@@ -3,7 +3,7 @@ use std::path::PathBuf;
 use tokio::sync::oneshot;
 use uuid::Uuid;
 
-use crate::index::{Document, SearchQuery, SearchResult, Settings, Checked};
+use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings};
 use crate::index_controller::{Failed, IndexStats, Processed, Processing};
 
 use super::{IndexMeta, IndexResult, IndexSettings};

@@ -15,7 +15,7 @@ use message::IndexMsg;
 use store::{IndexStore, MapIndexStore};
 
 use crate::index::{Checked, Document, Index, SearchQuery, SearchResult, Settings};
-use crate::index_controller::{Failed, Processed, Processing, IndexStats};
+use crate::index_controller::{Failed, IndexStats, Processed, Processing};
 
 use super::IndexSettings;
 

@@ -44,7 +44,11 @@ impl IndexMeta {
         let created_at = index.created_at(&txn)?;
         let updated_at = index.updated_at(&txn)?;
         let primary_key = index.primary_key(&txn)?.map(String::from);
-        Ok(Self { created_at, updated_at, primary_key })
+        Ok(Self {
+            created_at,
+            updated_at,
+            primary_key,
+        })
     }
 }
 

@@ -57,7 +61,7 @@ pub enum IndexError {
     #[error("Existing primary key")]
     ExistingPrimaryKey,
     #[error("Internal Index Error: {0}")]
-    Internal(String)
+    Internal(String),
 }
 
 macro_rules! internal_error {

@@ -72,7 +76,12 @@ macro_rules! internal_error {
     }
 }
 
-internal_error!(anyhow::Error, heed::Error, tokio::task::JoinError, std::io::Error);
+internal_error!(
+    anyhow::Error,
+    heed::Error,
+    tokio::task::JoinError,
+    std::io::Error
+);
 
 #[async_trait::async_trait]
 #[cfg_attr(test, automock)]

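The `internal_error!` invocation is only reformatted here, but its role is worth spelling out: given the listed error types and the `Internal(String)` variant above, it presumably generates one `From` impl per type that funnels the error into `IndexError::Internal`. The macro body is not part of this diff, so the following is a hedged reconstruction of the likely shape, not the crate's actual definition:

    #[derive(Debug)]
    enum IndexError {
        Internal(String),
    }

    // Hypothetical reconstruction: one From impl per listed error type,
    // each routed into the Internal variant via Display.
    macro_rules! internal_error {
        ($($ty:ty),*) => {
            $(
                impl From<$ty> for IndexError {
                    fn from(error: $ty) -> Self {
                        Self::Internal(error.to_string())
                    }
                }
            )*
        };
    }

    // Stand-in error types; the diff lists anyhow::Error, heed::Error, etc.
    internal_error!(std::io::Error, std::fmt::Error);
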
@@ -190,8 +199,8 @@ mod test {
         self.as_ref().snapshot(uuid, path).await
     }
 
-    async fn dump(&self, uid: String, uuid: Uuid, path: PathBuf) -> IndexResult<()> {
-        self.as_ref().dump(uid, uuid, path).await
+    async fn dump(&self, uuid: Uuid, path: PathBuf) -> IndexResult<()> {
+        self.as_ref().dump(uuid, path).await
     }
 
     async fn get_index_stats(&self, uuid: Uuid) -> IndexResult<IndexStats> {

@@ -144,7 +144,7 @@ mod test {
     use crate::index_controller::update_actor::{
         MockUpdateActorHandle, UpdateActorHandleImpl, UpdateError,
     };
-    use crate::index_controller::uuid_resolver::{MockUuidResolverHandle, UuidError};
+    use crate::index_controller::uuid_resolver::{MockUuidResolverHandle, UuidResolverError};
 
     #[actix_rt::test]
     async fn test_normal() {

@@ -193,7 +193,7 @@ mod test {
             .expect_snapshot()
             .times(1)
             // abitrary error
-            .returning(|_| Box::pin(err(UuidError::NameAlreadyExist)));
+            .returning(|_| Box::pin(err(UuidResolverError::NameAlreadyExist)));
 
         let update_handle = MockUpdateActorHandle::new();
 

@@ -248,7 +248,7 @@ mod test {
             // we expect the funtion to be called between 2 and 3 time in the given interval.
             .times(2..4)
             // abitrary error, to short-circuit the function
-            .returning(move |_| Box::pin(err(UuidError::NameAlreadyExist)));
+            .returning(move |_| Box::pin(err(UuidResolverError::NameAlreadyExist)));
 
         let update_handle = MockUpdateActorHandle::new();
 

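Both test hunks simply rename `UuidError` to `UuidResolverError` inside mockall expectations. For readers unfamiliar with mockall: `.times(2..4)` bounds how many calls the expectation accepts (here two or three, matching the comment), and `.returning` installs the stubbed result. A minimal example of the same expectation style on a hypothetical trait:

    use mockall::automock;

    #[automock]
    trait Resolver {
        fn resolve(&self, name: &str) -> Result<u64, String>;
    }

    fn demo() {
        let mut mock = MockResolver::new();
        mock.expect_resolve()
            // must fire 2 or 3 times, like `.times(2..4)` in the diff
            .times(2..4)
            // arbitrary error, to short-circuit the code under test
            .returning(|_| Err("name already exists".to_string()));

        assert!(mock.resolve("test").is_err());
        assert!(mock.resolve("test").is_err());
    }
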
@@ -11,7 +11,10 @@ use uuid::Uuid;
 
 use super::UpdateStore;
 use super::{codec::UpdateKeyCodec, State};
-use crate::index_controller::{Enqueued, UpdateStatus, index_actor::IndexActorHandle, update_actor::store::update_uuid_to_file_path};
+use crate::index_controller::{
+    index_actor::IndexActorHandle, update_actor::store::update_uuid_to_file_path, Enqueued,
+    UpdateStatus,
+};
 
 #[derive(Serialize, Deserialize)]
 struct UpdateEntry {

@@ -89,7 +92,7 @@ impl UpdateStore {
             };
 
             serde_json::to_writer(&mut file, &update_json)?;
-            file.write(b"\n")?;
+            file.write_all(b"\n")?;
         }
     }
 

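The `write` → `write_all` substitutions in this file fix clippy's `unused_io_amount` lint: `Write::write` is allowed to perform a short write and reports how many bytes it actually wrote, a count this code discarded, whereas `write_all` keeps writing until the whole buffer is flushed or an error occurs. The difference in miniature:

    use std::io::Write;

    fn append_newlines(mut file: &std::fs::File) -> std::io::Result<()> {
        // `write` returns how many bytes were actually written; ignoring
        // that count can silently drop data on a partial write.
        let _n = file.write(b"\n")?;

        // `write_all` loops internally until the entire buffer is written
        // (or an error occurs), so there is no count to forget.
        file.write_all(b"\n")?;
        Ok(())
    }
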
@@ -111,12 +114,12 @@ impl UpdateStore {
         for update in updates {
             let ((uuid, _), data) = update?;
             if uuids.contains(&uuid) {
-                let update = data.decode()?.into();
+                let update = data.decode()?;
 
                 let update_json = UpdateEntry { uuid, update };
 
                 serde_json::to_writer(&mut file, &update_json)?;
-                file.write(b"\n")?;
+                file.write_all(b"\n")?;
             }
         }

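Aside from the lint fixes, this loop shows the dump's on-disk format: each `UpdateEntry` is serialized with `serde_json::to_writer` and terminated with an explicit `\n`, i.e. newline-delimited JSON (JSON Lines). A self-contained sketch of that write loop, with the entry type reduced to stand-in fields:

    use serde::Serialize;
    use std::io::Write;

    #[derive(Serialize)]
    struct Entry {
        uuid: u32,      // stand-in for the real Uuid key
        update: String, // stand-in for the update payload
    }

    fn write_entries(
        mut out: impl Write,
        entries: &[Entry],
    ) -> Result<(), Box<dyn std::error::Error>> {
        for entry in entries {
            // One JSON object per line: serialize, then a bare newline.
            serde_json::to_writer(&mut out, entry)?;
            out.write_all(b"\n")?;
        }
        Ok(())
    }
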
@@ -131,7 +134,6 @@ impl UpdateStore {
         let dst_update_path = dst.as_ref().join("updates/");
         create_dir_all(&dst_update_path)?;
 
-
         let mut options = EnvOpenOptions::new();
         options.map_size(db_size as usize);
         let (store, _) = UpdateStore::new(options, &dst_update_path)?;

@@ -152,7 +154,11 @@ impl UpdateStore {
             store.register_raw_updates(&mut wtxn, &update, uuid)?;
 
             // Copy ascociated update path if it exists
-            if let UpdateStatus::Enqueued(Enqueued { content: Some(uuid), .. }) = update {
+            if let UpdateStatus::Enqueued(Enqueued {
+                content: Some(uuid),
+                ..
+            }) = update
+            {
                 let src = update_uuid_to_file_path(&src_update_path, uuid);
                 let dst = update_uuid_to_file_path(&dst_update_path, uuid);
                 std::fs::copy(src, dst)?;

@@ -1,10 +1,13 @@
-pub mod dump;
 mod codec;
+pub mod dump;
 
-use std::{collections::{BTreeMap, HashSet}, path::PathBuf};
 use std::fs::{copy, create_dir_all, remove_file, File};
 use std::path::Path;
 use std::sync::Arc;
+use std::{
+    collections::{BTreeMap, HashSet},
+    path::PathBuf,
+};
 
 use arc_swap::ArcSwap;
 use futures::StreamExt;

@@ -20,13 +23,13 @@ use uuid::Uuid;
 use codec::*;
 
 use super::UpdateMeta;
-use crate::{helpers::EnvSizer, index_controller::index_actor::IndexResult};
 use crate::index_controller::{index_actor::CONCURRENT_INDEX_MSG, updates::*, IndexActorHandle};
+use crate::{helpers::EnvSizer, index_controller::index_actor::IndexResult};
 
 #[allow(clippy::upper_case_acronyms)]
 type BEU64 = U64<heed::byteorder::BE>;
 
-const UPDATE_DIR: &'static str = "update_files";
+const UPDATE_DIR: &str = "update_files";
 
 pub struct UpdateStoreInfo {
     /// Size of the update store in bytes.

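`BEU64` (the name that needs the `upper_case_acronyms` allow) is a big-endian `u64` key type, and the byte order is not cosmetic: LMDB, which heed wraps, orders keys by their raw bytes, and only big-endian encoding makes that byte order agree with numeric order. The property is easy to check without heed:

    fn main() {
        // Big-endian bytes compare like the numbers themselves...
        assert!(1u64.to_be_bytes() < 2u64.to_be_bytes());
        assert!(255u64.to_be_bytes() < 256u64.to_be_bytes());

        // ...while little-endian bytes do not: 256 = [0, 1, 0, ...] sorts
        // before 255 = [255, 0, 0, ...] when compared lexicographically.
        assert!(256u64.to_le_bytes() < 255u64.to_le_bytes());
    }
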
@@ -441,11 +444,12 @@ impl UpdateStore {
 
         txn.commit()?;
 
-        uuids_to_remove.iter()
+        uuids_to_remove
+            .iter()
             .map(|uuid| update_uuid_to_file_path(&self.path, *uuid))
             .for_each(|path| {
                 let _ = remove_file(path);
             });
 
         // We don't care about the currently processing update, since it will be removed by itself
         // once its done processing, and we can't abort a running update.

@@ -482,7 +486,11 @@ impl UpdateStore {
         for entry in pendings {
             let ((_, uuid, _), pending) = entry?;
             if uuids.contains(&uuid) {
-                if let Enqueued { content: Some(uuid), .. } = pending.decode()? {
+                if let Enqueued {
+                    content: Some(uuid),
+                    ..
+                } = pending.decode()?
+                {
                     let path = update_uuid_to_file_path(&self.path, uuid);
                     copy(path, &update_files_path)?;
                 }

@@ -507,13 +515,16 @@ impl UpdateStore {
         Ok(())
     }
 
-
     pub fn get_info(&self) -> anyhow::Result<UpdateStoreInfo> {
         let mut size = self.env.size();
         let txn = self.env.read_txn()?;
         for entry in self.pending_queue.iter(&txn)? {
             let (_, pending) = entry?;
-            if let Enqueued { content: Some(uuid), .. } = pending {
+            if let Enqueued {
+                content: Some(uuid),
+                ..
+            } = pending
+            {
                 let path = update_uuid_to_file_path(&self.path, uuid);
                 size += File::open(path)?.metadata()?.len();
             }

@@ -528,7 +539,9 @@ impl UpdateStore {
 }
 
 fn update_uuid_to_file_path(root: impl AsRef<Path>, uuid: Uuid) -> PathBuf {
-    root.as_ref().join(UPDATE_DIR).join(format!("update_{}", uuid))
+    root.as_ref()
+        .join(UPDATE_DIR)
+        .join(format!("update_{}", uuid))
 }
 
 #[cfg(test)]

@@ -577,7 +590,7 @@ mod test {
         let store_clone = update_store.clone();
         tokio::task::spawn_blocking(move || {
             store_clone
-                .register_update(meta, Some("here"), uuid)
+                .register_update(meta, None, uuid)
                 .unwrap();
         })
         .await

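The test drives `register_update` through `tokio::task::spawn_blocking` because the update store does synchronous LMDB and file I/O; calling it directly in an async context would block an executor thread. A minimal sketch of the pattern (the file-size body is just a stand-in blocking operation):

    // Move a blocking operation off the async executor.
    async fn file_len_off_the_runtime(path: &'static str) -> std::io::Result<u64> {
        tokio::task::spawn_blocking(move || {
            // Anything that blocks the thread (LMDB transactions, file
            // I/O, heavy CPU work) belongs here, on the blocking pool.
            std::fs::metadata(path).map(|m| m.len())
        })
        .await
        // The JoinError from the blocking task is surfaced separately
        // from the inner I/O error.
        .expect("blocking task panicked")
    }
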
@@ -4,7 +4,7 @@ use log::{info, warn};
 use tokio::sync::mpsc;
 use uuid::Uuid;
 
-use super::{Result, UuidResolverError, UuidResolveMsg, UuidStore};
+use super::{Result, UuidResolveMsg, UuidResolverError, UuidStore};
 
 pub struct UuidResolverActor<S> {
     inbox: mpsc::Receiver<UuidResolveMsg>,

@@ -37,5 +37,5 @@ pub enum UuidResolveMsg {
     DumpRequest {
         path: PathBuf,
         ret: oneshot::Sender<Result<HashSet<Uuid>>>,
-    }
+    },
 }

@@ -164,7 +164,7 @@ impl HeedUuidStore {
 
         let entry = DumpEntry { uuid, uid };
         serde_json::to_writer(&mut dump_file, &entry)?;
-        dump_file.write(b"\n").unwrap();
+        dump_file.write_all(b"\n").unwrap();
 
         uuids.insert(uuid);
     }

@@ -192,7 +192,7 @@ impl HeedUuidStore {
                 println!("importing {} {}", uid, uuid);
                 db.db.put(&mut txn, &uid, uuid.as_bytes())?;
             }
-            Err(e) => Err(e)?,
+            Err(e) => return Err(e.into()),
         }
 
         line.clear();

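The final hunk is clippy's `try_err` lint: `Err(e)?` does return early, but it hides both the early return and the `Into` conversion behind the `?` operator; `return Err(e.into())` says the same thing explicitly. A small self-contained illustration (the types here are made up for the example):

    #[derive(Debug)]
    struct MyError(String);

    impl From<std::io::Error> for MyError {
        fn from(e: std::io::Error) -> Self {
            MyError(e.to_string())
        }
    }

    fn read_len(path: &str) -> Result<u64, MyError> {
        let meta = match std::fs::metadata(path) {
            Ok(meta) => meta,
            // `Err(e) => Err(e)?` compiles, but clippy::try_err flags it:
            // the `?` on a freshly built `Err` is an obfuscated early
            // return. The explicit form also shows the `From` conversion.
            Err(e) => return Err(e.into()),
        };
        Ok(meta.len())
    }
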