Mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-03 03:47:02 +02:00)
reintroduce anyhow
parent 439db1aae0
commit 02277ec2cf
36 changed files with 110 additions and 154 deletions
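Most of these changes swap fallible setup and loading functions from `Box<dyn std::error::Error>` back to `anyhow::Result`, which is what the commit title refers to. As a rough, hedged illustration of the before/after pattern (the function names and the `settings.json` path below are invented for the example, not taken from the diff):

    use anyhow::Context;

    // Before: a boxed trait object as the error type.
    fn open_settings_before(path: &std::path::Path) -> Result<std::fs::File, Box<dyn std::error::Error>> {
        Ok(std::fs::File::open(path)?)
    }

    // After: anyhow::Result, which also allows attaching context to the error.
    fn open_settings_after(path: &std::path::Path) -> anyhow::Result<std::fs::File> {
        std::fs::File::open(path).with_context(|| format!("could not open {}", path.display()))
    }

    fn main() -> anyhow::Result<()> {
        let path = std::path::Path::new("settings.json");
        let _ = open_settings_before(path).map_err(|e| anyhow::anyhow!("{}", e));
        let _file = open_settings_after(path)?;
        Ok(())
    }

The returned `anyhow::Error` keeps the underlying `io::Error` as its source, so a caller such as `main` can still print the full error chain.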
@@ -10,9 +10,9 @@ use tokio::sync::{mpsc, oneshot, RwLock};
 use update_actor::UpdateActorHandle;
 use uuid_resolver::UuidResolverHandle;
 
+use super::error::{DumpActorError, Result};
 use super::{DumpInfo, DumpMsg, DumpStatus, DumpTask};
 use crate::index_controller::{update_actor, uuid_resolver};
-use super::error::{DumpActorError, Result};
 
 pub const CONCURRENT_DUMP_MSG: usize = 10;
 
@@ -1,6 +1,8 @@
 use meilisearch_error::{Code, ErrorCode};
 
-use crate::index_controller::{update_actor::error::UpdateActorError, uuid_resolver::UuidResolverError};
+use crate::index_controller::{
+update_actor::error::UpdateActorError, uuid_resolver::UuidResolverError,
+};
 
 pub type Result<T> = std::result::Result<T, DumpActorError>;
 
@@ -3,8 +3,8 @@ use std::path::Path;
 use actix_web::web::Bytes;
 use tokio::sync::{mpsc, oneshot};
 
-use super::{DumpActor, DumpActorHandle, DumpInfo, DumpMsg};
 use super::error::Result;
+use super::{DumpActor, DumpActorHandle, DumpInfo, DumpMsg};
 
 #[derive(Clone)]
 pub struct DumpActorHandleImpl {
@@ -35,7 +35,7 @@ impl DumpActorHandleImpl {
 update: crate::index_controller::update_actor::UpdateActorHandleImpl<Bytes>,
 index_db_size: usize,
 update_db_size: usize,
-) -> std::result::Result<Self, Box<dyn std::error::Error>> {
+) -> anyhow::Result<Self> {
 let (sender, receiver) = mpsc::channel(10);
 let actor = DumpActor::new(
 receiver,
@@ -31,7 +31,7 @@ impl MetadataV1 {
 dst: impl AsRef<Path>,
 size: usize,
 indexer_options: &IndexerOpts,
-) -> std::result::Result<(), Box<dyn std::error::Error>> {
+) -> anyhow::Result<()> {
 info!(
 "Loading dump, dump database version: {}, dump version: V1",
 self.db_version
@@ -83,7 +83,7 @@ fn load_index(
 primary_key: Option<&str>,
 size: usize,
 indexer_options: &IndexerOpts,
-) -> std::result::Result<(), Box<dyn std::error::Error>> {
+) -> anyhow::Result<()> {
 let index_path = dst.as_ref().join(&format!("indexes/index-{}", uuid));
 
 create_dir_all(&index_path)?;
@@ -172,7 +172,7 @@ impl From<Settings> for index_controller::Settings<Unchecked> {
 }
 
 /// Extract Settings from `settings.json` file present at provided `dir_path`
-fn import_settings(dir_path: impl AsRef<Path>) -> std::result::Result<Settings, Box<dyn std::error::Error>> {
+fn import_settings(dir_path: impl AsRef<Path>) -> anyhow::Result<Settings> {
 let path = dir_path.as_ref().join("settings.json");
 let file = File::open(path)?;
 let reader = std::io::BufReader::new(file);
@@ -34,7 +34,7 @@ impl MetadataV2 {
 index_db_size: usize,
 update_db_size: usize,
 indexing_options: &IndexerOpts,
-) -> std::result::Result<(), Box<dyn std::error::Error>> {
+) -> anyhow::Result<()> {
 info!(
 "Loading dump from {}, dump database version: {}, dump version: V2",
 self.dump_date, self.db_version
@@ -1,7 +1,7 @@
 use tokio::sync::oneshot;
 
-use super::DumpInfo;
 use super::error::Result;
+use super::DumpInfo;
 
 pub enum DumpMsg {
 CreateDump {
@@ -1,6 +1,7 @@
 use std::fs::File;
 use std::path::{Path, PathBuf};
 
+use anyhow::Context;
 use chrono::{DateTime, Utc};
 use log::{info, warn};
 #[cfg(test)]
@@ -21,10 +22,10 @@ use crate::{helpers::compression, option::IndexerOpts};
 use error::Result;
 
 mod actor;
+pub mod error;
 mod handle_impl;
 mod loaders;
 mod message;
-pub mod error;
 
 const META_FILE_NAME: &str = "metadata.json";
 
@@ -107,7 +108,7 @@ pub fn load_dump(
 index_db_size: usize,
 update_db_size: usize,
 indexer_opts: &IndexerOpts,
-) -> std::result::Result<(), Box<dyn std::error::Error>> {
+) -> anyhow::Result<()> {
 let tmp_src = tempfile::tempdir_in(".")?;
 let tmp_src_path = tmp_src.path();
 
@@ -120,9 +121,7 @@ pub fn load_dump(
 let dst_dir = dst_path
 .as_ref()
 .parent()
-// TODO
-//.with_context(|| format!("Invalid db path: {}", dst_path.as_ref().display()))?;
-.unwrap();
+.with_context(|| format!("Invalid db path: {}", dst_path.as_ref().display()))?;
 
 let tmp_dst = tempfile::tempdir_in(dst_dir)?;
 
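The hunk above drops the temporary `// TODO` and `.unwrap()` and restores `with_context` on the `Option` returned by `Path::parent`. A minimal sketch of that combinator in isolation (the `/data.ms` path is only an example, not taken from the diff):

    use std::path::Path;

    use anyhow::Context;

    // `with_context` also works on Option<T>: a None becomes an
    // anyhow::Error carrying the lazily built message.
    fn parent_dir(dst_path: &Path) -> anyhow::Result<&Path> {
        dst_path
            .parent()
            .with_context(|| format!("Invalid db path: {}", dst_path.display()))
    }

    fn main() -> anyhow::Result<()> {
        let dir = parent_dir(Path::new("/data.ms"))?;
        println!("temporary directories would be created next to {}", dir.display());
        Ok(())
    }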
@@ -188,7 +187,7 @@ where
 let dump_path = tokio::task::spawn_blocking(move || -> Result<PathBuf> {
 let temp_dump_file = tempfile::NamedTempFile::new_in(&self.path)?;
 compression::to_tar_gz(temp_dump_path, temp_dump_file.path())
-.map_err(|e| DumpActorError::Internal(e))?;
+.map_err(|e| DumpActorError::Internal(e.into()))?;
 
 let dump_path = self.path.join(self.uid).with_extension("dump");
 temp_dump_file.persist(&dump_path)?;
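Just above, the compression error is funnelled into the actor's `Internal` variant through `e.into()`. The variant's exact payload type is not visible in this diff; assuming it wraps a boxed error, the conversion works roughly like this sketch (the `compress` helper is invented):

    use std::fmt;

    // Assumed shape: an Internal variant holding a boxed error, so any
    // concrete error type can be funnelled into it with `e.into()`.
    #[derive(Debug)]
    enum DumpActorError {
        Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
    }

    impl fmt::Display for DumpActorError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                DumpActorError::Internal(e) => write!(f, "internal error: {}", e),
            }
        }
    }

    fn compress() -> Result<(), std::io::Error> {
        Err(std::io::Error::new(std::io::ErrorKind::Other, "tar.gz failed"))
    }

    fn create_dump() -> Result<(), DumpActorError> {
        compress().map_err(|e| DumpActorError::Internal(e.into()))?;
        Ok(())
    }

    fn main() {
        if let Err(e) = create_dump() {
            println!("{}", e);
        }
    }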
@@ -19,8 +19,8 @@ use crate::index_controller::{
 };
 use crate::option::IndexerOpts;
 
+use super::error::{IndexActorError, Result};
 use super::{IndexMeta, IndexMsg, IndexSettings, IndexStore};
-use super::error::{Result, IndexActorError};
 
 pub const CONCURRENT_INDEX_MSG: usize = 10;
 
@@ -31,7 +31,7 @@ pub struct IndexActor<S> {
 }
 
 impl<S: IndexStore + Sync + Send> IndexActor<S> {
-pub fn new(receiver: mpsc::Receiver<IndexMsg>, store: S) -> std::result::Result<Self, Box<dyn std::error::Error>> {
+pub fn new(receiver: mpsc::Receiver<IndexMsg>, store: S) -> anyhow::Result<Self> {
 let options = IndexerOpts::default();
 let update_handler = UpdateHandler::new(&options)?;
 let update_handler = Arc::new(update_handler);
@@ -146,7 +146,6 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
 .ok_or(IndexActorError::UnexistingIndex)?;
 let result = spawn_blocking(move || index.perform_search(query)).await??;
 Ok(result)
-
 }
 
 async fn handle_create_index(
@@ -269,7 +268,8 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
 }
 let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
 builder.set_primary_key(primary_key);
-builder.execute(|_, _| ())
+builder
+.execute(|_, _| ())
 .map_err(|e| IndexActorError::Internal(Box::new(e)))?;
 let meta = IndexMeta::new_txn(&index, &txn)?;
 txn.commit()?;
@@ -340,10 +340,12 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
 
 Ok(IndexStats {
 size: index.size(),
-number_of_documents: index.number_of_documents(&rtxn)
+number_of_documents: index
+.number_of_documents(&rtxn)
 .map_err(|e| IndexActorError::Internal(Box::new(e)))?,
 is_indexing: None,
-fields_distribution: index.fields_distribution(&rtxn)
+fields_distribution: index
+.fields_distribution(&rtxn)
 .map_err(|e| IndexActorError::Internal(e.into()))?,
 })
 })
@@ -30,11 +30,7 @@ macro_rules! internal_error {
 }
 }
 
-internal_error!(
-heed::Error,
-tokio::task::JoinError,
-std::io::Error
-);
+internal_error!(heed::Error, tokio::task::JoinError, std::io::Error);
 
 impl ErrorCode for IndexActorError {
 fn error_code(&self) -> Code {
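The body of `internal_error!` lies outside this hunk, so the following is only an assumed shape of such a helper, not the actual MeiliSearch macro: it generates a `From` impl for every listed error type so that `?` can funnel them into the `Internal` variant.

    // Sketch of a typical internal_error!-style helper. The enum here is a
    // stand-in with a single variant; the real error type has more.
    #[derive(Debug)]
    enum IndexActorError {
        Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
    }

    macro_rules! internal_error {
        ($($error:ty),*) => {
            $(
                impl From<$error> for IndexActorError {
                    fn from(error: $error) -> Self {
                        Self::Internal(Box::new(error))
                    }
                }
            )*
        };
    }

    internal_error!(std::io::Error, std::fmt::Error);

    fn main() {
        let err: IndexActorError = std::io::Error::new(std::io::ErrorKind::Other, "boom").into();
        println!("{:?}", err);
    }

A later hunk adds a `UpdateActorError:` prefix to one `internal_error!` invocation, which suggests a second form of the macro that also takes the target error type as a parameter.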
@@ -12,8 +12,8 @@ use crate::{
 index_controller::{Failed, Processed},
 };
 
-use super::{IndexActor, IndexActorHandle, IndexMeta, IndexMsg, MapIndexStore};
 use super::error::Result;
+use super::{IndexActor, IndexActorHandle, IndexMeta, IndexMsg, MapIndexStore};
 
 #[derive(Clone)]
 pub struct IndexActorHandleImpl {
@@ -22,11 +22,7 @@ pub struct IndexActorHandleImpl {
 
 #[async_trait::async_trait]
 impl IndexActorHandle for IndexActorHandleImpl {
-async fn create_index(
-&self,
-uuid: Uuid,
-primary_key: Option<String>,
-) -> Result<IndexMeta> {
+async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta> {
 let (ret, receiver) = oneshot::channel();
 let msg = IndexMsg::CreateIndex {
 ret,
@@ -118,11 +114,7 @@ impl IndexActorHandle for IndexActorHandleImpl {
 Ok(receiver.await.expect("IndexActor has been killed")?)
 }
 
-async fn update_index(
-&self,
-uuid: Uuid,
-index_settings: IndexSettings,
-) -> Result<IndexMeta> {
+async fn update_index(&self, uuid: Uuid, index_settings: IndexSettings) -> Result<IndexMeta> {
 let (ret, receiver) = oneshot::channel();
 let msg = IndexMsg::UpdateIndex {
 uuid,
@@ -156,7 +148,7 @@ impl IndexActorHandle for IndexActorHandleImpl {
 }
 
 impl IndexActorHandleImpl {
-pub fn new(path: impl AsRef<Path>, index_size: usize) -> std::result::Result<Self, Box<dyn std::error::Error>> {
+pub fn new(path: impl AsRef<Path>, index_size: usize) -> anyhow::Result<Self> {
 let (sender, receiver) = mpsc::channel(100);
 
 let store = MapIndexStore::new(path, index_size);
@@ -3,9 +3,9 @@ use std::path::PathBuf;
 use tokio::sync::oneshot;
 use uuid::Uuid;
 
+use super::error::Result as IndexResult;
 use crate::index::{Checked, Document, SearchQuery, SearchResult, Settings};
 use crate::index_controller::{Failed, IndexStats, Processed, Processing};
-use super::error::Result as IndexResult;
 
 use super::{IndexMeta, IndexSettings};
 
@@ -22,10 +22,10 @@ use self::error::IndexActorError;
 use super::IndexSettings;
 
 mod actor;
+pub mod error;
 mod handle_impl;
 mod message;
 mod store;
-pub mod error;
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
@@ -60,8 +60,7 @@ impl IndexMeta {
 #[async_trait::async_trait]
 #[cfg_attr(test, automock)]
 pub trait IndexActorHandle {
-async fn create_index(&self, uuid: Uuid, primary_key: Option<String>)
--> Result<IndexMeta>;
+async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta>;
 async fn update(
 &self,
 uuid: Uuid,
@@ -86,11 +85,7 @@ pub trait IndexActorHandle {
 ) -> Result<Document>;
 async fn delete(&self, uuid: Uuid) -> Result<()>;
 async fn get_index_meta(&self, uuid: Uuid) -> Result<IndexMeta>;
-async fn update_index(
-&self,
-uuid: Uuid,
-index_settings: IndexSettings,
-) -> Result<IndexMeta>;
+async fn update_index(&self, uuid: Uuid, index_settings: IndexSettings) -> Result<IndexMeta>;
 async fn snapshot(&self, uuid: Uuid, path: PathBuf) -> Result<()>;
 async fn dump(&self, uuid: Uuid, path: PathBuf) -> Result<()>;
 async fn get_index_stats(&self, uuid: Uuid) -> Result<IndexStats>;
@@ -105,11 +100,7 @@ mod test {
 #[async_trait::async_trait]
 /// Useful for passing around an `Arc<MockIndexActorHandle>` in tests.
 impl IndexActorHandle for Arc<MockIndexActorHandle> {
-async fn create_index(
-&self,
-uuid: Uuid,
-primary_key: Option<String>,
-) -> Result<IndexMeta> {
+async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta> {
 self.as_ref().create_index(uuid, primary_key).await
 }
 
@@ -29,12 +29,12 @@ use self::dump_actor::load_dump;
 use self::error::IndexControllerError;
 
 mod dump_actor;
+pub mod error;
 mod index_actor;
 mod snapshot;
 mod update_actor;
 mod updates;
 mod uuid_resolver;
-pub mod error;
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
@@ -83,7 +83,7 @@ pub struct Stats {
 }
 
 impl IndexController {
-pub fn new(path: impl AsRef<Path>, options: &Opt) -> std::result::Result<Self, Box<dyn std::error::Error>> {
+pub fn new(path: impl AsRef<Path>, options: &Opt) -> anyhow::Result<Self> {
 let index_size = options.max_mdb_size.get_bytes() as usize;
 let update_store_size = options.max_udb_size.get_bytes() as usize;
 
@@ -238,10 +238,7 @@ impl IndexController {
 }
 }
 
-pub async fn create_index(
-&self,
-index_settings: IndexSettings,
-) -> Result<IndexMetadata> {
+pub async fn create_index(&self, index_settings: IndexSettings) -> Result<IndexMetadata> {
 let IndexSettings { uid, primary_key } = index_settings;
 let uid = uid.ok_or(IndexControllerError::MissingUid)?;
 let uuid = Uuid::new_v4();
@@ -52,7 +52,7 @@ where
 }
 }
 
-async fn perform_snapshot(&self) -> std::result::Result<(), Box<dyn std::error::Error + Sync + Send + 'static>> {
+async fn perform_snapshot(&self) -> anyhow::Result<()> {
 info!("Performing snapshot.");
 
 let snapshot_dir = self.snapshot_path.clone();
@@ -77,7 +77,7 @@ where
 let snapshot_path = self
 .snapshot_path
 .join(format!("{}.snapshot", self.db_name));
-let snapshot_path = spawn_blocking(move || -> Result<PathBuf, Box<dyn std::error::Error + Sync + Send + 'static>> {
+let snapshot_path = spawn_blocking(move || -> anyhow::Result<PathBuf> {
 let temp_snapshot_file = tempfile::NamedTempFile::new_in(snapshot_dir)?;
 let temp_snapshot_file_path = temp_snapshot_file.path().to_owned();
 compression::to_tar_gz(temp_snapshot_path, temp_snapshot_file_path)?;
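In the snapshot code above, the blocking closure handed to `spawn_blocking` now returns `anyhow::Result<PathBuf>`, and the two `?` after `.await` unwrap the `JoinError` and the `anyhow::Error` in turn. A minimal, hedged sketch of that pattern (the `example.snapshot` file name is invented):

    use std::path::PathBuf;

    use tokio::task::spawn_blocking;

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Blocking filesystem work runs off the async runtime; the closure
        // returns anyhow::Result so `?` works on every step inside it.
        let snapshot_path = spawn_blocking(move || -> anyhow::Result<PathBuf> {
            let path = PathBuf::from("example.snapshot");
            std::fs::write(&path, b"snapshot placeholder")?;
            Ok(path)
        })
        .await??; // first `?` is for the JoinError, second for the anyhow::Error

        println!("wrote {}", snapshot_path.display());
        Ok(())
    }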
@@ -97,7 +97,7 @@ pub fn load_snapshot(
 snapshot_path: impl AsRef<Path>,
 ignore_snapshot_if_db_exists: bool,
 ignore_missing_snapshot: bool,
-) -> std::result::Result<(), Box<dyn std::error::Error>> {
+) -> anyhow::Result<()> {
 if !db_path.as_ref().exists() && snapshot_path.as_ref().exists() {
 match compression::from_tar_gz(snapshot_path, &db_path) {
 Ok(()) => Ok(()),
@@ -13,8 +13,8 @@ use tokio::io::AsyncWriteExt;
 use tokio::sync::mpsc;
 use uuid::Uuid;
 
-use super::{PayloadData, UpdateMsg, UpdateStore, UpdateStoreInfo};
 use super::error::{Result, UpdateActorError};
+use super::{PayloadData, UpdateMsg, UpdateStore, UpdateStoreInfo};
 use crate::index_controller::index_actor::IndexActorHandle;
 use crate::index_controller::{UpdateMeta, UpdateStatus};
 
@@ -36,7 +36,7 @@ where
 inbox: mpsc::Receiver<UpdateMsg<D>>,
 path: impl AsRef<Path>,
 index_handle: I,
-) -> std::result::Result<Self, Box<dyn std::error::Error>> {
+) -> anyhow::Result<Self> {
 let path = path.as_ref().join("updates");
 
 std::fs::create_dir_all(&path)?;
@@ -201,9 +201,9 @@ where
 async fn handle_get_update(&self, uuid: Uuid, id: u64) -> Result<UpdateStatus> {
 let store = self.store.clone();
 tokio::task::spawn_blocking(move || {
-let result = store
-.meta(uuid, id)?
-.ok_or(UpdateActorError::UnexistingUpdate(id))?;
+let result = store
+.meta(uuid, id)?
+.ok_or(UpdateActorError::UnexistingUpdate(id))?;
 Ok(result)
 })
 .await?
@@ -45,6 +45,7 @@ impl From<tokio::sync::oneshot::error::RecvError> for UpdateActorError {
 }
 
 internal_error!(
+UpdateActorError:
 heed::Error,
 std::io::Error,
 serde_json::Error,
@@ -22,7 +22,7 @@ where
 index_handle: I,
 path: impl AsRef<Path>,
 update_store_size: usize,
-) -> std::result::Result<Self, Box<dyn std::error::Error>>
+) -> anyhow::Result<Self>
 where
 I: IndexActorHandle + Clone + Send + Sync + 'static,
 {
@@ -4,8 +4,8 @@ use std::path::PathBuf;
 use tokio::sync::{mpsc, oneshot};
 use uuid::Uuid;
 
-use super::{PayloadData, UpdateMeta, UpdateStatus, UpdateStoreInfo};
 use super::error::Result;
+use super::{PayloadData, UpdateMeta, UpdateStatus, UpdateStoreInfo};
 
 pub enum UpdateMsg<D> {
 Update {
@@ -7,16 +7,16 @@ use uuid::Uuid;
 use crate::index_controller::{UpdateMeta, UpdateStatus};
 
 use actor::UpdateActor;
-use message::UpdateMsg;
 use error::Result;
+use message::UpdateMsg;
 
 pub use handle_impl::UpdateActorHandleImpl;
 pub use store::{UpdateStore, UpdateStoreInfo};
 
 mod actor;
+pub mod error;
 mod handle_impl;
 mod message;
-pub mod error;
 pub mod store;
 
 type PayloadData<D> = std::result::Result<D, PayloadError>;
@@ -9,7 +9,7 @@ use heed::{EnvOpenOptions, RoTxn};
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;
 
-use super::{State, UpdateStore, Result};
+use super::{Result, State, UpdateStore};
 use crate::index_controller::{
 index_actor::IndexActorHandle, update_actor::store::update_uuid_to_file_path, Enqueued,
 UpdateStatus,
@@ -125,7 +125,7 @@ impl UpdateStore {
 src: impl AsRef<Path>,
 dst: impl AsRef<Path>,
 db_size: usize,
-) -> std::result::Result<(), Box<dyn std::error::Error>> {
+) -> anyhow::Result<()> {
 let dst_update_path = dst.as_ref().join("updates/");
 create_dir_all(&dst_update_path)?;
 
@@ -23,10 +23,10 @@ use uuid::Uuid;
 
 use codec::*;
 
-use super::UpdateMeta;
 use super::error::Result;
-use crate::index_controller::{index_actor::CONCURRENT_INDEX_MSG, updates::*, IndexActorHandle};
+use super::UpdateMeta;
 use crate::helpers::EnvSizer;
+use crate::index_controller::{index_actor::CONCURRENT_INDEX_MSG, updates::*, IndexActorHandle};
 
 #[allow(clippy::upper_case_acronyms)]
 type BEU64 = U64<heed::byteorder::BE>;
@@ -110,7 +110,7 @@ impl UpdateStore {
 fn new(
 mut options: EnvOpenOptions,
 path: impl AsRef<Path>,
-) -> std::result::Result<(Self, mpsc::Receiver<()>), Box<dyn std::error::Error>> {
+) -> anyhow::Result<(Self, mpsc::Receiver<()>)> {
 options.max_dbs(5);
 
 let env = options.open(&path)?;
@@ -141,7 +141,7 @@ impl UpdateStore {
 path: impl AsRef<Path>,
 index_handle: impl IndexActorHandle + Clone + Sync + Send + 'static,
 must_exit: Arc<AtomicBool>,
-) -> std::result::Result<Arc<Self>, Box<dyn std::error::Error>> {
+) -> anyhow::Result<Arc<Self>> {
 let (update_store, mut notification_receiver) = Self::new(options, path)?;
 let update_store = Arc::new(update_store);
 
@@ -270,11 +270,8 @@ impl UpdateStore {
 }
 _ => {
 let _update_id = self.next_update_id_raw(wtxn, index_uuid)?;
-self.updates.put(
-wtxn,
-&(index_uuid, update.id()),
-&update,
-)?;
+self.updates
+.put(wtxn, &(index_uuid, update.id()), &update)?;
 }
 }
 Ok(())
@@ -283,10 +280,7 @@ impl UpdateStore {
 /// Executes the user provided function on the next pending update (the one with the lowest id).
 /// This is asynchronous as it let the user process the update with a read-only txn and
 /// only writing the result meta to the processed-meta store *after* it has been processed.
-fn process_pending_update(
-&self,
-index_handle: impl IndexActorHandle,
-) -> Result<Option<()>> {
+fn process_pending_update(&self, index_handle: impl IndexActorHandle) -> Result<Option<()>> {
 // Create a read transaction to be able to retrieve the pending update in order.
 let rtxn = self.env.read_txn()?;
 let first_meta = self.pending_queue.first(&rtxn)?;
@@ -353,11 +347,8 @@ impl UpdateStore {
 Err(res) => res.into(),
 };
 
-self.updates.put(
-&mut wtxn,
-&(index_uuid, update_id),
-&result,
-)?;
+self.updates
+.put(&mut wtxn, &(index_uuid, update_id), &result)?;
 
 wtxn.commit()?;
 
@@ -704,18 +695,10 @@ mod test {
 let txn = store.env.read_txn().unwrap();
 
 assert!(store.pending_queue.first(&txn).unwrap().is_none());
-let update = store
-.updates
-.get(&txn, &(uuid, 0))
-.unwrap()
-.unwrap();
+let update = store.updates.get(&txn, &(uuid, 0)).unwrap().unwrap();
 
 assert!(matches!(update, UpdateStatus::Processed(_)));
-let update = store
-.updates
-.get(&txn, &(uuid, 1))
-.unwrap()
-.unwrap();
+let update = store.updates.get(&txn, &(uuid, 1)).unwrap().unwrap();
 
 assert!(matches!(update, UpdateStatus::Failed(_)));
 }
@@ -25,7 +25,7 @@ pub enum UpdateMeta {
 },
 ClearDocuments,
 DeleteDocuments {
-ids: Vec<String>
+ids: Vec<String>,
 },
 Settings(Settings<Unchecked>),
 }