MeiliSearch/meilisearch-lib/src/index_controller/dump_actor/mod.rs

387 lines
11 KiB
Rust
Raw Normal View History

2021-05-27 14:30:20 +02:00
use std::fs::File;
use std::path::{Path, PathBuf};
2021-09-24 11:53:11 +02:00
use std::sync::Arc;
2021-05-10 20:25:09 +02:00
2021-05-30 15:55:17 +02:00
use chrono::{DateTime, Utc};
2021-06-23 10:41:55 +02:00
use log::{info, trace, warn};
2021-05-10 20:23:12 +02:00
use serde::{Deserialize, Serialize};
2021-05-31 16:40:59 +02:00
use tokio::fs::create_dir_all;
2021-04-28 16:43:49 +02:00
2021-05-26 20:42:09 +02:00
use loaders::v1::MetadataV1;
2021-05-10 20:25:09 +02:00
pub use actor::DumpActor;
pub use handle_impl::*;
2021-05-10 20:25:09 +02:00
pub use message::DumpMsg;
2021-10-05 13:53:22 +02:00
use super::index_resolver::index_store::IndexStore;
use super::index_resolver::uuid_store::UuidStore;
use super::index_resolver::IndexResolver;
2021-09-22 11:52:29 +02:00
use super::updates::UpdateSender;
use crate::analytics;
2021-09-29 12:02:27 +02:00
use crate::compression::{from_tar_gz, to_tar_gz};
use crate::index_controller::dump_actor::error::DumpActorError;
2021-09-29 15:41:25 +02:00
use crate::index_controller::dump_actor::loaders::{v2, v3};
2021-09-22 11:52:29 +02:00
use crate::index_controller::updates::UpdateMsg;
use crate::options::IndexerOpts;
use error::Result;
2021-05-26 22:52:06 +02:00
mod actor;
2021-06-15 17:39:07 +02:00
pub mod error;
2021-05-26 22:52:06 +02:00
mod handle_impl;
mod loaders;
mod message;
2021-05-31 16:03:39 +02:00
const META_FILE_NAME: &str = "metadata.json";
2021-05-27 14:30:20 +02:00
2021-09-29 15:24:59 +02:00
/// Metadata stored at the root of a dump (`metadata.json`), describing the
/// instance that produced it.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    // Version of the binary that created the dump (stamped from CARGO_PKG_VERSION in `new`).
    db_version: String,
    // Index database size limit, in bytes — passed through from the caller.
    index_db_size: usize,
    // Update database size limit, in bytes — passed through from the caller.
    update_db_size: usize,
    // Creation time of the dump (set to `Utc::now()` in `new`).
    dump_date: DateTime<Utc>,
}
impl Metadata {
pub fn new(index_db_size: usize, update_db_size: usize) -> Self {
Self {
db_version: env!("CARGO_PKG_VERSION").to_string(),
index_db_size,
update_db_size,
dump_date: Utc::now(),
}
}
}
2021-05-10 20:25:09 +02:00
/// Handle used to send commands to the dump actor; mocked in tests via mockall.
#[async_trait::async_trait]
#[cfg_attr(test, mockall::automock)]
pub trait DumpActorHandle {
    /// Start the creation of a dump
    /// Implementation: [handle_impl::DumpActorHandleImpl::create_dump]
    async fn create_dump(&self) -> Result<DumpInfo>;

    /// Return the status of an already created dump
    /// Implementation: [handle_impl::DumpActorHandleImpl::dump_info]
    async fn dump_info(&self, uid: String) -> Result<DumpInfo>;
}
2021-04-28 16:43:49 +02:00
/// Versioned wrapper around dump metadata. The `dumpVersion` tag in
/// `metadata.json` selects which variant is (de)serialized.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "dumpVersion")]
pub enum MetadataVersion {
    V1(MetadataV1),
    // V2 and V3 share the same metadata layout; only the loader differs.
    V2(Metadata),
    V3(Metadata),
}
2021-09-29 15:24:59 +02:00
impl MetadataVersion {
pub fn new_v3(index_db_size: usize, update_db_size: usize) -> Self {
let meta = Metadata::new(index_db_size, update_db_size);
Self::V3(meta)
2021-05-27 10:51:19 +02:00
}
2021-09-29 15:41:25 +02:00
pub fn db_version(&self) -> &str {
match self {
Self::V1(meta) => &meta.db_version,
Self::V2(meta) | Self::V3(meta) => &meta.db_version,
}
}
pub fn version(&self) -> &str {
match self {
MetadataVersion::V1(_) => "V1",
MetadataVersion::V2(_) => "V2",
MetadataVersion::V3(_) => "V3",
}
}
pub fn dump_date(&self) -> Option<&DateTime<Utc>> {
match self {
MetadataVersion::V1(_) => None,
MetadataVersion::V2(meta) | MetadataVersion::V3(meta) => Some(&meta.dump_date),
}
}
2021-04-28 16:43:49 +02:00
}
2021-05-10 20:25:09 +02:00
/// Lifecycle state of a dump; serialized in snake_case.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum DumpStatus {
    Done,
    InProgress,
    Failed,
}
2021-05-10 20:25:09 +02:00
/// Status report for a single dump.
#[derive(Debug, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DumpInfo {
    pub uid: String,
    pub status: DumpStatus,
    // Error message; only set (and only serialized) when the dump failed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    started_at: DateTime<Utc>,
    // Set when the dump terminates, on both success (`done`) and failure (`with_error`).
    #[serde(skip_serializing_if = "Option::is_none")]
    finished_at: Option<DateTime<Utc>>,
}
2021-04-28 16:43:49 +02:00
2021-05-10 20:25:09 +02:00
impl DumpInfo {
pub fn new(uid: String, status: DumpStatus) -> Self {
Self {
uid,
status,
error: None,
2021-05-30 15:55:17 +02:00
started_at: Utc::now(),
finished_at: None,
2021-05-10 20:25:09 +02:00
}
2021-05-05 14:11:56 +02:00
}
2021-04-28 16:43:49 +02:00
2021-05-10 20:25:09 +02:00
pub fn with_error(&mut self, error: String) {
self.status = DumpStatus::Failed;
2021-05-30 15:55:17 +02:00
self.finished_at = Some(Utc::now());
self.error = Some(error);
2021-05-05 14:11:56 +02:00
}
2021-04-28 16:43:49 +02:00
2021-05-10 20:25:09 +02:00
pub fn done(&mut self) {
2021-05-30 15:55:17 +02:00
self.finished_at = Some(Utc::now());
2021-05-10 20:25:09 +02:00
self.status = DumpStatus::Done;
}
2021-04-28 16:43:49 +02:00
2021-05-10 20:25:09 +02:00
pub fn dump_already_in_progress(&self) -> bool {
self.status == DumpStatus::InProgress
}
2021-04-28 16:43:49 +02:00
}
2021-05-27 14:30:20 +02:00
pub fn load_dump(
2021-05-26 20:42:09 +02:00
dst_path: impl AsRef<Path>,
src_path: impl AsRef<Path>,
2021-05-31 16:40:59 +02:00
index_db_size: usize,
update_db_size: usize,
2021-05-26 22:52:06 +02:00
indexer_opts: &IndexerOpts,
2021-06-15 17:39:07 +02:00
) -> anyhow::Result<()> {
2021-09-29 12:34:39 +02:00
// Setup a temp directory path in the same path as the database, to prevent cross devices
// references.
2021-09-29 15:41:25 +02:00
let temp_path = dst_path
.as_ref()
.parent()
.map(ToOwned::to_owned)
.unwrap_or_else(|| ".".into());
2021-09-29 12:34:39 +02:00
if cfg!(windows) {
std::env::set_var("TMP", temp_path);
} else {
std::env::set_var("TMPDIR", temp_path);
}
2021-09-28 18:10:09 +02:00
let tmp_src = tempfile::tempdir()?;
2021-05-27 14:30:20 +02:00
let tmp_src_path = tmp_src.path();
2021-09-29 12:02:27 +02:00
from_tar_gz(&src_path, tmp_src_path)?;
2021-05-27 14:30:20 +02:00
let meta_path = tmp_src_path.join(META_FILE_NAME);
2021-05-26 20:42:09 +02:00
let mut meta_file = File::open(&meta_path)?;
2021-09-29 15:24:59 +02:00
let meta: MetadataVersion = serde_json::from_reader(&mut meta_file)?;
2021-04-28 16:43:49 +02:00
2021-09-28 18:10:09 +02:00
let tmp_dst = tempfile::tempdir()?;
2021-05-31 10:42:31 +02:00
2021-09-29 15:41:25 +02:00
info!(
"Loading dump {}, dump database version: {}, dump version: {}",
meta.dump_date()
.map(|t| format!("from {}", t))
.unwrap_or_else(String::new),
meta.db_version(),
meta.version()
);
2021-09-29 12:34:39 +02:00
2021-05-26 20:42:09 +02:00
match meta {
2021-09-29 15:24:59 +02:00
MetadataVersion::V1(meta) => {
2021-05-31 16:40:59 +02:00
meta.load_dump(&tmp_src_path, tmp_dst.path(), index_db_size, indexer_opts)?
2021-05-31 16:03:39 +02:00
}
2021-09-29 15:41:25 +02:00
MetadataVersion::V2(meta) => v2::load_dump(
meta,
&tmp_src_path,
tmp_dst.path(),
index_db_size,
update_db_size,
indexer_opts,
)?,
2021-09-29 15:24:59 +02:00
MetadataVersion::V3(meta) => v3::load_dump(
meta,
2021-05-27 14:30:20 +02:00
&tmp_src_path,
2021-05-31 10:42:31 +02:00
tmp_dst.path(),
2021-05-27 14:30:20 +02:00
index_db_size,
update_db_size,
indexer_opts,
)?,
}
2021-05-31 10:42:31 +02:00
// Persist and atomically rename the db
let persisted_dump = tmp_dst.into_path();
if dst_path.as_ref().exists() {
warn!("Overwriting database at {}", dst_path.as_ref().display());
std::fs::remove_dir_all(&dst_path)?;
}
std::fs::rename(&persisted_dump, &dst_path)?;
2021-05-06 18:44:16 +02:00
2021-04-28 16:43:49 +02:00
Ok(())
}
2021-05-27 14:30:20 +02:00
2021-10-05 13:53:22 +02:00
/// Everything needed to perform one dump: destination paths, handles to the
/// index resolver and update actor, and the size limits to record in the metadata.
struct DumpTask<U, I> {
    // Directory where the finished `<uid>.dump` archive is written.
    dump_path: PathBuf,
    // Database path; used to copy the analytics user id into the dump.
    db_path: PathBuf,
    index_resolver: Arc<IndexResolver<U, I>>,
    update_sender: UpdateSender,
    // Identifier of this dump; becomes the archive's file stem.
    uid: String,
    update_db_size: usize,
    index_db_size: usize,
}
2021-10-05 13:53:22 +02:00
impl<U, I> DumpTask<U, I>
where
    U: UuidStore + Sync + Send + 'static,
    I: IndexStore + Sync + Send + 'static,
{
    /// Builds a complete dump in a temporary directory, then tar-gzips it and
    /// persists it as `<dump_path>/<uid>.dump`.
    async fn run(self) -> Result<()> {
        trace!("Performing dump.");

        create_dir_all(&self.dump_path).await?;

        // Temp dir creation touches the filesystem, so run it off the async runtime.
        let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??;
        let temp_dump_path = temp_dump_dir.path().to_owned();

        // Write the V3 metadata file so the archive is self-describing.
        let meta = MetadataVersion::new_v3(self.index_db_size, self.update_db_size);
        let meta_path = temp_dump_path.join(META_FILE_NAME);
        let mut meta_file = File::create(&meta_path)?;
        serde_json::to_writer(&mut meta_file, &meta)?;

        analytics::copy_user_id(&self.db_path, &temp_dump_path);

        create_dir_all(&temp_dump_path.join("indexes")).await?;
        // Dump the indexes; the returned uuids are forwarded to the update actor below.
        let uuids = self.index_resolver.dump(temp_dump_path.clone()).await?;

        // Ask the update actor to dump the updates for those indexes.
        UpdateMsg::dump(&self.update_sender, uuids, temp_dump_path.clone()).await?;

        // Compression and persisting are blocking work; move them off the runtime.
        let dump_path = tokio::task::spawn_blocking(move || -> Result<PathBuf> {
            // Create the temp archive inside the destination directory so that
            // `persist` below is a same-device rename.
            let temp_dump_file = tempfile::NamedTempFile::new_in(&self.dump_path)?;
            to_tar_gz(temp_dump_path, temp_dump_file.path())
                .map_err(|e| DumpActorError::Internal(e.into()))?;

            let dump_path = self.dump_path.join(self.uid).with_extension("dump");
            temp_dump_file.persist(&dump_path)?;
            Ok(dump_path)
        })
        .await??;

        info!("Created dump in {:?}.", dump_path);

        Ok(())
    }
}
2021-10-05 13:53:22 +02:00
#[cfg(test)]
mod test {
    use std::collections::HashSet;

    use futures::future::{err, ok};
    use once_cell::sync::Lazy;
    use uuid::Uuid;

    use super::*;
    use crate::index::error::Result as IndexResult;
    use crate::index::test::Mocker;
    use crate::index::Index;
    use crate::index_controller::index_resolver::error::IndexResolverError;
    use crate::index_controller::index_resolver::index_store::MockIndexStore;
    use crate::index_controller::index_resolver::uuid_store::MockUuidStore;
    use crate::index_controller::updates::create_update_handler;

    /// Sets the temp-dir environment variable to the current directory, at
    /// most once per process (tempfile reads it when creating temp dirs).
    fn setup() {
        static SETUP: Lazy<()> = Lazy::new(|| {
            if cfg!(windows) {
                std::env::set_var("TMP", ".");
            } else {
                std::env::set_var("TMPDIR", ".");
            }
        });

        // just deref to make sure the env is setup
        *SETUP
    }

    /// Happy path: each of the 4 mocked indexes is asked for its uuid and
    /// dumped exactly once, and the task completes successfully.
    #[actix_rt::test]
    async fn test_dump_normal() {
        setup();

        let tmp = tempfile::tempdir().unwrap();

        let uuids = std::iter::repeat_with(Uuid::new_v4)
            .take(4)
            .collect::<HashSet<_>>();
        let mut uuid_store = MockUuidStore::new();
        let uuids_cloned = uuids.clone();
        uuid_store
            .expect_dump()
            .once()
            .returning(move |_| Box::pin(ok(uuids_cloned.clone())));

        let mut index_store = MockIndexStore::new();
        index_store.expect_get().times(4).returning(move |uuid| {
            let mocker = Mocker::default();
            let uuids_clone = uuids.clone();
            // The uuid reported by each mock index must be one we registered.
            mocker.when::<(), Uuid>("uuid").once().then(move |_| {
                assert!(uuids_clone.contains(&uuid));
                uuid
            });
            mocker
                .when::<&Path, IndexResult<()>>("dump")
                .once()
                .then(move |_| Ok(()));
            Box::pin(ok(Some(Index::faux(mocker))))
        });

        let index_resolver = Arc::new(IndexResolver::new(uuid_store, index_store));

        let update_sender =
            create_update_handler(index_resolver.clone(), tmp.path(), 4096 * 100).unwrap();

        let task = DumpTask {
            dump_path: tmp.path().into(),
            // this should do nothing
            db_path: tmp.path().into(),
            index_resolver,
            update_sender,
            uid: String::from("test"),
            update_db_size: 4096 * 10,
            index_db_size: 4096 * 10,
        };

        task.run().await.unwrap();
    }

    /// The task must surface an error when the uuid store fails to dump.
    #[actix_rt::test]
    async fn error_performing_dump() {
        let tmp = tempfile::tempdir().unwrap();

        let mut uuid_store = MockUuidStore::new();
        uuid_store
            .expect_dump()
            .once()
            .returning(move |_| Box::pin(err(IndexResolverError::ExistingPrimaryKey)));

        // No index should ever be fetched: the failure happens before that.
        let index_store = MockIndexStore::new();

        let index_resolver = Arc::new(IndexResolver::new(uuid_store, index_store));

        let update_sender =
            create_update_handler(index_resolver.clone(), tmp.path(), 4096 * 100).unwrap();

        let task = DumpTask {
            dump_path: tmp.path().into(),
            // this should do nothing
            db_path: tmp.path().into(),
            index_resolver,
            update_sender,
            uid: String::from("test"),
            update_db_size: 4096 * 10,
            index_db_size: 4096 * 10,
        };

        assert!(task.run().await.is_err());
    }
}