write and load the user-id in the dumps

Tamo 2021-10-26 12:34:00 +02:00 committed by marin postma
parent ba14ea1243
commit 87a8bf5e96
9 changed files with 51 additions and 13 deletions

View File

@@ -6,5 +6,8 @@ members = [
]
resolver = "2"
[profile.release]
debug = true
[patch.crates-io]
pest = { git = "https://github.com/pest-parser/pest.git", rev = "51fd1d49f1041f7839975664ef71fe15c7dcaf67" }

View File

@@ -7,7 +7,7 @@ use platform_dirs::AppDirs;
use serde_json::Value;
use std::fmt::Display;
use std::fs;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
/// The MeiliSearch config dir:
/// `~/.config/MeiliSearch` on *NIX or *BSD.
@@ -16,8 +16,8 @@ use std::path::PathBuf;
static MEILISEARCH_CONFIG_PATH: Lazy<Option<PathBuf>> =
Lazy::new(|| AppDirs::new(Some("MeiliSearch"), false).map(|appdir| appdir.config_dir));
-fn config_user_id_path(opt: &Opt) -> Option<PathBuf> {
-opt.db_path
+fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
+db_path
.canonicalize()
.ok()
.map(|path| path.join("user-id").display().to_string().replace("/", "-"))
@@ -26,19 +26,18 @@ fn config_user_id_path(opt: &Opt) -> Option<PathBuf> {
}
/// Look for the user-id in the `data.ms` or in `~/.config/MeiliSearch/path-to-db-user-id`
-fn find_user_id(opt: &Opt) -> Option<String> {
-fs::read_to_string(opt.db_path.join("user-id"))
+fn find_user_id(db_path: &Path) -> Option<String> {
+fs::read_to_string(db_path.join("user-id"))
.ok()
-.or_else(|| fs::read_to_string(&config_user_id_path(opt)?).ok())
+.or_else(|| fs::read_to_string(&config_user_id_path(db_path)?).ok())
}
#[cfg(all(not(debug_assertions), feature = "analytics"))]
/// Write the user-id in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-user-id`. Ignore the errors.
-fn write_user_id(opt: &Opt, user_id: &str) {
-let _ = fs::write(opt.db_path.join("user-id"), user_id.as_bytes());
+fn write_user_id(db_path: &Path, user_id: &str) {
+let _ = fs::write(db_path.join("user-id"), user_id.as_bytes());
if let Some((meilisearch_config_path, user_id_path)) = MEILISEARCH_CONFIG_PATH
.as_ref()
-.zip(config_user_id_path(opt))
+.zip(config_user_id_path(db_path))
{
println!("{}", user_id_path.display());
let _ = fs::create_dir_all(&meilisearch_config_path);
@@ -139,11 +138,11 @@ mod segment {
pub async fn new(opt: &Opt, meilisearch: &MeiliSearch) -> &'static Self {
// see if there is already a user-id in the `data.ms` or in `/tmp/path-to-db-user-id`
-let user_id = super::find_user_id(opt);
+let user_id = super::find_user_id(&opt.db_path);
let first_time_run = user_id.is_none();
// if not, generate a new user-id and save it to the fs
let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string());
-super::write_user_id(opt, &user_id);
+super::write_user_id(&opt.db_path, &user_id);
let client = HttpClient::default();
let user = User::UserId {
@@ -569,7 +568,7 @@ pub struct MockAnalytics {
impl MockAnalytics {
pub fn new(opt: &Opt) -> &'static Self {
-let user = find_user_id(opt).unwrap_or(String::new());
+let user = find_user_id(&opt.db_path).unwrap_or(String::new());
let analytics = Box::new(Self { user });
Box::leak(analytics)
}
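
The change in this file is a signature refactor: `config_user_id_path`, `find_user_id`, and `write_user_id` now take the database path directly instead of the whole `Opt`, so callers only need to know where the database lives. A minimal, standalone sketch of the resulting flow, with a placeholder id instead of `Uuid::new_v4()` and without the `~/.config/MeiliSearch` fallback:

use std::fs;
use std::path::Path;

/// Look for an existing `user-id` file stored next to the database.
fn find_user_id(db_path: &Path) -> Option<String> {
    fs::read_to_string(db_path.join("user-id")).ok()
}

/// Persist the id next to the database, ignoring any error.
fn write_user_id(db_path: &Path, user_id: &str) {
    let _ = fs::write(db_path.join("user-id"), user_id.as_bytes());
}

fn main() {
    let db_path = Path::new("./data.ms");
    // Reuse the stored id when there is one, otherwise start a fresh one.
    let user_id = find_user_id(db_path).unwrap_or_else(|| String::from("placeholder-uuid"));
    write_user_id(db_path, &user_id);
    println!("analytics user-id: {}", user_id);
}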

View File

@@ -0,0 +1,17 @@
+use std::{fs, path::Path};
+
+/// When loading a dump, copy the `user-id` from the source dump directory to the
+/// destination database directory; if there is no user-id, ignore the error.
+pub fn load_dump(src: &Path, dst: &Path) {
+    if let Ok(user_id) = fs::read_to_string(src.join("user-id")) {
+        let _ = fs::write(dst.join("user-id"), &user_id);
+    }
+}
+
+/// When writing a dump, copy the `user-id` from the source file to the destination
+/// file; if there is no user-id, ignore the error.
+pub fn write_dump(src: &Path, dst: &Path) {
+    if let Ok(user_id) = fs::read_to_string(src) {
+        let _ = fs::write(dst, &user_id);
+    }
+}
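
The two helpers are slightly asymmetric: `load_dump` takes two directories and appends the `user-id` file name itself, while `write_dump` copies from one explicit file path to another. A rough usage sketch, assuming the module above is in scope as `analytics` and using illustrative paths:

use std::path::Path;

fn main() {
    // When importing a dump: carry the user-id from the extracted dump
    // directory into the freshly created database directory.
    analytics::load_dump(Path::new("extracted-dump"), Path::new("data.ms"));

    // When creating a dump: copy `<db>/user-id` into the dump being built.
    analytics::write_dump(Path::new("data.ms/user-id"), Path::new("dump-tmp/user-id"));
}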

View File

@@ -22,6 +22,7 @@ pub struct DumpActor<U, I> {
index_resolver: Arc<IndexResolver<U, I>>,
update: UpdateSender,
dump_path: PathBuf,
+analytics_path: PathBuf,
lock: Arc<Mutex<()>>,
dump_infos: Arc<RwLock<HashMap<String, DumpInfo>>>,
update_db_size: usize,
@@ -43,6 +44,7 @@ where
index_resolver: Arc<IndexResolver<U, I>>,
update: UpdateSender,
dump_path: impl AsRef<Path>,
+analytics_path: impl AsRef<Path>,
index_db_size: usize,
update_db_size: usize,
) -> Self {
@@ -53,6 +55,7 @@
index_resolver,
update,
dump_path: dump_path.as_ref().into(),
+analytics_path: analytics_path.as_ref().into(),
dump_infos,
lock,
index_db_size,
@@ -119,6 +122,7 @@ where
let task = DumpTask {
path: self.dump_path.clone(),
+analytics_path: self.analytics_path.clone(),
index_resolver: self.index_resolver.clone(),
update_sender: self.update.clone(),
uid: uid.clone(),

View File

@@ -33,6 +33,7 @@ impl DumpActorHandle for DumpActorHandleImpl {
impl DumpActorHandleImpl {
pub fn new(
path: impl AsRef<Path>,
+analytics_path: impl AsRef<Path>,
index_resolver: Arc<HardStateIndexResolver>,
update: crate::index_controller::updates::UpdateSender,
index_db_size: usize,
@@ -44,6 +45,7 @@ impl DumpActorHandleImpl {
index_resolver,
update,
path,
+analytics_path,
index_db_size,
update_db_size,
);

View File

@@ -2,6 +2,7 @@ use std::path::Path;
use log::info;
+use crate::analytics;
use crate::index_controller::dump_actor::Metadata;
use crate::index_controller::index_resolver::IndexResolver;
use crate::index_controller::update_file_store::UpdateFileStore;
@@ -24,6 +25,7 @@ pub fn load_dump(
IndexResolver::load_dump(src.as_ref(), &dst, index_db_size, indexing_options)?;
UpdateFileStore::load_dump(src.as_ref(), &dst)?;
UpdateStore::load_dump(&src, &dst, update_db_size)?;
+analytics::load_dump(src.as_ref(), dst.as_ref());
info!("Loading indexes.");

View File

@@ -17,6 +17,7 @@ use super::index_resolver::index_store::IndexStore;
use super::index_resolver::uuid_store::UuidStore;
use super::index_resolver::IndexResolver;
use super::updates::UpdateSender;
+use crate::analytics;
use crate::compression::{from_tar_gz, to_tar_gz};
use crate::index_controller::dump_actor::error::DumpActorError;
use crate::index_controller::dump_actor::loaders::{v2, v3};
@@ -223,6 +224,7 @@ pub fn load_dump(
struct DumpTask<U, I> {
path: PathBuf,
+analytics_path: PathBuf,
index_resolver: Arc<IndexResolver<U, I>>,
update_sender: UpdateSender,
uid: String,
@@ -247,6 +249,7 @@ where
let meta_path = temp_dump_path.join(META_FILE_NAME);
let mut meta_file = File::create(&meta_path)?;
serde_json::to_writer(&mut meta_file, &meta)?;
+analytics::write_dump(&self.analytics_path, &temp_dump_path.join("user-id"));
create_dir_all(&temp_dump_path.join("indexes")).await?;
let uuids = self.index_resolver.dump(temp_dump_path.clone()).await?;
@@ -339,6 +342,8 @@ mod test {
let task = DumpTask {
path: tmp.path().to_owned(),
+// this should do nothing
+analytics_path: tmp.path().join("user-id"),
index_resolver,
update_sender,
uid: String::from("test"),
@@ -367,6 +372,8 @@
let task = DumpTask {
path: tmp.path().to_owned(),
+// this should do nothing
+analytics_path: tmp.path().join("user-id"),
index_resolver,
update_sender,
uid: String::from("test"),

View File

@@ -169,8 +169,10 @@ impl IndexControllerBuilder {
let dump_path = self
.dump_dst
.ok_or_else(|| anyhow::anyhow!("Missing dump directory path"))?;
+let analytics_path = db_path.as_ref().join("user-id");
let dump_handle = dump_actor::DumpActorHandleImpl::new(
dump_path,
+analytics_path,
index_resolver.clone(),
update_sender.clone(),
index_size,
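
The builder ties the two sides together: `analytics_path` is derived from the database path, so the file the HTTP layer writes with `write_user_id` and the file the `DumpTask` copies into each dump are the same `<db_path>/user-id`. A tiny sketch of that derivation (the helper function is illustrative, not part of the codebase):

use std::path::{Path, PathBuf};

/// Illustrative helper: the analytics user-id file always lives next to the database.
fn analytics_path(db_path: &Path) -> PathBuf {
    db_path.join("user-id")
}

fn main() {
    let path = analytics_path(Path::new("/var/lib/meilisearch/data.ms"));
    assert_eq!(path, PathBuf::from("/var/lib/meilisearch/data.ms/user-id"));
    println!("{}", path.display());
}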

View File

@@ -5,6 +5,8 @@ pub mod options;
pub mod index;
pub mod index_controller;
+mod analytics;
pub use index_controller::updates::store::Update;
pub use index_controller::MeiliSearch;