fix tests

Author: many
Date:   2020-10-20 16:37:12 +02:00
Parent: 10dace305d
Commit: a0eafea200
2 changed files with 9 additions and 9 deletions


@@ -97,15 +97,15 @@ pub struct Opt {
     /// Defines the path of the snapshot file to import.
     /// This option will, by default, stop the process if a database already exist or if no snapshot exists at
     /// the given path. If this option is not specified no snapshot is imported.
-    #[structopt(long, env = "MEILI_IMPORT_SNAPSHOT")]
+    #[structopt(long)]
     pub import_snapshot: Option<PathBuf>,

     /// The engine will ignore a missing snapshot and not return an error in such case.
-    #[structopt(long, requires = "import-snapshot", env = "MEILI_IGNORE_MISSING_SNAPSHOT")]
+    #[structopt(long, requires = "import-snapshot")]
     pub ignore_missing_snapshot: bool,

     /// The engine will skip snapshot importation and not return an error in such case.
-    #[structopt(long, requires = "import-snapshot", env = "MEILI_IGNORE_SNAPSHOT_IF_DB_EXISTS")]
+    #[structopt(long, requires = "import-snapshot")]
     pub ignore_snapshot_if_db_exists: bool,

     /// Defines the directory path where meilisearch will create snapshot each snapshot_time_gap.
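Dropping the `env = "..."` attributes presumably fixes the tests because structopt falls back to the named environment variable whenever the flag is absent from the command line, so any MEILI_* variable set in the test environment silently changes the parsed options. A minimal sketch of that fallback behavior (hypothetical example, not part of this commit):

use std::path::PathBuf;
use structopt::StructOpt;

#[derive(Debug, StructOpt)]
struct Opt {
    // With `env`, the variable is consulted whenever --import-snapshot
    // is missing from the command line.
    #[structopt(long, env = "MEILI_IMPORT_SNAPSHOT")]
    import_snapshot: Option<PathBuf>,
}

fn main() {
    std::env::set_var("MEILI_IMPORT_SNAPSHOT", "/tmp/snapshot");
    // Parse an empty argument list, as a test harness might.
    let opt = Opt::from_iter(&["test"]);
    // The field was filled from the environment, not from the CLI.
    assert_eq!(opt.import_snapshot, Some(PathBuf::from("/tmp/snapshot")));
}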
@@ -125,7 +125,7 @@ pub struct Opt {
     pub dumps_dir: PathBuf,

     /// Import a dump from the specified path, must be a `.tar.gz` file.
-    #[structopt(long, env = "MEILI_IMPORT_DUMP", conflicts_with = "load-from-snapshot")]
+    #[structopt(long, conflicts_with = "import-snapshot")]
     pub import_dump: Option<PathBuf>,

     /// The batch size used in the importation process, the bigger it is the faster the dump is created.
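Note the corrected target of `conflicts_with`: it refers to another argument by its kebab-case name, and no "load-from-snapshot" argument exists in this struct, so the old rule presumably never fired. A small sketch of the fixed behavior (assumed example, not from this diff):

use std::path::PathBuf;
use structopt::StructOpt;

#[derive(Debug, StructOpt)]
struct Opt {
    #[structopt(long)]
    import_snapshot: Option<PathBuf>,

    // Names the other flag by its kebab-case argument name.
    #[structopt(long, conflicts_with = "import-snapshot")]
    import_dump: Option<PathBuf>,
}

fn main() {
    // Passing both flags is now rejected at parse time.
    let res = Opt::from_iter_safe(&[
        "test", "--import-snapshot", "/a", "--import-dump", "/b",
    ]);
    assert!(res.is_err());
}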


@@ -201,7 +201,7 @@ async fn dump_metadata_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();

-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();

     let file = File::open(tmp_dir_path.join("metadata.json")).unwrap();
     let mut metadata: serde_json::Value = serde_json::from_reader(file).unwrap();
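For context, the tests keep calling `compression::from_tar_gz` on the renamed file, so the `.dump` extension is only a rename: the payload is still a gzipped tarball. A rough sketch of what such a helper typically looks like (assumed implementation using the flate2 and tar crates; the real meilisearch helper may differ):

use std::fs::File;
use std::io::Result;
use std::path::Path;

use flate2::read::GzDecoder;
use tar::Archive;

fn from_tar_gz(src: impl AsRef<Path>, dest: impl AsRef<Path>) -> Result<()> {
    // Decompress the gzip stream and unpack the enclosed tar archive
    // into the destination directory.
    let file = File::open(src)?;
    let mut archive = Archive::new(GzDecoder::new(file));
    archive.unpack(dest)
}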
@@ -240,7 +240,7 @@ async fn dump_gzip_should_have_been_created() {
     let dump_uid = trigger_and_wait_dump(&mut server).await;
     let dumps_dir = Path::new(&server.data().dumps_dir);

-    let compressed_path = dumps_dir.join(format!("{}.tar.gz", dump_uid));
+    let compressed_path = dumps_dir.join(format!("{}.dump", dump_uid));

     assert!(File::open(compressed_path).is_ok());
 }
@@ -316,7 +316,7 @@ async fn dump_index_settings_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();

-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();

     let file = File::open(tmp_dir_path.join("test").join("settings.json")).unwrap();
     let settings: serde_json::Value = serde_json::from_reader(file).unwrap();
@@ -340,7 +340,7 @@ async fn dump_index_documents_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();

-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();

     let file = File::open(tmp_dir_path.join("test").join("documents.jsonl")).unwrap();
     let documents = read_all_jsonline(file);
@@ -364,7 +364,7 @@ async fn dump_index_updates_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();

-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();

     let file = File::open(tmp_dir_path.join("test").join("updates.jsonl")).unwrap();
     let mut updates = read_all_jsonline(file);
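The `read_all_jsonline` helper used by these assertions presumably parses a JSON Lines file into a vector of values; a minimal sketch of that shape (assumption, not shown in this diff):

use std::fs::File;
use std::io::{BufRead, BufReader};

fn read_all_jsonline(file: File) -> Vec<serde_json::Value> {
    BufReader::new(file)
        .lines()
        // One JSON document per line; panic on malformed input, as tests do.
        .map(|line| serde_json::from_str(&line.unwrap()).unwrap())
        .collect()
}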