From a0eafea200bc2aa7defbe2cb8ca129f273b1b9ea Mon Sep 17 00:00:00 2001
From: many
Date: Tue, 20 Oct 2020 16:37:12 +0200
Subject: [PATCH] fix tests

---
 meilisearch-http/src/option.rs |  8 ++++----
 meilisearch-http/tests/dump.rs | 10 +++++-----
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/meilisearch-http/src/option.rs b/meilisearch-http/src/option.rs
index 58e88633a..e1d74fd63 100644
--- a/meilisearch-http/src/option.rs
+++ b/meilisearch-http/src/option.rs
@@ -97,15 +97,15 @@ pub struct Opt {
     /// Defines the path of the snapshot file to import.
     /// This option will, by default, stop the process if a database already exist or if no snapshot exists at
     /// the given path. If this option is not specified no snapshot is imported.
-    #[structopt(long, env = "MEILI_IMPORT_SNAPSHOT")]
+    #[structopt(long)]
     pub import_snapshot: Option<PathBuf>,
 
     /// The engine will ignore a missing snapshot and not return an error in such case.
-    #[structopt(long, requires = "import-snapshot", env = "MEILI_IGNORE_MISSING_SNAPSHOT")]
+    #[structopt(long, requires = "import-snapshot")]
     pub ignore_missing_snapshot: bool,
 
     /// The engine will skip snapshot importation and not return an error in such case.
-    #[structopt(long, requires = "import-snapshot", env = "MEILI_IGNORE_SNAPSHOT_IF_DB_EXISTS")]
+    #[structopt(long, requires = "import-snapshot")]
     pub ignore_snapshot_if_db_exists: bool,
 
     /// Defines the directory path where meilisearch will create snapshot each snapshot_time_gap.
@@ -125,7 +125,7 @@ pub struct Opt {
     pub dumps_dir: PathBuf,
 
     /// Import a dump from the specified path, must be a `.tar.gz` file.
-    #[structopt(long, env = "MEILI_IMPORT_DUMP", conflicts_with = "load-from-snapshot")]
+    #[structopt(long, conflicts_with = "import-snapshot")]
     pub import_dump: Option<PathBuf>,
 
     /// The batch size used in the importation process, the bigger it is the faster the dump is created.
diff --git a/meilisearch-http/tests/dump.rs b/meilisearch-http/tests/dump.rs
index 70b6b08d4..0c2a2b696 100644
--- a/meilisearch-http/tests/dump.rs
+++ b/meilisearch-http/tests/dump.rs
@@ -201,7 +201,7 @@ async fn dump_metadata_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("metadata.json")).unwrap();
     let mut metadata: serde_json::Value = serde_json::from_reader(file).unwrap();
@@ -240,7 +240,7 @@ async fn dump_gzip_should_have_been_created() {
     let dump_uid = trigger_and_wait_dump(&mut server).await;
     let dumps_dir = Path::new(&server.data().dumps_dir);
 
-    let compressed_path = dumps_dir.join(format!("{}.tar.gz", dump_uid));
+    let compressed_path = dumps_dir.join(format!("{}.dump", dump_uid));
     assert!(File::open(compressed_path).is_ok());
 }
 
@@ -316,7 +316,7 @@ async fn dump_index_settings_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("test").join("settings.json")).unwrap();
     let settings: serde_json::Value = serde_json::from_reader(file).unwrap();
@@ -340,7 +340,7 @@ async fn dump_index_documents_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("test").join("documents.jsonl")).unwrap();
     let documents = read_all_jsonline(file);
@@ -364,7 +364,7 @@ async fn dump_index_updates_should_be_valid() {
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_dir.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("test").join("updates.jsonl")).unwrap();
     let mut updates = read_all_jsonline(file);
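
Note on the structopt attributes touched in option.rs above: `requires` and `conflicts_with` take the name of another argument, which structopt derives from the field name, so `import-snapshot` refers to the `import_snapshot` field in the same struct. Below is a minimal, self-contained sketch of that interaction; it is not part of the patch, the struct is illustrative only, and it assumes structopt 0.3 (kebab-case argument names by default) as a dependency.

// Sketch only (not part of the patch): standalone example of the
// `requires`/`conflicts_with` relationships used in option.rs above,
// assuming structopt 0.3 with its default kebab-case argument names.
use std::path::PathBuf;
use structopt::StructOpt;

#[derive(Debug, StructOpt)]
struct Opt {
    /// Path of the snapshot file to import.
    #[structopt(long)]
    import_snapshot: Option<PathBuf>,

    /// Only accepted together with `--import-snapshot`.
    #[structopt(long, requires = "import-snapshot")]
    ignore_missing_snapshot: bool,

    /// Rejected when `--import-snapshot` is also given.
    #[structopt(long, conflicts_with = "import-snapshot")]
    import_dump: Option<PathBuf>,
}

fn main() {
    // Parsing fails with a usage error for e.g. `--ignore-missing-snapshot`
    // on its own, or `--import-dump d.dump --import-snapshot s` combined.
    let opt = Opt::from_args();
    println!("{:?}", opt);
}

With this wiring, an invalid combination such as `--import-dump` plus `--import-snapshot` is rejected at argument-parsing time rather than later in the import code path.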