From 0714551101b7b635e406697f51220fcdec757be9 Mon Sep 17 00:00:00 2001 From: releasemops Date: Thu, 21 Apr 2022 13:23:46 +0000 Subject: [PATCH 001/185] bump meilisearch to v8000.1.0 --- Cargo.lock | 8 ++++---- meilisearch-auth/Cargo.toml | 2 +- meilisearch-error/Cargo.toml | 2 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68bb172aa..819c1d19e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1936,7 +1936,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "meilisearch-auth" -version = "0.26.0" +version = "8000.1.0" dependencies = [ "enum-iterator", "meilisearch-error", @@ -1951,7 +1951,7 @@ dependencies = [ [[package]] name = "meilisearch-error" -version = "0.26.0" +version = "8000.1.0" dependencies = [ "actix-web", "proptest", @@ -1962,7 +1962,7 @@ dependencies = [ [[package]] name = "meilisearch-http" -version = "0.26.0" +version = "8000.1.0" dependencies = [ "actix-cors", "actix-rt", @@ -2034,7 +2034,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "0.26.0" +version = "8000.1.0" dependencies = [ "actix-rt", "actix-web", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 0d0d2a0f2..3c6da1539 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-auth" -version = "0.26.0" +version = "8000.1.0" edition = "2021" [dependencies] diff --git a/meilisearch-error/Cargo.toml b/meilisearch-error/Cargo.toml index ac1a4bddd..5974f03a6 100644 --- a/meilisearch-error/Cargo.toml +++ b/meilisearch-error/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-error" -version = "0.26.0" +version = "8000.1.0" authors = ["marin "] edition = "2021" diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 546e414ce..96b8334ea 100644 --- a/meilisearch-http/Cargo.toml +++ 
b/meilisearch-http/Cargo.toml @@ -4,7 +4,7 @@ description = "Meilisearch HTTP server" edition = "2021" license = "MIT" name = "meilisearch-http" -version = "0.26.0" +version = "8000.1.0" [[bin]] name = "meilisearch" diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 37c6af488..45a83ae13 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-lib" -version = "0.26.0" +version = "8000.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From acf3357cf306d9917886d42ae8cf4c591278600e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 21 Apr 2022 16:24:27 +0200 Subject: [PATCH 002/185] Revert "[TEST PURPOSE] Bump meilisearch to version 8000.1.0" --- Cargo.lock | 8 ++++---- meilisearch-auth/Cargo.toml | 2 +- meilisearch-error/Cargo.toml | 2 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 819c1d19e..68bb172aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1936,7 +1936,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "meilisearch-auth" -version = "8000.1.0" +version = "0.26.0" dependencies = [ "enum-iterator", "meilisearch-error", @@ -1951,7 +1951,7 @@ dependencies = [ [[package]] name = "meilisearch-error" -version = "8000.1.0" +version = "0.26.0" dependencies = [ "actix-web", "proptest", @@ -1962,7 +1962,7 @@ dependencies = [ [[package]] name = "meilisearch-http" -version = "8000.1.0" +version = "0.26.0" dependencies = [ "actix-cors", "actix-rt", @@ -2034,7 +2034,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "8000.1.0" +version = "0.26.0" dependencies = [ "actix-rt", "actix-web", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 3c6da1539..0d0d2a0f2 100644 --- 
a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-auth" -version = "8000.1.0" +version = "0.26.0" edition = "2021" [dependencies] diff --git a/meilisearch-error/Cargo.toml b/meilisearch-error/Cargo.toml index 5974f03a6..ac1a4bddd 100644 --- a/meilisearch-error/Cargo.toml +++ b/meilisearch-error/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-error" -version = "8000.1.0" +version = "0.26.0" authors = ["marin "] edition = "2021" diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 96b8334ea..546e414ce 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -4,7 +4,7 @@ description = "Meilisearch HTTP server" edition = "2021" license = "MIT" name = "meilisearch-http" -version = "8000.1.0" +version = "0.26.0" [[bin]] name = "meilisearch" diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 45a83ae13..37c6af488 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-lib" -version = "8000.1.0" +version = "0.26.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From f1cd6b6ee80b76043c13b6e14e7665619e5fa056 Mon Sep 17 00:00:00 2001 From: releasemops Date: Thu, 21 Apr 2022 14:26:40 +0000 Subject: [PATCH 003/185] bump meilisearch to v9000.0.0 --- Cargo.lock | 8 ++++---- meilisearch-auth/Cargo.toml | 2 +- meilisearch-error/Cargo.toml | 2 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68bb172aa..48fcdebd5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1936,7 +1936,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "meilisearch-auth" -version = "0.26.0" +version = "9000.0.0" dependencies = [ "enum-iterator", "meilisearch-error", @@ -1951,7 +1951,7 @@ 
dependencies = [ [[package]] name = "meilisearch-error" -version = "0.26.0" +version = "9000.0.0" dependencies = [ "actix-web", "proptest", @@ -1962,7 +1962,7 @@ dependencies = [ [[package]] name = "meilisearch-http" -version = "0.26.0" +version = "9000.0.0" dependencies = [ "actix-cors", "actix-rt", @@ -2034,7 +2034,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "0.26.0" +version = "9000.0.0" dependencies = [ "actix-rt", "actix-web", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 0d0d2a0f2..c8ec23ee3 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-auth" -version = "0.26.0" +version = "9000.0.0" edition = "2021" [dependencies] diff --git a/meilisearch-error/Cargo.toml b/meilisearch-error/Cargo.toml index ac1a4bddd..47728e6e5 100644 --- a/meilisearch-error/Cargo.toml +++ b/meilisearch-error/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-error" -version = "0.26.0" +version = "9000.0.0" authors = ["marin "] edition = "2021" diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 546e414ce..5fc0e0f63 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -4,7 +4,7 @@ description = "Meilisearch HTTP server" edition = "2021" license = "MIT" name = "meilisearch-http" -version = "0.26.0" +version = "9000.0.0" [[bin]] name = "meilisearch" diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 37c6af488..98829832e 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-lib" -version = "0.26.0" +version = "9000.0.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 6ff8bf823d908097b952259f74e8bdd7f7175575 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 21 Apr 2022 16:36:56 +0200 Subject: [PATCH 004/185] 
Revert "[TEST PURPOSE] Bump meilisearch to version 9000.0.0" --- Cargo.lock | 8 ++++---- meilisearch-auth/Cargo.toml | 2 +- meilisearch-error/Cargo.toml | 2 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 48fcdebd5..68bb172aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1936,7 +1936,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "meilisearch-auth" -version = "9000.0.0" +version = "0.26.0" dependencies = [ "enum-iterator", "meilisearch-error", @@ -1951,7 +1951,7 @@ dependencies = [ [[package]] name = "meilisearch-error" -version = "9000.0.0" +version = "0.26.0" dependencies = [ "actix-web", "proptest", @@ -1962,7 +1962,7 @@ dependencies = [ [[package]] name = "meilisearch-http" -version = "9000.0.0" +version = "0.26.0" dependencies = [ "actix-cors", "actix-rt", @@ -2034,7 +2034,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "9000.0.0" +version = "0.26.0" dependencies = [ "actix-rt", "actix-web", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index c8ec23ee3..0d0d2a0f2 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-auth" -version = "9000.0.0" +version = "0.26.0" edition = "2021" [dependencies] diff --git a/meilisearch-error/Cargo.toml b/meilisearch-error/Cargo.toml index 47728e6e5..ac1a4bddd 100644 --- a/meilisearch-error/Cargo.toml +++ b/meilisearch-error/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-error" -version = "9000.0.0" +version = "0.26.0" authors = ["marin "] edition = "2021" diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 5fc0e0f63..546e414ce 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -4,7 +4,7 @@ description = "Meilisearch HTTP server" edition = "2021" license = "MIT" name = "meilisearch-http" 
-version = "9000.0.0" +version = "0.26.0" [[bin]] name = "meilisearch" diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 98829832e..37c6af488 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-lib" -version = "9000.0.0" +version = "0.26.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 38d681c230e03eb569b2ae48be71385480be656f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Thu, 21 Apr 2022 18:42:34 +0200 Subject: [PATCH 005/185] Change Nelson path --- Cargo.lock | 2 +- meilisearch-lib/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68bb172aa..ea045b696 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2277,7 +2277,7 @@ checksum = "546c37ac5d9e56f55e73b677106873d9d9f5190605e41a856503623648488cae" [[package]] name = "nelson" version = "0.1.0" -source = "git+https://github.com/MarinPostma/nelson.git?rev=675f13885548fb415ead8fbb447e9e6d9314000a#675f13885548fb415ead8fbb447e9e6d9314000a" +source = "git+https://github.com/meilisearch/nelson.git?rev=675f13885548fb415ead8fbb447e9e6d9314000a#675f13885548fb415ead8fbb447e9e6d9314000a" [[package]] name = "nom" diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 37c6af488..6594230e8 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -60,7 +60,7 @@ whoami = { version = "1.2.1", optional = true } actix-rt = "2.7.0" meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] } mockall = "0.11.0" -nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"} +nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"} paste = "1.0.6" proptest = "1.0.0" proptest-derive = "0.3.0" From 
348af6cfbf69f7fd282544bf6a9987f1aae67b0c Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 4 May 2022 15:20:45 +0200 Subject: [PATCH 006/185] deny-rust-warnings --- .github/workflows/rust.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index ff28f82ca..92ac4722f 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -12,6 +12,7 @@ on: env: CARGO_TERM_COLOR: always RUST_BACKTRACE: 1 + RUSTFLAGS: "-D warnings" jobs: tests: From 0250ea91579ddbcfbd5d7a4d1bbf7c42c39c706e Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 18 May 2022 10:26:52 +0200 Subject: [PATCH 007/185] Intergrate smart crop in Meilisearch --- Cargo.lock | 25 +- meilisearch-auth/Cargo.toml | 2 +- meilisearch-http/tests/search/formatted.rs | 31 +- meilisearch-lib/Cargo.toml | 2 +- meilisearch-lib/src/index/search.rs | 1185 +++----------------- 5 files changed, 182 insertions(+), 1063 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 64b683481..7b0897571 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1092,8 +1092,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.26.4" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.26.5#1f6dc31e2f8ee02cdda255a856d15f253daf17ec" +version = "0.28.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" dependencies = [ "nom", "nom_locate", @@ -1119,8 +1119,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.26.4" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.26.5#1f6dc31e2f8ee02cdda255a856d15f253daf17ec" +version = "0.28.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" dependencies = [ "serde_json", ] @@ -1622,8 +1622,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.26.4" -source = 
"git+https://github.com/meilisearch/milli.git?tag=v0.26.5#1f6dc31e2f8ee02cdda255a856d15f253daf17ec" +version = "0.28.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" dependencies = [ "serde_json", ] @@ -2151,8 +2151,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.26.4" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.26.5#1f6dc31e2f8ee02cdda255a856d15f253daf17ec" +version = "0.28.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" dependencies = [ "bimap", "bincode", @@ -2189,6 +2189,7 @@ dependencies = [ "smallvec", "smartstring", "tempfile", + "thiserror", "time 0.3.9", "uuid", ] @@ -3360,18 +3361,18 @@ checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" [[package]] name = "thiserror" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2 1.0.37", "quote 1.0.17", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 2d9f229f0..dd12b5b63 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" [dependencies] enum-iterator = "0.7.0" meilisearch-error = { path = "../meilisearch-error" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.26.5" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.28.0" } rand = 
"0.8.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } diff --git a/meilisearch-http/tests/search/formatted.rs b/meilisearch-http/tests/search/formatted.rs index 13b8a07d8..19387bdc5 100644 --- a/meilisearch-http/tests/search/formatted.rs +++ b/meilisearch-http/tests/search/formatted.rs @@ -16,7 +16,7 @@ async fn formatted_contain_wildcard() { index.wait_task(1).await; let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"] })) + .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "matches": true })) .await; assert_eq!(code, 200, "{}", response); assert_eq!( @@ -25,7 +25,8 @@ async fn formatted_contain_wildcard() { "_formatted": { "id": "852", "cattos": "pesti", - } + }, + "_matchesInfo": {"cattos": [{"start": 0, "length": 5}]}, }) ); @@ -43,7 +44,7 @@ async fn formatted_contain_wildcard() { let (response, code) = index .search_post( - json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"] }), + json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "matches": true }), ) .await; assert_eq!(code, 200, "{}", response); @@ -55,7 +56,8 @@ async fn formatted_contain_wildcard() { "_formatted": { "id": "852", "cattos": "pesti", - } + }, + "_matchesInfo": {"cattos": [{"start": 0, "length": 5}]}, }) ); @@ -141,6 +143,27 @@ async fn format_nested() { }) ); + let (response, code) = index + .search_post( + json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "matches": true }), + ) + .await; + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "doggos": [ + { + "name": "bobby", + }, + { + "name": "buddy", + }, + ], + "_matchesInfo": {"doggos.name": 
[{"start": 0, "length": 5}]}, + }) + ); + let (response, code) = index .search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] })) .await; diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 0b6596ffd..85ae49f64 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -30,7 +30,7 @@ lazy_static = "1.4.0" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-error = { path = "../meilisearch-error" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.26.5" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.28.0" } mime = "0.3.16" num_cpus = "1.13.1" obkv = "0.2.0" diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 7c12f985e..327cf173a 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -4,8 +4,10 @@ use std::str::FromStr; use std::time::Instant; use either::Either; -use milli::tokenizer::{Analyzer, AnalyzerConfig, Token}; -use milli::{AscDesc, FieldId, FieldsIdsMap, Filter, MatchingWords, SortError}; +use milli::tokenizer::{Analyzer, AnalyzerConfig}; +use milli::{ + AscDesc, FieldId, FieldsIdsMap, Filter, FormatOptions, MatchBounds, MatcherBuilder, SortError, +}; use regex::Regex; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; @@ -16,13 +18,7 @@ use super::error::{IndexError, Result}; use super::index::Index; pub type Document = serde_json::Map; -type MatchesInfo = BTreeMap>; - -#[derive(Serialize, Debug, Clone, PartialEq)] -pub struct MatchInfo { - start: usize, - length: usize, -} +type MatchesInfo = BTreeMap>; pub const DEFAULT_SEARCH_LIMIT: usize = 20; const fn default_search_limit() -> usize { @@ -105,21 +101,6 @@ pub struct SearchResult { pub exhaustive_facets_count: Option, } -#[derive(Copy, Clone, Default)] -struct FormatOptions { - highlight: bool, - crop: Option, -} - -impl FormatOptions { - pub fn 
merge(self, other: Self) -> Self { - Self { - highlight: self.highlight || other.highlight, - crop: self.crop.or(other.crop), - } - } -} - impl Index { pub fn perform_search(&self, query: SearchQuery) -> Result { let before_search = Instant::now(); @@ -221,11 +202,10 @@ impl Index { config.stop_words(&stop_words); let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (query.highlight_pre_tag, query.highlight_post_tag), - query.crop_marker, - ); + let mut formatter_builder = MatcherBuilder::from_matching_words(matching_words); + formatter_builder.crop_marker(query.crop_marker); + formatter_builder.highlight_prefix(query.highlight_pre_tag); + formatter_builder.highlight_suffix(query.highlight_post_tag); let mut documents = Vec::new(); @@ -242,16 +222,14 @@ impl Index { let mut document = permissive_json_pointer::select_values(&displayed_document, attributes_to_retrieve); - let matches_info = query - .matches - .then(|| compute_matches(&matching_words, &document, &analyzer)); - - let formatted = format_fields( + let (matches_info, formatted) = format_fields( &displayed_document, &fields_ids_map, - &formatter, - &matching_words, + &formatter_builder, + &analyzer, &formatted_options, + query.matches, + &displayed_ids, )?; if let Some(sort) = query.sort.as_ref() { @@ -317,56 +295,6 @@ fn insert_geo_distance(sorts: &[String], document: &mut Document) { } } -fn compute_matches>( - matcher: &impl Matcher, - document: &Document, - analyzer: &Analyzer, -) -> MatchesInfo { - let mut matches = BTreeMap::new(); - - for (key, value) in document { - let mut infos = Vec::new(); - compute_value_matches(&mut infos, value, matcher, analyzer); - if !infos.is_empty() { - matches.insert(key.clone(), infos); - } - } - matches -} - -fn compute_value_matches<'a, A: AsRef<[u8]>>( - infos: &mut Vec, - value: &Value, - matcher: &impl Matcher, - analyzer: &Analyzer<'a, A>, -) { - match value { - Value::String(s) => { - let analyzed = analyzer.analyze(s); - let 
mut start = 0; - for (word, token) in analyzed.reconstruct() { - if token.is_word() { - if let Some(length) = matcher.matches(&token) { - infos.push(MatchInfo { start, length }); - } - } - - start += word.len(); - } - } - Value::Array(vals) => vals - .iter() - .for_each(|val| compute_value_matches(infos, val, matcher, analyzer)), - Value::Object(vals) => vals - .values() - .for_each(|val| compute_value_matches(infos, val, matcher, analyzer)), - Value::Number(number) => { - compute_value_matches(infos, &Value::String(number.to_string()), matcher, analyzer) - } - _ => (), - } -} - fn compute_formatted_options( attr_to_highlight: &HashSet, attr_to_crop: &[String], @@ -509,22 +437,23 @@ fn make_document( Ok(document) } -fn format_fields>( +fn format_fields<'a, A: AsRef<[u8]>>( document: &Document, field_ids_map: &FieldsIdsMap, - formatter: &Formatter, - matching_words: &impl Matcher, + builder: &MatcherBuilder, + analyzer: &'a Analyzer<'a, A>, formatted_options: &BTreeMap, -) -> Result { - let selectors: Vec<_> = formatted_options - .keys() - // This unwrap must be safe since we got the ids from the fields_ids_map just - // before. - .map(|&fid| field_ids_map.name(fid).unwrap()) - .collect(); - let mut document = permissive_json_pointer::select_values(document, selectors.iter().copied()); + compute_matches: bool, + displayable_ids: &BTreeSet, +) -> Result<(Option, Document)> { + let mut matches = compute_matches.then(|| BTreeMap::new()); + let mut document = document.clone(); - permissive_json_pointer::map_leaf_values(&mut document, selectors, |key, value| { + // select the attributes to retrieve + let displayable_names = displayable_ids + .iter() + .map(|&fid| field_ids_map.name(fid).expect("Missing field name")); + permissive_json_pointer::map_leaf_values(&mut document, displayable_names, |key, value| { // To get the formatting option of each key we need to see all the rules that applies // to the value and merge them together. eg. 
If a user said he wanted to highlight `doggo` // and crop `doggo.name`. `doggo.name` needs to be highlighted + cropped while `doggo.age` is only @@ -535,235 +464,124 @@ fn format_fields>( let name = field_ids_map.name(**field).unwrap(); milli::is_faceted_by(name, key) || milli::is_faceted_by(key, name) }) - .fold(FormatOptions::default(), |acc, (_, option)| { - acc.merge(*option) - }); - *value = formatter.format_value(std::mem::take(value), matching_words, format); + .map(|(_, option)| *option) + .reduce(|acc, option| acc.merge(option)); + let mut infos = Vec::new(); + + *value = format_value( + std::mem::take(value), + builder, + format, + analyzer, + &mut infos, + compute_matches, + ); + + if let Some(matches) = matches.as_mut() { + if !infos.is_empty() { + matches.insert(key.to_owned(), infos); + } + } }); - Ok(document) + let selectors = formatted_options + .keys() + // This unwrap must be safe since we got the ids from the fields_ids_map just + // before. + .map(|&fid| field_ids_map.name(fid).unwrap()); + let document = permissive_json_pointer::select_values(&document, selectors); + + Ok((matches, document)) } -/// trait to allow unit testing of `format_fields` -trait Matcher { - fn matches(&self, w: &Token) -> Option; -} - -#[cfg(test)] -impl Matcher for BTreeMap<&str, Option> { - fn matches(&self, w: &Token) -> Option { - self.get(w.text()).cloned().flatten() - } -} - -impl Matcher for MatchingWords { - fn matches(&self, w: &Token) -> Option { - self.matching_bytes(w) - } -} - -struct Formatter<'a, A> { +fn format_value<'a, A: AsRef<[u8]>>( + value: Value, + builder: &MatcherBuilder, + format_options: Option, analyzer: &'a Analyzer<'a, A>, - highlight_tags: (String, String), - crop_marker: String, -} + infos: &mut Vec, + compute_matches: bool, +) -> Value { + match value { + Value::String(old_string) => { + // this will be removed with charabia + let analyzed = analyzer.analyze(&old_string); + let tokens: Vec<_> = analyzed.tokens().collect(); -impl<'a, A: 
AsRef<[u8]>> Formatter<'a, A> { - pub fn new( - analyzer: &'a Analyzer<'a, A>, - highlight_tags: (String, String), - crop_marker: String, - ) -> Self { - Self { - analyzer, - highlight_tags, - crop_marker, - } - } - - fn format_value( - &self, - value: Value, - matcher: &impl Matcher, - format_options: FormatOptions, - ) -> Value { - match value { - Value::String(old_string) => { - let value = self.format_string(old_string, matcher, format_options); - Value::String(value) + let mut matcher = builder.build(&tokens[..], &old_string); + if compute_matches { + let matches = matcher.matches(); + infos.extend_from_slice(&matches[..]); } - Value::Array(values) => Value::Array( - values - .into_iter() - .map(|v| { - self.format_value( + + match format_options { + Some(format_options) => { + let value = matcher.format(format_options); + Value::String(value.into_owned()) + } + None => Value::String(old_string), + } + } + Value::Array(values) => Value::Array( + values + .into_iter() + .map(|v| { + format_value( + v, + builder, + format_options.map(|format_options| FormatOptions { + highlight: format_options.highlight, + crop: None, + }), + analyzer, + infos, + compute_matches, + ) + }) + .collect(), + ), + Value::Object(object) => Value::Object( + object + .into_iter() + .map(|(k, v)| { + ( + k, + format_value( v, - matcher, - FormatOptions { + builder, + format_options.map(|format_options| FormatOptions { highlight: format_options.highlight, crop: None, - }, - ) - }) - .collect(), - ), - Value::Object(object) => Value::Object( - object - .into_iter() - .map(|(k, v)| { - ( - k, - self.format_value( - v, - matcher, - FormatOptions { - highlight: format_options.highlight, - crop: None, - }, - ), - ) - }) - .collect(), - ), - Value::Number(number) => { - let number_string_value = - self.format_string(number.to_string(), matcher, format_options); - Value::String(number_string_value) + }), + analyzer, + infos, + compute_matches, + ), + ) + }) + .collect(), + ), + 
Value::Number(number) => { + // this will be removed with charabia + let s = number.to_string(); + let analyzed = analyzer.analyze(&s); + let tokens: Vec<_> = analyzed.tokens().collect(); + + let mut matcher = builder.build(&tokens[..], &s); + if compute_matches { + let matches = matcher.matches(); + infos.extend_from_slice(&matches[..]); + } + + match format_options { + Some(format_options) => { + let value = matcher.format(format_options); + Value::String(value.into_owned()) + } + None => Value::Number(number), } - value => value, } - } - - fn format_string( - &self, - s: String, - matcher: &impl Matcher, - format_options: FormatOptions, - ) -> String { - let analyzed = self.analyzer.analyze(&s); - - let mut tokens = analyzed.reconstruct(); - let mut crop_marker_before = false; - - let tokens_interval: Box> = match format_options.crop { - Some(crop_len) if crop_len > 0 => { - let mut buffer = Vec::new(); - let mut tokens = tokens.by_ref().peekable(); - - while let Some((word, token)) = - tokens.next_if(|(_, token)| matcher.matches(token).is_none()) - { - buffer.push((word, token)); - } - - match tokens.next() { - Some(token) => { - let mut total_count: usize = buffer - .iter() - .filter(|(_, token)| token.is_separator().is_none()) - .count(); - - let crop_len_before = crop_len / 2; - // check if start will be cropped. - crop_marker_before = total_count > crop_len_before; - - let before_iter = buffer.into_iter().skip_while(move |(_, token)| { - if token.is_separator().is_none() { - total_count -= 1; - } - total_count >= crop_len_before - }); - - // rebalance remaining word count after the match. 
- let crop_len_after = if crop_marker_before { - crop_len.saturating_sub(crop_len_before + 1) - } else { - crop_len.saturating_sub(total_count + 1) - }; - - let mut taken_after = 0; - let after_iter = tokens.take_while(move |(_, token)| { - let take = taken_after < crop_len_after; - if token.is_separator().is_none() { - taken_after += 1; - } - take - }); - - let iter = before_iter.chain(Some(token)).chain(after_iter); - - Box::new(iter) - } - // If no word matches in the attribute - None => { - let mut count = 0; - let mut tokens = buffer.into_iter(); - let mut out: String = tokens - .by_ref() - .take_while(move |(_, token)| { - let take = count < crop_len; - if token.is_separator().is_none() { - count += 1; - } - take - }) - .map(|(word, _)| word) - .collect(); - - // if there are remaining tokens after formatted interval, - // put a crop marker at the end. - if tokens.next().is_some() { - out.push_str(&self.crop_marker); - } - - return out; - } - } - } - _ => Box::new(tokens.by_ref()), - }; - - let out = if crop_marker_before { - self.crop_marker.clone() - } else { - String::new() - }; - - let mut out = tokens_interval.fold(out, |mut out, (word, token)| { - // Check if we need to do highlighting or computed matches before calling - // Matcher::match since the call is expensive. - if format_options.highlight && token.is_word() { - if let Some(length) = matcher.matches(&token) { - match word.get(..length).zip(word.get(length..)) { - Some((head, tail)) => { - out.push_str(&self.highlight_tags.0); - out.push_str(head); - out.push_str(&self.highlight_tags.1); - out.push_str(tail); - } - // if we are in the middle of a character - // or if all the word should be highlighted, - // we highlight the complete word. 
- None => { - out.push_str(&self.highlight_tags.0); - out.push_str(word); - out.push_str(&self.highlight_tags.1); - } - } - return out; - } - } - out.push_str(word); - out - }); - - // if there are remaining tokens after formatted interval, - // put a crop marker at the end. - if tokens.next().is_some() { - out.push_str(&self.crop_marker); - } - - out + value => value, } } @@ -810,740 +628,17 @@ fn parse_filter_array(arr: &[Value]) -> Result> { mod test { use super::*; - #[test] - fn no_ids_no_formatted() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - fields.insert("test").unwrap(); - - let document: serde_json::Value = json!({ - "test": "hello", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. - let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let formatted_options = BTreeMap::new(); - - let matching_words = MatchingWords::default(); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert!(value.is_empty()); - } - - #[test] - fn formatted_with_highlight_in_word() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "The Hobbit", - "author": "J. R. R. 
Tolkien", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. - let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: true, - crop: None, - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("hobbit", Some(3)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "The Hobbit"); - assert_eq!(value["author"], "J. R. R. Tolkien"); - } - - #[test] - fn formatted_with_highlight_in_number() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - let publication_year = fields.insert("publication_year").unwrap(); - - let document: serde_json::Value = json!({ - "title": "The Hobbit", - "author": "J. R. R. Tolkien", - "publication_year": 1937, - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: false, - crop: None, - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - formatted_options.insert( - publication_year, - FormatOptions { - highlight: true, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("1937", Some(4)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "The Hobbit"); - assert_eq!(value["author"], "J. R. R. Tolkien"); - assert_eq!(value["publication_year"], "1937"); - } - - /// https://github.com/meilisearch/meilisearch/issues/1368 - #[test] - fn formatted_with_highlight_emoji() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Go💼od luck.", - "author": "JacobLey", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: true, - crop: None, - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - // emojis are deunicoded during tokenization - // TODO Tokenizer should remove spaces after deunicode - matching_words.insert("gobriefcase od", Some(11)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "Go💼od luck."); - assert_eq!(value["author"], "JacobLey"); - } - - #[test] - fn formatted_with_highlight_in_unicode_word() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "étoile", - "author": "J. R. R. Tolkien", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: true, - crop: None, - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("etoile", Some(1)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "étoile"); - assert_eq!(value["author"], "J. R. R. Tolkien"); - } - - #[test] - fn formatted_with_crop_2() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Harry Potter and the Half-Blood Prince", - "author": "J. K. Rowling", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: false, - crop: Some(2), - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("potter", Some(3)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "Harry Potter…"); - assert_eq!(value["author"], "J. K. Rowling"); - } - - #[test] - fn formatted_with_crop_5() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Harry Potter and the Half-Blood Prince", - "author": "J. K. Rowling", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: false, - crop: Some(5), - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("potter", Some(5)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "Harry Potter and the Half…"); - assert_eq!(value["author"], "J. K. Rowling"); - } - - #[test] - fn formatted_with_crop_0() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Harry Potter and the Half-Blood Prince", - "author": "J. K. Rowling", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: false, - crop: Some(0), - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("potter", Some(6)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "Harry Potter and the Half-Blood Prince"); - assert_eq!(value["author"], "J. K. Rowling"); - } - - #[test] - fn formatted_with_crop_and_no_match() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Harry Potter and the Half-Blood Prince", - "author": "J. K. Rowling", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: false, - crop: Some(1), - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: Some(20), - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("rowling", Some(3)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "Harry…"); - assert_eq!(value["author"], "J. K. Rowling"); - } - - #[test] - fn formatted_with_crop_and_highlight() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Harry Potter and the Half-Blood Prince", - "author": "J. K. Rowling", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: true, - crop: Some(1), - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("and", Some(3)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "…and…"); - assert_eq!(value["author"], "J. K. Rowling"); - } - - #[test] - fn formatted_with_crop_and_highlight_in_word() { - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - let formatter = Formatter::new( - &analyzer, - (String::from(""), String::from("")), - String::from("…"), - ); - - let mut fields = FieldsIdsMap::new(); - let title = fields.insert("title").unwrap(); - let author = fields.insert("author").unwrap(); - - let document: serde_json::Value = json!({ - "title": "Harry Potter and the Half-Blood Prince", - "author": "J. K. Rowling", - }); - - // we need to convert the `serde_json::Map` into an `IndexMap`. 
- let document = document - .as_object() - .unwrap() - .into_iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); - - let mut formatted_options = BTreeMap::new(); - formatted_options.insert( - title, - FormatOptions { - highlight: true, - crop: Some(4), - }, - ); - formatted_options.insert( - author, - FormatOptions { - highlight: false, - crop: None, - }, - ); - - let mut matching_words = BTreeMap::new(); - matching_words.insert("blood", Some(3)); - - let value = format_fields( - &document, - &fields, - &formatter, - &matching_words, - &formatted_options, - ) - .unwrap(); - - assert_eq!(value["title"], "…the Half-Blood Prince"); - assert_eq!(value["author"], "J. K. Rowling"); - } - - #[test] - fn test_compute_value_matches() { - let text = "Call me Ishmael. Some years ago—never mind how long precisely—having little or no money in my purse, and nothing particular to interest me on shore, I thought I would sail about a little and see the watery part of the world."; - let value = serde_json::json!(text); - - let mut matcher = BTreeMap::new(); - matcher.insert("ishmael", Some(3)); - matcher.insert("little", Some(6)); - matcher.insert("particular", Some(1)); - - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - - let mut infos = Vec::new(); - - compute_value_matches(&mut infos, &value, &matcher, &analyzer); - - let mut infos = infos.into_iter(); - let crop = |info: MatchInfo| &text[info.start..info.start + info.length]; - - assert_eq!(crop(infos.next().unwrap()), "Ish"); - assert_eq!(crop(infos.next().unwrap()), "little"); - assert_eq!(crop(infos.next().unwrap()), "p"); - assert_eq!(crop(infos.next().unwrap()), "little"); - assert!(infos.next().is_none()); - } - - #[test] - fn test_compute_match() { - let value = serde_json::from_str(r#"{ - "color": "Green", - "name": "Lucas Hess", - "gender": "male", - "price": 3.5, - "address": "412 Losee Terrace, 
Blairstown, Georgia, 2825", - "about": "Mollit ad in exercitation quis Laboris . Anim est ut consequat fugiat duis magna aliquip velit nisi. Commodo eiusmod est consequat proident consectetur aliqua enim fugiat. Aliqua adipisicing laboris elit proident enim veniam laboris mollit. Incididunt fugiat minim ad nostrud deserunt tempor in. Id irure officia labore qui est labore nulla nisi. Magna sit quis tempor esse consectetur amet labore duis aliqua consequat.\r\n" - }"#).unwrap(); - let mut matcher = BTreeMap::new(); - matcher.insert("green", Some(5)); - matcher.insert("mollit", Some(6)); - matcher.insert("laboris", Some(7)); - matcher.insert("3", Some(1)); - - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); - - let matches = compute_matches(&matcher, &value, &analyzer); - assert_eq!( - format!("{:?}", matches), - r##"{"about": [MatchInfo { start: 0, length: 6 }, MatchInfo { start: 31, length: 7 }, MatchInfo { start: 191, length: 7 }, MatchInfo { start: 225, length: 7 }, MatchInfo { start: 233, length: 6 }], "color": [MatchInfo { start: 0, length: 5 }], "price": [MatchInfo { start: 0, length: 1 }]}"## - ); - } - #[test] fn test_insert_geo_distance() { let value: Document = serde_json::from_str( r#"{ - "_geo": { - "lat": 50.629973371633746, - "lng": 3.0569447399419567 - }, - "city": "Lille", - "id": "1" - }"#, + "_geo": { + "lat": 50.629973371633746, + "lng": 3.0569447399419567 + }, + "city": "Lille", + "id": "1" + }"#, ) .unwrap(); From 3517eae47feb17b89d278df0fc26354773765ae9 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 18 May 2022 18:45:53 +0200 Subject: [PATCH 008/185] Fix tests --- meilisearch-http/tests/documents/add_documents.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index 911cfd312..0ac0436dc 100644 
--- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -868,7 +868,12 @@ async fn error_add_documents_bad_document_id() { let (response, code) = index.get_task(1).await; assert_eq!(code, 200); assert_eq!(response["status"], json!("failed")); - assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).")); + assert_eq!( + response["error"]["message"], + json!( + r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."# + ) + ); assert_eq!(response["error"]["code"], json!("invalid_document_id")); assert_eq!(response["error"]["type"], json!("invalid_request")); assert_eq!( @@ -891,7 +896,12 @@ async fn error_update_documents_bad_document_id() { index.update_documents(documents, None).await; let response = index.wait_task(1).await; assert_eq!(response["status"], json!("failed")); - assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).")); + assert_eq!( + response["error"]["message"], + json!( + r#"Document identifier `"foo & bar"` is invalid. 
A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."# + ) + ); assert_eq!(response["error"]["code"], json!("invalid_document_id")); assert_eq!(response["error"]["type"], json!("invalid_request")); assert_eq!( From 50763aac82365a8bddc635c8599f34c28a98140f Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Thu, 19 May 2022 11:23:22 +0200 Subject: [PATCH 009/185] Fix clippy --- meilisearch-lib/src/index/search.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 327cf173a..bf543b377 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -446,7 +446,7 @@ fn format_fields<'a, A: AsRef<[u8]>>( compute_matches: bool, displayable_ids: &BTreeSet, ) -> Result<(Option, Document)> { - let mut matches = compute_matches.then(|| BTreeMap::new()); + let mut matches = compute_matches.then(BTreeMap::new); let mut document = document.clone(); // select the attributes to retrieve From 4e9accdeb7dbc63a50a7b3d53535288cdbd660f7 Mon Sep 17 00:00:00 2001 From: Irevoire Date: Wed, 18 May 2022 13:17:56 +0200 Subject: [PATCH 010/185] chore(search): rename in the search endpoint Fix ##2376 --- .../src/analytics/segment_analytics.rs | 8 +-- meilisearch-http/src/routes/indexes/search.rs | 20 +++----- meilisearch-http/tests/search/formatted.rs | 12 ++--- meilisearch-http/tests/search/mod.rs | 18 +++---- meilisearch-lib/src/index/search.rs | 49 ++++++++----------- meilisearch-lib/src/index_controller/mod.rs | 10 ++-- 6 files changed, 50 insertions(+), 67 deletions(-) diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 3d3b23d70..9b70d81e7 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -363,7 +363,7 @@ pub struct 
SearchAggregator { highlight_pre_tag: bool, highlight_post_tag: bool, crop_marker: bool, - matches: bool, + show_matches_position: bool, crop_length: bool, } @@ -419,7 +419,7 @@ impl SearchAggregator { ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG; ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER; ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH; - ret.matches = query.matches; + ret.show_matches_position = query.show_matches_position; ret } @@ -472,7 +472,7 @@ impl SearchAggregator { self.highlight_pre_tag |= other.highlight_pre_tag; self.highlight_post_tag |= other.highlight_post_tag; self.crop_marker |= other.crop_marker; - self.matches |= other.matches; + self.show_matches_position |= other.show_matches_position; self.crop_length |= other.crop_length; } @@ -515,7 +515,7 @@ impl SearchAggregator { "highlight_pre_tag": self.highlight_pre_tag, "highlight_post_tag": self.highlight_post_tag, "crop_marker": self.crop_marker, - "matches": self.matches, + "show_matches_position": self.show_matches_position, "crop_length": self.crop_length, }, }); diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index 14d36c1b3..869bc4931 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -36,8 +36,8 @@ pub struct SearchQueryGet { filter: Option, sort: Option, #[serde(default = "Default::default")] - matches: bool, - facets_distribution: Option, + show_matches_position: bool, + facets: Option, #[serde(default = "default_highlight_pre_tag")] highlight_pre_tag: String, #[serde(default = "default_highlight_post_tag")] @@ -60,8 +60,8 @@ impl From for SearchQuery { .attributes_to_highlight .map(|attrs| attrs.split(',').map(String::from).collect()); - let facets_distribution = other - .facets_distribution + let facets = other + .facets .map(|attrs| attrs.split(',').map(String::from).collect()); let filter = match 
other.filter { @@ -84,8 +84,8 @@ impl From for SearchQuery { attributes_to_highlight, filter, sort, - matches: other.matches, - facets_distribution, + show_matches_position: other.show_matches_position, + facets, highlight_pre_tag: other.highlight_pre_tag, highlight_post_tag: other.highlight_post_tag, crop_marker: other.crop_marker, @@ -169,10 +169,6 @@ pub async fn search_with_url_query( let search_result = search_result?; - // Tests that the nb_hits is always set to false - #[cfg(test)] - assert!(!search_result.exhaustive_nb_hits); - debug!("returns: {:?}", search_result); Ok(HttpResponse::Ok().json(search_result)) } @@ -207,10 +203,6 @@ pub async fn search_with_post( let search_result = search_result?; - // Tests that the nb_hits is always set to false - #[cfg(test)] - assert!(!search_result.exhaustive_nb_hits); - debug!("returns: {:?}", search_result); Ok(HttpResponse::Ok().json(search_result)) } diff --git a/meilisearch-http/tests/search/formatted.rs b/meilisearch-http/tests/search/formatted.rs index 19387bdc5..9876bac3a 100644 --- a/meilisearch-http/tests/search/formatted.rs +++ b/meilisearch-http/tests/search/formatted.rs @@ -16,7 +16,7 @@ async fn formatted_contain_wildcard() { index.wait_task(1).await; let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "matches": true })) + .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true })) .await; assert_eq!(code, 200, "{}", response); assert_eq!( @@ -26,7 +26,7 @@ async fn formatted_contain_wildcard() { "id": "852", "cattos": "pesti", }, - "_matchesInfo": {"cattos": [{"start": 0, "length": 5}]}, + "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]}, }) ); @@ -44,7 +44,7 @@ async fn formatted_contain_wildcard() { let (response, 
code) = index .search_post( - json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "matches": true }), + json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }), ) .await; assert_eq!(code, 200, "{}", response); @@ -57,7 +57,7 @@ async fn formatted_contain_wildcard() { "id": "852", "cattos": "pesti", }, - "_matchesInfo": {"cattos": [{"start": 0, "length": 5}]}, + "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]}, }) ); @@ -145,7 +145,7 @@ async fn format_nested() { let (response, code) = index .search_post( - json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "matches": true }), + json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }), ) .await; assert_eq!(code, 200, "{}", response); @@ -160,7 +160,7 @@ async fn format_nested() { "name": "buddy", }, ], - "_matchesInfo": {"doggos.name": [{"start": 0, "length": 5}]}, + "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]}, }) ); diff --git a/meilisearch-http/tests/search/mod.rs b/meilisearch-http/tests/search/mod.rs index d9b36e85d..c570e8b06 100644 --- a/meilisearch-http/tests/search/mod.rs +++ b/meilisearch-http/tests/search/mod.rs @@ -420,11 +420,11 @@ async fn search_facet_distribution() { index .search( json!({ - "facetsDistribution": ["title"] + "facets": ["title"] }), |response, code| { assert_eq!(code, 200, "{}", response); - let dist = response["facetsDistribution"].as_object().unwrap(); + let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert!(dist.get("title").is_some()); }, @@ -445,12 +445,12 @@ async fn search_facet_distribution() { index .search( json!({ - // "facetsDistribution": ["father", "doggos.name"] - "facetsDistribution": ["father"] + // "facets": ["father", "doggos.name"] + "facets": ["father"] }), |response, code| { assert_eq!(code, 200, "{}", response); - let dist = 
response["facetsDistribution"].as_object().unwrap(); + let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert_eq!( dist["father"], @@ -474,11 +474,11 @@ async fn search_facet_distribution() { index .search( json!({ - "facetsDistribution": ["doggos.name"] + "facets": ["doggos.name"] }), |response, code| { assert_eq!(code, 200, "{}", response); - let dist = response["facetsDistribution"].as_object().unwrap(); + let dist = response["facetDistribution"].as_object().unwrap(); assert_eq!(dist.len(), 1); assert_eq!( dist["doggos.name"], @@ -491,11 +491,11 @@ async fn search_facet_distribution() { index .search( json!({ - "facetsDistribution": ["doggos"] + "facets": ["doggos"] }), |response, code| { assert_eq!(code, 200, "{}", response); - let dist = response["facetsDistribution"].as_object().unwrap(); + let dist = response["facetDistribution"].as_object().unwrap(); dbg!(&dist); assert_eq!(dist.len(), 3); assert_eq!( diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index bf543b377..097a91570 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -18,7 +18,7 @@ use super::error::{IndexError, Result}; use super::index::Index; pub type Document = serde_json::Map; -type MatchesInfo = BTreeMap>; +type MatchesPosition = BTreeMap>; pub const DEFAULT_SEARCH_LIMIT: usize = 20; const fn default_search_limit() -> usize { @@ -63,10 +63,10 @@ pub struct SearchQuery { pub attributes_to_highlight: Option>, // Default to false #[serde(default = "Default::default")] - pub matches: bool, + pub show_matches_position: bool, pub filter: Option, pub sort: Option>, - pub facets_distribution: Option>, + pub facets: Option>, #[serde(default = "default_highlight_pre_tag")] pub highlight_pre_tag: String, #[serde(default = "default_highlight_post_tag")] @@ -81,24 +81,21 @@ pub struct SearchHit { pub document: Document, #[serde(rename = "_formatted", skip_serializing_if = 
"Document::is_empty")] pub formatted: Document, - #[serde(rename = "_matchesInfo", skip_serializing_if = "Option::is_none")] - pub matches_info: Option, + #[serde(rename = "_matchesPosition", skip_serializing_if = "Option::is_none")] + pub matches_position: Option, } #[derive(Serialize, Debug, Clone, PartialEq)] #[serde(rename_all = "camelCase")] pub struct SearchResult { pub hits: Vec, - pub nb_hits: u64, - pub exhaustive_nb_hits: bool, + pub estimated_total_hits: u64, pub query: String, pub limit: usize, pub offset: usize, pub processing_time_ms: u128, #[serde(skip_serializing_if = "Option::is_none")] - pub facets_distribution: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub exhaustive_facets_count: Option, + pub facet_distribution: Option>>, } impl Index { @@ -222,13 +219,13 @@ impl Index { let mut document = permissive_json_pointer::select_values(&displayed_document, attributes_to_retrieve); - let (matches_info, formatted) = format_fields( + let (matches_position, formatted) = format_fields( &displayed_document, &fields_ids_map, &formatter_builder, &analyzer, &formatted_options, - query.matches, + query.show_matches_position, &displayed_ids, )?; @@ -239,38 +236,34 @@ impl Index { let hit = SearchHit { document, formatted, - matches_info, + matches_position, }; documents.push(hit); } - let nb_hits = candidates.len(); + let estimated_total_hits = candidates.len(); - let facets_distribution = match query.facets_distribution { + let facet_distribution = match query.facets { Some(ref fields) => { - let mut facets_distribution = self.facets_distribution(&rtxn); + let mut facet_distribution = self.facets_distribution(&rtxn); if fields.iter().all(|f| f != "*") { - facets_distribution.facets(fields); + facet_distribution.facets(fields); } - let distribution = facets_distribution.candidates(candidates).execute()?; + let distribution = facet_distribution.candidates(candidates).execute()?; Some(distribution) } None => None, }; - let 
exhaustive_facets_count = facets_distribution.as_ref().map(|_| false); // not implemented yet - let result = SearchResult { - exhaustive_nb_hits: false, // not implemented yet hits: documents, - nb_hits, + estimated_total_hits, query: query.q.clone().unwrap_or_default(), limit: query.limit, offset: query.offset.unwrap_or_default(), processing_time_ms: before_search.elapsed().as_millis(), - facets_distribution, - exhaustive_facets_count, + facet_distribution, }; Ok(result) } @@ -445,8 +438,8 @@ fn format_fields<'a, A: AsRef<[u8]>>( formatted_options: &BTreeMap, compute_matches: bool, displayable_ids: &BTreeSet, -) -> Result<(Option, Document)> { - let mut matches = compute_matches.then(BTreeMap::new); +) -> Result<(Option, Document)> { + let mut matches_position = compute_matches.then(BTreeMap::new); let mut document = document.clone(); // select the attributes to retrieve @@ -477,7 +470,7 @@ fn format_fields<'a, A: AsRef<[u8]>>( compute_matches, ); - if let Some(matches) = matches.as_mut() { + if let Some(matches) = matches_position.as_mut() { if !infos.is_empty() { matches.insert(key.to_owned(), infos); } @@ -491,7 +484,7 @@ fn format_fields<'a, A: AsRef<[u8]>>( .map(|&fid| field_ids_map.name(fid).unwrap()); let document = permissive_json_pointer::select_values(&document, selectors); - Ok((matches, document)) + Ok((matches_position, document)) } fn format_value<'a, A: AsRef<[u8]>>( diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 77ff2621b..a302f12da 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -687,10 +687,10 @@ mod test { attributes_to_crop: None, crop_length: 18, attributes_to_highlight: None, - matches: true, + show_matches_position: true, filter: None, sort: None, - facets_distribution: None, + facets: None, highlight_pre_tag: default_highlight_pre_tag(), highlight_post_tag: default_highlight_post_tag(), crop_marker: 
default_crop_marker(), @@ -698,14 +698,12 @@ mod test { let result = SearchResult { hits: vec![], - nb_hits: 29, - exhaustive_nb_hits: true, + estimated_total_hits: 29, query: "hello world".to_string(), limit: 24, offset: 0, processing_time_ms: 50, - facets_distribution: None, - exhaustive_facets_count: Some(true), + facet_distribution: None, }; let mut uuid_store = MockIndexMetaStore::new(); From 6bf4db0bca4a8380878a1b6da179ffce56cf1594 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 19 May 2022 14:08:34 +0200 Subject: [PATCH 011/185] feat(analytics): handle the new x-meilisearch-client custom header for the analytics Fix #2367 --- meilisearch-http/src/analytics/segment_analytics.rs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 3d3b23d70..6b570e502 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -31,6 +31,8 @@ use crate::Opt; use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH}; +const ANALYTICS_HEADER: &str = "X-Meilisearch-Client"; + /// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors. 
fn write_user_id(db_path: &Path, user_id: &str) { let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes()); @@ -48,7 +50,8 @@ const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb"; pub fn extract_user_agents(request: &HttpRequest) -> Vec { request .headers() - .get(USER_AGENT) + .get(ANALYTICS_HEADER) + .or_else(|| request.headers().get(USER_AGENT)) .map(|header| header.to_str().ok()) .flatten() .unwrap_or("unknown") @@ -130,11 +133,7 @@ impl SegmentAnalytics { impl super::Analytics for SegmentAnalytics { fn publish(&self, event_name: String, mut send: Value, request: Option<&HttpRequest>) { - let user_agent = request - .map(|req| req.headers().get(USER_AGENT)) - .flatten() - .map(|header| header.to_str().unwrap_or("unknown")) - .map(|s| s.split(';').map(str::trim).collect::>()); + let user_agent = request.map(|req| extract_user_agents(req)); send["user-agent"] = json!(user_agent); let event = Track { From 641ca5a85771983dca300bef32f33de8599227df Mon Sep 17 00:00:00 2001 From: 0x0x1 <101086451+0x0x1@users.noreply.github.com> Date: Tue, 24 May 2022 17:34:34 +0800 Subject: [PATCH 012/185] Update url of movies.json --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9efb5a937..cd3effd94 100644 --- a/README.md +++ b/README.md @@ -109,7 +109,7 @@ cargo run --release Let's create an index! If you need a sample dataset, use [this movie database](https://www.notion.so/meilisearch/A-movies-dataset-to-test-Meili-1cbf7c9cfa4247249c40edfa22d7ca87#b5ae399b81834705ba5420ac70358a65). You can also find it in the `datasets/` directory. ```bash -curl -L 'https://bit.ly/2PAcw9l' -o movies.json +curl -L https://docs.meilisearch.com/movies.json -o movies.json ``` Now, you're ready to index some data. 
From 5f0e9b63d2913013db85ec497b24db8dc81dce7e Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 28 Apr 2022 10:48:57 +0200 Subject: [PATCH 013/185] chore(dump): add tests --- Cargo.lock | 229 ++++--- Cargo.toml | 6 + meilisearch-http/Cargo.toml | 1 + .../tests/assets/v1_v0.20.0_movies.dump | Bin 0 -> 10909 bytes .../v1_v0.20.0_movies_with_settings.dump | Bin 0 -> 11063 bytes .../v1_v0.20.0_rubygems_with_settings.dump | Bin 0 -> 7055 bytes .../tests/assets/v2_v0.21.1_movies.dump | Bin 0 -> 11202 bytes .../v2_v0.21.1_movies_with_settings.dump | Bin 0 -> 11289 bytes .../v2_v0.21.1_rubygems_with_settings.dump | Bin 0 -> 7154 bytes .../tests/assets/v3_v0.24.0_movies.dump | Bin 0 -> 11198 bytes .../v3_v0.24.0_movies_with_settings.dump | Bin 0 -> 11280 bytes .../v3_v0.24.0_rubygems_with_settings.dump | Bin 0 -> 7060 bytes .../tests/assets/v4_v0.25.2_movies.dump | Bin 0 -> 11468 bytes .../v4_v0.25.2_movies_with_settings.dump | Bin 0 -> 11545 bytes .../v4_v0.25.2_rubygems_with_settings.dump | Bin 0 -> 7511 bytes meilisearch-http/tests/common/mod.rs | 2 +- meilisearch-http/tests/common/server.rs | 10 +- meilisearch-http/tests/dumps.rs | 22 - meilisearch-http/tests/dumps/data.rs | 68 ++ meilisearch-http/tests/dumps/mod.rs | 645 ++++++++++++++++++ meilisearch-http/tests/integration.rs | 1 + meilisearch-http/tests/snapshot/mod.rs | 4 +- .../src/index_controller/dump_actor/mod.rs | 7 +- 23 files changed, 853 insertions(+), 142 deletions(-) create mode 100644 meilisearch-http/tests/assets/v1_v0.20.0_movies.dump create mode 100644 meilisearch-http/tests/assets/v1_v0.20.0_movies_with_settings.dump create mode 100644 meilisearch-http/tests/assets/v1_v0.20.0_rubygems_with_settings.dump create mode 100644 meilisearch-http/tests/assets/v2_v0.21.1_movies.dump create mode 100644 meilisearch-http/tests/assets/v2_v0.21.1_movies_with_settings.dump create mode 100644 meilisearch-http/tests/assets/v2_v0.21.1_rubygems_with_settings.dump create mode 100644 
meilisearch-http/tests/assets/v3_v0.24.0_movies.dump create mode 100644 meilisearch-http/tests/assets/v3_v0.24.0_movies_with_settings.dump create mode 100644 meilisearch-http/tests/assets/v3_v0.24.0_rubygems_with_settings.dump create mode 100644 meilisearch-http/tests/assets/v4_v0.25.2_movies.dump create mode 100644 meilisearch-http/tests/assets/v4_v0.25.2_movies_with_settings.dump create mode 100644 meilisearch-http/tests/assets/v4_v0.25.2_rubygems_with_settings.dump delete mode 100644 meilisearch-http/tests/dumps.rs create mode 100644 meilisearch-http/tests/dumps/data.rs create mode 100644 meilisearch-http/tests/dumps/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 7b0897571..abdac2c1c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -77,7 +77,7 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" dependencies = [ - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -212,7 +212,7 @@ checksum = "7525bedf54704abb1d469e88d7e7e9226df73778798a69cea5022d53b2ae91bc" dependencies = [ "actix-router", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -285,9 +285,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27" +checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc" dependencies = [ "backtrace", ] @@ -319,7 +319,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -330,15 +330,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600" dependencies = [ 
"proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] [[package]] name = "atomic-polyfill" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d862f14e042f75b95236d4ef1bb3d5c170964082d1e1e9c3ce689a2cbee217c" +checksum = "e14bf7b4f565e5e717d7a7a65b2a05c0b8c96e4db636d6f780f03b15108cdd1b" dependencies = [ "critical-section", ] @@ -368,9 +368,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.64" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f" +checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61" dependencies = [ "addr2line", "cc", @@ -545,7 +545,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562e382481975bc61d11275ac5e62a19abd00b0547d99516a415336f183dcd0e" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -659,16 +659,16 @@ checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e" [[package]] name = "clap" -version = "3.1.8" +version = "3.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71c47df61d9e16dc010b55dba1952a57d8c215dbb533fd13cdd13369aac73b1c" +checksum = "7c167e37342afc5f33fd87bbc870cedd020d2a6dffa05d45ccd9241fbdd146db" dependencies = [ "atty", "bitflags", "clap_derive", + "clap_lex", "indexmap", "lazy_static", - "os_str_bytes", "strsim", "termcolor", "textwrap", @@ -683,10 +683,19 @@ dependencies = [ "heck", "proc-macro-error", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] +[[package]] +name = "clap_lex" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "189ddd3b5d32a70b35e7686054371742a937b0d99128e76dde6340210e966669" +dependencies = [ + "os_str_bytes", 
+] + [[package]] name = "concat-arrays" version = "0.1.2" @@ -694,7 +703,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -768,9 +777,9 @@ dependencies = [ [[package]] name = "critical-section" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc1e89b93912c97878305b70ef6b011bfc74622e7b79a9d4a0676c7663496bcd" +checksum = "95da181745b56d4bd339530ec393508910c909c784e8962d15d722bacf0bcbcd" dependencies = [ "bare-metal 1.0.0", "cfg-if 1.0.0", @@ -881,7 +890,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -893,7 +902,7 @@ checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "rustc_version 0.4.0", "syn 1.0.91", ] @@ -1052,7 +1061,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -1080,9 +1089,9 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" +checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c" dependencies = [ "cfg-if 1.0.0", "libc", @@ -1107,9 +1116,9 @@ checksum = "4d3d6188b8804df28032815ea256b6955c9625c24da7525f387a7af02fbb8f01" [[package]] name = "flate2" -version = "1.0.22" +version = "1.0.23" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af" dependencies = [ "cfg-if 1.0.0", "crc32fast", @@ -1223,7 +1232,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -1301,7 +1310,7 @@ checksum = "e45727250e75cc04ff2846a66397da8ef2b3db8e40e0cef4df67950a07621eb9" dependencies = [ "proc-macro-error", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -1313,9 +1322,9 @@ checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" [[package]] name = "git2" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3826a6e0e2215d7a41c2bfc7c9244123969273f3476b939a226aac0ab56e9e3c" +checksum = "5e77a14ffc6ba4ad5188d6cf428894c4fcfda725326b37558f35bb677e712cec" dependencies = [ "bitflags", "libc", @@ -1393,7 +1402,7 @@ checksum = "d076121838e03f862871315477528debffdb7462fb229216ecef91b1a3eb31eb" dependencies = [ "atomic-polyfill", "hash32", - "spin 0.9.2", + "spin 0.9.3", "stable_deref_trait", ] @@ -1476,9 +1485,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.6.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9100414882e15fb7feccb4897e5f0ff0ff1ca7d1a86a23208ada4d7a18e6c6c4" +checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" [[package]] name = "httpdate" @@ -1562,9 +1571,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e70ee094dc02fd9c13fdad4940090f22dbd6ac7c9e7094a46cf0232a50bc7c" +checksum = 
"879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" [[package]] name = "itertools" @@ -1613,9 +1622,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.56" +version = "0.3.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397" dependencies = [ "wasm-bindgen", ] @@ -1630,9 +1639,9 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "8.0.1" +version = "8.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "012bb02250fdd38faa5feee63235f7a459974440b9b57593822414c31f92839e" +checksum = "cc9051c17f81bae79440afa041b3a278e1de71bfb96d32454b477fd4703ccb6f" dependencies = [ "base64", "pem", @@ -1665,15 +1674,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.122" +version = "0.2.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec647867e2bf0772e28c8bcde4f0d19a9216916e890543b5a03ed8ef27b8f259" +checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50" [[package]] name = "libgit2-sys" -version = "0.13.2+1.4.2" +version = "0.13.3+1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a42de9a51a5c12e00fc0e4ca6bc2ea43582fc6418488e8f615e905d886f258b" +checksum = "c24d36c3ac9b9996a2418d6bf428cc0bc5d1a814a84303fc60986088c5ed60de" dependencies = [ "cc", "libc", @@ -1689,9 +1698,9 @@ checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" [[package]] name = "libz-sys" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f35facd4a5673cb5a48822be2be1d4236c1c99cb4113cab7061ac720d5bf859" +checksum = "92e7e15d7610cce1d9752e137625f14e61a28cd45929b6e12e47b50fe154ee2e" dependencies = [ "cc", "libc", @@ -1922,7 +1931,7 @@ checksum = 
"10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81" dependencies = [ "log", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -1936,6 +1945,18 @@ dependencies = [ "crc", ] +[[package]] +name = "manifest-dir-macros" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b60d42baa153de5741281aa3d8a0ec1866777902f8162c04ce7b7c1e31415e8f" +dependencies = [ + "once_cell", + "proc-macro2 1.0.37", + "quote 1.0.18", + "syn 1.0.91", +] + [[package]] name = "maplit" version = "1.0.2" @@ -2004,6 +2025,7 @@ dependencies = [ "itertools", "jsonwebtoken", "log", + "manifest-dir-macros", "maplit", "meilisearch-auth", "meilisearch-error", @@ -2218,12 +2240,11 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.4.4" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082" dependencies = [ "adler", - "autocfg", ] [[package]] @@ -2272,7 +2293,7 @@ checksum = "79ef208208a0dea3f72221e26e904cdc6db2e481d9ade89081ddd494f1dbaa6b" dependencies = [ "cfg-if 1.0.0", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -2384,9 +2405,9 @@ dependencies = [ [[package]] name = "object" -version = "0.27.1" +version = "0.28.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" +checksum = "40bec70ba014595f99f7aa110b84331ffe1ee9aece7fe6f387cc7e3ecda4d456" dependencies = [ "memchr", ] @@ -2417,9 +2438,6 @@ name = "os_str_bytes" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" -dependencies = [ - "memchr", -] [[package]] name = "page_size" 
@@ -2538,9 +2556,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" @@ -2607,7 +2625,7 @@ checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", "version_check", ] @@ -2619,7 +2637,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "version_check", ] @@ -2704,9 +2722,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "632d02bff7f874a36f33ea8bb416cd484b90cc66c1194b1a1110d067a7013f58" +checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" dependencies = [ "proc-macro2 1.0.37", ] @@ -2752,9 +2770,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" +checksum = "fd249e82c21598a9a426a4e00dd7adc1d640b22445ec8545feef801d1a74c221" dependencies = [ "autocfg", "crossbeam-deque", @@ -2764,14 +2782,13 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.1" +version = "1.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" +checksum = "9f51245e1e62e1f1629cbfec37b5793bbabcaeb90f30e94d2ba03564687353e4" dependencies = [ "crossbeam-channel", "crossbeam-deque", 
"crossbeam-utils 0.8.8", - "lazy_static", "num_cpus", ] @@ -3075,7 +3092,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -3219,9 +3236,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "spin" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "511254be0c5bcf062b019a6c89c01a664aa359ded62f78aa72c6fc137c0590e5" +checksum = "c530c2b0d0bf8b69304b39fe2001993e267461948b890cd037d8ad4293fa1a0d" dependencies = [ "lock_api", ] @@ -3273,7 +3290,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "unicode-xid 0.2.2", ] @@ -3293,16 +3310,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", "unicode-xid 0.2.2", ] [[package]] name = "sysinfo" -version = "0.23.8" +version = "0.23.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad04c584871b8dceb769a20b94e26a357a870c999b7246dcd4cb233d927547e3" +checksum = "4eea2ed6847da2e0c7289f72cb4f285f0bd704694ca067d32be811b2a45ea858" dependencies = [ "cfg-if 1.0.0", "core-foundation-sys", @@ -3375,7 +3392,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -3432,9 +3449,9 @@ checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792" [[package]] name = 
"tinyvec" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] @@ -3447,9 +3464,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af73ac49756f3f7c01172e34a23e5d0216f6c32333757c2c61feb2bbff5a5ee" +checksum = "0f48b6d60512a392e34dbf7fd456249fd2de3c83669ab642e021903f4015185b" dependencies = [ "bytes", "libc", @@ -3472,7 +3489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] @@ -3514,9 +3531,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ "serde", ] @@ -3529,9 +3546,9 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" -version = "0.1.32" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1bdf54a7c28a2bbf701e1d2233f6c77f473486b94bee4f9678da5a148dca7f" +checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" dependencies = [ "cfg-if 1.0.0", "log", @@ -3542,20 +3559,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.20" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b" +checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", ] [[package]] name = "tracing-core" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90442985ee2f57c9e1b548ee72ae842f4a9a20e3f417cc38dbc5dc684d9bb4ee" +checksum = "f54c8ca710e81886d498c2fd3331b56c93aa248d49de2222ad2742247c60072f" dependencies = [ "lazy_static", ] @@ -3583,9 +3600,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" +checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] name = "unicode-normalization" @@ -3763,9 +3780,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -3773,24 +3790,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4" dependencies = [ "bumpalo", "lazy_static", "log", "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.29" +version = "0.4.30" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb6ec270a31b1d3c7e266b999739109abce8b6c87e4b31fcfcd788b65267395" +checksum = "6f741de44b75e14c35df886aff5f1eb73aa114fa5d4d00dcd37b5e01259bf3b2" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3800,22 +3817,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5" dependencies = [ - "quote 1.0.17", + "quote 1.0.18", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" dependencies = [ "proc-macro2 1.0.37", - "quote 1.0.17", + "quote 1.0.18", "syn 1.0.91", "wasm-bindgen-backend", "wasm-bindgen-shared", @@ -3823,15 +3840,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" +checksum = "d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744" [[package]] name = "web-sys" -version = "0.3.56" +version = "0.3.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +checksum = "7b17e741662c70c8bd24ac5c5b18de314a2c26c32bf8346ee1e6f53de919c283" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index 03f4f5597..33b961446 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,3 +7,9 @@ members = [ "meilisearch-auth", "permissive-json-pointer", ] + 
+[profile.dev.package.flate2] +opt-level = 3 + +[profile.dev.package.milli] +opt-level = 3 diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 0a248f702..9f06debaf 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -79,6 +79,7 @@ walkdir = "2.3.2" [dev-dependencies] actix-rt = "2.7.0" assert-json-diff = "2.0.1" +manifest-dir-macros = "0.1.14" maplit = "1.0.2" paste = "1.0.6" serde_url_params = "0.2.1" diff --git a/meilisearch-http/tests/assets/v1_v0.20.0_movies.dump b/meilisearch-http/tests/assets/v1_v0.20.0_movies.dump new file mode 100644 index 0000000000000000000000000000000000000000..9d0f4e0667b29d7097d8440b6a08e4e83c521157 GIT binary patch literal 10909 zcmV;ODq__iiwFP!000001MFJca^uQ%J`|tsv+$e-O=I zb{XqG|IhvZr@vN{+2~y^{_0|{QGO; zbfHa#|JAb$i%dDU@o$ZdNIL{mSnMty*vZNyX>2$pZ5Yd8q84BAr2kq!l_#=N(VFjE zVe$uF(#~(_-zqa%mEyl2e_C3XJ2duChKeGYZv5$QZQlR$pGE$cc_d*#PXFYWo#RjD zf3x$N|D87HfAb&t|5y3^h|H2jiA17I50SqrLN@zoIEglPnhin(B=CIJ5^=Fa)(cG)QJOcdj z{15t6>^=YMHhUf7f4ye6{g3?rYkYp(&`9z&l3W)FW%sVtDnSOCsj^e8X1H`OQO=Dx zldiNnQ<>0CNO7pMOuJZwMkXS~bt`7EaY~G25#yg&I`IVS;)WFbp-_=QsqXiAYBxGxJBc#7QQeq)r<29r>$p0t)NRg?TNc2c;iDM;{3uUfoev^vaYQ($%gTV7df<z1zX_mqQcXhDCc4o%Ck=TTl5IgIa}+IrZ;^#v41qpwk%y z?Pl|*UwOX;uYNl;r4_fvq;A!DC&jOwxQB_!GJ7gXVw2;_NL#oj)8n`h*gAdrWQN!y z8IAsnO`#Iph>ZXJRxq^snD5lFoM7r78^DtY-d%ZL+OD< zoD{+-mBM+kWK%n#rYeKQF_FdE@io?s;qVRr3CEQ=7O1d4^M<(46V(u>CeApfZ&fA6st~DxA7Dy$ zUnPdpjwSMtvuR80ikUSuY-uxr5RS;~Dn)Kt6&6WF?jlF$NLOJ-;!q}u$r@M#JfPvc zhL~xmHbX3r1On2n9E$`T9AkT{hm_4(fd?zx%j}6dn>|Nnte-A;RXLuVeu{2D*IVNGuCV~d3$aOTPW zo=~!14tJ+_*U8PNeKnNTdH7tN^#|3on0{UxN+^uIZf7Z!TCLYux}#e4AnltRzXmK5 zT@vR?OUfN!8{7^fP`Jb@!4IsLEIF*$AZn@;agyfBYWM~5g2(HC&{n7su5fY}astC+ zCVpxxk?VyH#m6*96m5ym@IL0@tUoVt>)oW9k@+pp97#uT%F97yieot8&LkL?fV(gV zVyYAf9q*I^;obyvEAA*L zH*!VLL|3Q~F|PQSuh+lo?r7?FgR2YI--Q}6t5EFxmG^kQcR1QFcFWJZ58IEYPhb1$ zL7nU2?fJv~Y^~?pold9M>n?eIsVt+~Mr6uMm6}!O(-mYbvZdYt;ckTz2)id{mW@!P z8-nvoJ?BV$6(LK=v6-)alnX~FrG?m&7S$hRM5z%X22N#6q8tYU07je9h^#2TmD1`m 
zB}B(nAT;iyVP?isDNPq7;6mC&d(@Jc4aTI&sn8Ni>XA2!}G&$@_|kIxdfHTFt{xFK4YZy86-^%npP6X#8;$to47Z z8Fbr0dwVGWI?MIlQdo5U>DUQ^GPI;LNjr4!{iI_Y|=cz{s9DIc5uyZ;1Ti` zks&t3P$3oox{k_JRI&@49%mZfmberX|Ff_tRqMk2{&)Y05s+ITe^`D4&IV>Yhe~mX z&ZxM{tfHz-w&9ZGX&HbhGFxH~{*RQ)bwT#0x==4TMuAsRgI7n77q;}MAXEY$JE9qH zio7rGjRhUMHLynbK8%io)9yoes;cLU$AgO}b@}}Dq}#i9$5+Gk7~Su5R~^+|BG4#C zWHyj3fK#>Vt8+z5f&pw6%e2ACMTU3Gj)%TZ)w#n3FxAl}`w86GX#{ShsOU2P<;qT` ziWQ@>_b2k7vW+Wr1al@PXQBk!o?+yYBie@xJsU|BN!*J(rZyVkJIw7ZQyYiAfOg@`53ik*eXqM#MR!fIDLtFG3BWa*1RB z1EObOm@+9ElokM&8Q^9!Rg!oq7)}N5mLU)l0d3<{GTMaO5YXPhDFroVNwFfw@`vW+ z49A?W*$JMDa=)O~z$ivZ#j{H;N*oH5bCFUxF)<1Naye(~st+$Jvd3$_yvLg_SHaau zJ3vLe>1AO)J-RyRe(JZUx@FdSb92xP27~5uL0h852b86DUXTxf=c`V=Q%Nc*LH1u1 znIA%MYzd5NwSqgx%n64mFcDEtlmnP|JWhOekY$A;+Diq5O$=v~BWhLzuMpoQAxh|@ zGhEU|t&|)wPS{ORVu8RK&t*xtf(s&7N28h)@tsI4#W_Wy2&qPuHgn`?gSh0B+Y%4p zzuv7VWVA*|SWOj`gNQdwI@V^q{t!|jkICWp z#MQvUP={UjFiQ7c?4|9VUzff6%hUMraXm@|+d;dvj1v9Vu)1FxOuNzl{CA=jexGBW zxRX}KtRR4VfJ(AR9bXSB@y!ia;4(8IXjgtbF5QrZI)&%KdC04NAjCq ziSIZK0wKwZhSk(q+hmd66oXDy5WU>|KnybY=a^g+uqTG|I{*Or=*Cnb^K!1XUJp~~ zn$-7PDXtD`b?BtXs#LGKIEYNH?K4cfOM(YZA?ls=HOWw2aSm`$Sb9b{PbYSeOz3Ro!L$f21=teoZr1WTY=z22yQ z4tIBlcatoz_U7B;>E!lOw(Z5kx0m!|8Q*?KWp#Ull_qMtwF)aAp;4)ma95@&C*hJU z_rTwj*t5{p)K>k2yG1Qr>cI6~R-~AUkEu&KL z&$C!l&v7Wj31wDm&TZ?MthN;6K?8du6`_eVl){myn&VVSqQMC8u=#4wV8y(%`O#Qs4-lT(q`} zcr#@12j1~)4ME)$tkTVK%yRYc-y+34=gpBhq@~B=ntF4$)KIM{CAa2i1(_ikw7RcdT%@z!%h^jezVGEP zFVl3Ie6K6g?>D!X#CCg0Y@ZRN?cv9c_-tU0)*I+UdN?PkEzL&?u_=iD5D?k2h&dWn znkS}Gw!!20&XC8*RCz}1J+AFyEH9x$Z9`D9-~aAEJi(&|CeYLQLlMkGW58--MWFV^ zoby`$e!4vE-^<7C$>YVN3#!AzquXw4Tz)#Z470uO7_b*`z`q)BNmZXp8!H*T4Pn0M z&-oaO{ArIApYEuoM|^k}6~knexSJX37Do&vBzvqcjJ2>kDxtR?q@AA`n6LqmOs!V4@~h1b)NP%!$YI5!!c{W{cm{anC{yP=F!TGeAx2;oT@ zP1eV!xR+XG;=Vr4X!!FIN##%*ZXr2p5YZ_))2xP%(j3*Kq&`jx_Dpf1p6ORk2ty4z zK1)ZYO?AO%B7A;BnJ_X5>q9=~RyH@=|7Wl4O&Xl|QHU0r6R@ zB9kdK=o%=|{%C1WWlz-601HN)w2?nS_N7yzBm#x`U?kPvyrkS3{yakyH*SdEZbSz5 zrPmJBGY2iosTX&^9`c~SzV+J;aZFiH?>%Twny~}q^d@IJ3Vli?g8zracAV%XpRp)f 
zs6|-rBQr8^vy9(0@WI8&7xFdcV``1nuPm z&CarRkz?OtyG69G`u@y3X>nmJF-XJJMe=zJb;@h;HS*$%>N-U`?Xj*@F^CQy5cm8Mw($uvf{Uz!BndIw~Y=rc3@*8ZJmm`_^vDr$RqR1&;5^;{)}e(%lv?&(1~{ zxBGE+mtK6n{LVWDgI>S4#Dh9Zr$e7II!~ik#+w%m=tLd4a#3ce)!H|$3)C49LIR1+ z1r3^T%Yn}n=(q@}laR2E%&hKv#%4?hJ;dCBb)GxAGSA8Me4NfYk`R!`3uB!`P|*C& zjtoPU7hW1Agt>(FMYS0-g1sgsmFv1A0Gn_o46#Bu?S&ARG`+)?%K|#-)bPO)(K*c> zbdq1xpvme5OCV%8sp#lSWrz^Jl!6=X?DNoATjtE%EcE`Wn;-b3=6x=$&Z_g%^mezX z9zTBka&|Snj?a!CPr~bOgZ1Y~D6-vUT#a#D^p9c;MoC(Gif?$0<&b)Dh%{nv+CI{6Z|&&+A?GU@lf?e(r-zI=zi zY}31>%cfl_IUmWhz|FfdqZbqwn(i()1qbg1qVNqRHbCn=P+vYxhUzPZ)=ZQyByU+D zn2fSBsy-aDAe^OVc3=U{zboa>puETcY^|K@MP_lZumuOtsP;5!@-w3sgVdZ*;?bHZ z`A!SHTQ`$c!_y=>s#5jKR{Y`|)iPpCG?J~)z=;GaK8r}W{be{UNXGLND0oPiRj0m1 zy`OjZ`g~ZPeBNux>tTB^{nk8cJ`O$xdH=kO*W+NT*&pB7 zZEsW3NRh`|h9Zm3%JJVa-F2%r;(5<{H&KzM4D@?tFPb|Fy5FvW^N=njnL66KAJrCXL)Y6}5ur0cJR6?KB?eyor&)1P_3e(hWIZP~kRdN1wH#o7DvqSd}1oXtP=bvE5%FJayq}WbMRw!D?c7{;qlee9b-!>P{b_qPcslV9 zcAk!F`5oBEafF0qD}O6=u!$%J-aG3!Ry!(>OWpNhG!t)E0m1kc!F8NQE_X(JWf}Eb z>RUpu8r51BiwrQrVp7PT(#hUzO-5q$3@H-~myi%b z^jBehqZZig;ZIRjs#$$}zk7SVxNWo_=Ii69QBWP7%u}4&{Oxj$C_6%)HjIu zttKljF6*uXB*hwB#igTQnt&lKFG7e9N_fqLz7A4A3p08YVL)M93Jg45Lq%mR!yt-H zM5j|R6(JAK5ZEuu^CFz$ely?DCO`Xpu!IJvwA*D4{fLM33FyBY00a>Du!Ky@LaAtl zOg0U*&`8)R3Ui^GfLb0gp~grY+c1pRl+&yPeZE4MYJeNsx6#@!3@q0Zg5QCb*u}@V zR+9-dycfXe+xNd%KZ;5b^(V! 
z0QV7`Z{)LCBZvE1gxS5F^fh9{BYZQRl`dM?ZX&&{c3nDc+<4~Pqg&PMf?Nv7ED9_~ zj&~e-ZnCMUSV3t)NyO?=2>9vjKqtb=55a-7#0kXxVF<8&?Jj7i{{72IX<&`VZu0jp zh2a7r{F=@ZI$|iP;)E*ZDg7gYJF7GoELZ|6XX?~MBk12Fx=LhG{$}dbGRlxi>P-Oj zIis>e1`K3svpSlu8PX{WkGNkfyb^u(bF}bPR^$n^WeMiYjkq*qpGPsT*OXGl70%3_ z1-o^E;TS(JHEW(MzP+~I&MN1y`pdw!Q_=QOHRxx;Kefg#y5#u2X;)AkQPigvF;et>cq?1U78QA| z$$MKLX1_1S9NwQmqVM0^)5R+BM{fDK^<;F^@C@AlK200}~afQOYYT+BV}X8wU%1TC}DkD%d-eRkv^c2O#+~+w<+| zW&Uin9v!>;GH<0HgOh51G7E3pSG@y(vfQfG+W8osD``$5>%-k?BKLw-$%A+oKv7vK zm8wZ$NN1UTK_}k0*El-{bo>H8>qa@&x|9SZ`XA968VMnoKiY4s0H9xHp~1u$R>4wI zPPiFIyFAz^p*B7Xs&VMmaO5qORG0D!Fi5fwJ=w2}L193`UkUIKdYpkISDe~-#L8qp zKv$e==Nxu{)&!SBHP67~lCHEaj8Z5TiyLqvfERz9X?x8r3NQhvi`Kd%XTXg5gk_jE zx61_tt<;zmmKs$R`1COLD#GF@@`db)*g_G!w5K5pmT#yLHgb^p$Hrvyjuo6BxJaogdclCn{6MG4tAnn#M$YZK2zf#VR*tc1!lu($KP}$@!PG~P(uv}X+Ptver|7RqY z4Z5(QS;hoNAY=Cem^&}Qv4G>FtqY+S!A~uwgfA{5} z#go%FXFo~MuD2THycV)q+he{l+}8KqgA}QG;gO!uS~rR52n0x3LS7luagDhI4oKP@ zjzcon{!|o7oe95DDSJmcfhZ}oUZ1W!jsnpGbS(r=^QN@wK4mfH0Vv8Tk^D>U)OyNk zpfi=cH(h~0_KH(pJ=r_z9A)QL#4)ir$(TWAF(cq9-{28Naqhy&C!_j5o|&D!eIG1`Kp-HL zZRa^wOK*D9e%%f(a}=SnmK^A0l^m2~nGeIy3E|Am!c_9@$}v{5PrJ8z=6%IcWDL z>Ebz_rBUtqW){BI8@J&BnOvpPZddaC^=3V$~iA#OK2Wf=25K6xK>vA$MS< z%N?Z(@*!Wusk3x}3_+m?%_bPeDbF~-PqtP&C3pj{m$a%2gVim9u|CTzCaFCBDO0D$ zEpKy#M{5dO1p2uh5e9UESzqRBy(}N^YomM!9B+Cdza*yjyyN@&a@d*pFWqtDdhGYh zjf;Nr(cCXaYSqiNdSejAzXi_c0WV~m#$FC5N zjhiR``*ZMEnI+v(ZFKSMoIj=ZWRx`a7go)7rP4Ug@0e!Z7X#$IW6AHSzz4h-62cuz z(<~dJVbT?Qwr2tOL7g#$QBsm^R}8gBoWHb1kSlzani)aZN^E~ZUl$>I5~+Y3*AU2b z)BBxRObcl(0Q(p9_!|l8#R3XH%I%hB$%QxsST7+BYo!8xf*5<(6Na6Q-IlLKnaJ3d z>+w@$@<9U{!4E>L6$@s0=3C;swYPRJ%D0i%-4$eT7`jY&k0c3a}8v%{<;MPNXII# zg;7g0jDo(yVz)t}^ov4|9saQv9WKpaBnDh(=v$$6JJ*d8Hxg@5V8DP5h7=i;fKhsd zi1GO8$Dy@bd>_`1{M4g*>wRxm&S#fTr;pKl_c30Tr`3mU-LAdN_lptRwN@qXackyC zl1mi@yD?u5D*DxyScAcf8y|o+Zf2b(-Od(P4r$gPp_LV3aE_D{s+3}~0$uHNMh|6w zVB0&5L>D!m>%qQHUy`7D`q8}VU3R?1;CoUpUscD^$${dyTy0iCRAfukD(_2>R5@<1 z)g97~R83G4MJ6?g3n>$SpQRCu%~O}_tVraoSs)q*!Mq^MVUo?5Lh|9%#A*X<=`4Rq 
zN|T&t5-QV7S~g2Bx*yp&GF=UP+d&JhsOQ=A+c7Y*32KcR*q`9v)Ic1 z&;QJ%-H2gXKlkYe6`r0uRhW9g*z!aHF1jX*c|im!hc?pON1Bz@+;dT+9MCv7{MWCW~`W3hN~9k#qW$>sVU+Yi*SDJVHej0rUqV_5ka^Lpox(TtIunIHD5iUr^em z2O1Pc>3B>y=)tFKHw&ELee*JIE0{iCA;Kw3FtI_VqzTu7(63@uI!(tsyl0|>z@caf zxHs2?PPC94!u`dj|Hp5Q{z+yT6q_4iM44WIigPA|b9VK(+rG1(%)0J%f9S=>iFHzL zpFP|pz1PP1>-jK@#X=w!`tczS7Qj~B?u{2b}C_olrTph^#5W(a|DrFUH`h4Sv0TGY$R@&#q z7sNaw7z|9T6KASHqS`7Wyd`=|5HQf^%>%*nU zH-&fla8Ua^uVAZ-_Txt|=saGm+~xVhak=(n*E*Mjb-W*0HXygm!=}czmS4}Q!2R|y zlhnq%^=)%GtMKUr-@24;yuxLWD%xgaWxstDx-`P8V{o-T{Xef*=p@0y#b5oqHMa_f z-sV5(0N80Pz6V^~qN^jUoKP)6RTcPb9$~Z@Sps!SNi*2lmmcXByu`si2)DJ_F9kK%j0FTYE0A35a9t1520b;finpEl#+#O>- z2tF{=fbb#LNODCv9lsCit-_0N(7aHqbtmjk-}=k+$#PCQlf}7x$THflSMxp2R*opy zjywi4jo`lK`{)M2VlUW>A}JNbJ(GRa#y&FWr~*OLQFGhpns7Kfk%R+4=JH8S|foF1|JV|7lYdkBwrQk6v zO@+p?!1O#5dj>1hoTs|cf3WF{oiVP?-r|_yHd*R<2r`PNmGFaG5Zo%Uj+eViLT@08 z=_IDda-p}mX}(T8n0mo;I_R?Qr>#G)KX1*4D!Ptl(e=|!Z(iw9=^9_ye)P~k+fPxq z+T1UPsCGUAlN#KXgB8wa4VvJFncduKV%+=hy`7%|n6;2I1#Afs`Z* zL-m7c^Gdf|2pi2Q8O^B*pfy3SO(fKv$o&X>9*$>NU5kGE z&N6BTBai~j`9^iQ3_VOwM^G|1aM7C<%d+;(mh9;cM-oU9 zDFR416CeduIHLYx=4Jk1{$u{6zGTk52~ng<>KawKyQ5>84;Bpqx!mR4b8bRsk-Gcv ze)r7}f4xr=pMDU~-&Q|pF8_V_{Lt#QTD=yoxB5RcTkU?Y`GW}lifdYYN>@lL#1Go4 znZZ9V-S=z1|AT(?ga4f>3Y|^d#;r5i_nm_r-0OB<^nbS(Ecw3;{|7;<^Mh!9-(@`i z`G4;JAJ?qRrf~d4nJBlm`R{8*m7BH3+T7U4;csruolbR8;kzsft4C#_9)3SFP8Zr_ z_`kZ7VUa24*8bdBi?l;9g~it5fz7N;lE#`-(uT1dC2H}LpHvqKjcR7f&a|4>gXD>< zRJ3~QL}gad%5=2+09I;Tp)9^wl_=@dmq_wT{n!}L6(+ytZDANQrk`T}LzS7VN^$+i zZ%ey$d&cg`P*Gki&HU$Ye{ZLK$Il}F%bd*R(?9vXbNovFw>rT5Rs0XS{oX(G|8Md6 z5m_ON5}{9-9wL7grIn9{_tDyBvq6Y}1paX?lPUhL?V7MmRaUsID0;~LtV~D9z0FQ* z*c%{+qS8wKX>+*I95(x%px^HTB(kriDk-BePP^G|tv843&B0Z3xY_A$wt5@Q4h6wy zk2`)f&)?e)HabCT*zSK``}Qxtx{1~=HgQ>(yEVe3`s?Nb48K^9hHEFLh$;IYA??c} z2rmDeTb)WqM7mLuyM5MTG#XL4niLnHPSP@GnZed;Akb*xJ zDiW^Lp^lW0Sw#2JIAaSujs(C2LxuoRC1NEa9f{05Jh~y?XW~}sq(Q%tzX~U?pvWqj z3OO=mA%Ju<0Fl7dc&f|_Tk$k}H~k<*rsg8iW3?gnl~68}xuW?^DgfSyc>!D#40KLJ zCi_DnCPrWrz~L-Y(ZAK@8>xs+)&RV@78gbCHs8EKX_%;u 
zB8^5H#!lW8Z}Kh49T_*y#*| zcC$$(kNg%q`fbmYR$Lg9x>e`BFaF@f6--Q)*;Yvsn;cih+QK!Np2UT~*6GJvGr}Ip zX!O6>6e_`u7^xCvR=N^yo|MYL_R^}_?C=826XJHA3Y`fDBMAIpv0+ofc2-3?lpa{b z`$9OSQaCS`Y-%UeOl7b*CbC#Ne#W{n9Dd=ia9o*VfeK3)sYI(8l~Tl3p7V1;j$vwe z+EC1`I_P&erXR@ttaWnw=_c+T_m2V>8+G10c~?!(o^{NiHR$&GOOEM1I_98^CbWNX zLdITo&ULJ@8!JN&T|}E0L2coCDvK~~h&|#X4RNfesv!ZG{@+3MY3Vr!YKb z;-@AOxnAf{yiIdN(T4a4?_(a$`lAxJUQVkyncwovk#q#7yc|TP*oPA?O@d(wxC?_I zrb>a(@oq{m6X&-zBcOj&8gbEH-)-|?JL{dZQ(6<|3rochj1+vR$7Mng?oB|q;*NrH zBUc1XbcG5LPU|+j;^of&w9Sy>2!L%?vm%1$}*~LM5a7ZsabVCT|w3&Tj~uE?p7#)uzO!RDnZYO{`>;#Dfc14G zH){-3B0PY3u~ipKI|V?2kc=@)RDx7s=P(s6##N4TfGE91wX6L0W0(UIK#fhxBykO1 z5#y&!Oidf+vU(B`8C+Wj!k$cZ^133T_RDwYt>)f``@>cmoqp;K=X*i^Zt`{BIvq|#=`v(wA*ugcsfk((& zM26T9BZXK1=sGG>QOPcFdXi~)TjEko{LjLoRILm5^H2YY5s+ITe^`D4&IV@Og-UUV z&Z)S|tfHz-w&9ZGX&HbhG8ArmDN+o89AEb#nLleYbbz_D@I8V|2gMU3FA?d$zrxCc3qN2S$~Lah5zLvGoQV=>dxnuqj%Xh)^lT(iBylhDn8ZEdJIw7<^*O@(8oSO% z)v(hF&hI}SM%Tk_lWnQ-WN`cL>gsvB4m|rU!Eo4IE@(@%c$c!&J{06#;Q6XkFIAFCN|2p< zMdpVPoLB;*TCL#D33I{`3QR=Q6XgKrolFv+9b{Rdi1tzeVH3mI>3K^{t5>``1la93+uRp|-3ldAKWgNtY)6!8`VZSV5$*BqgJTtrgOV&(T3d&g`%7~8 zHE}hxFw|bxU60f4dwXKLM`va4>f|84xp^KXf`QEh&PPHKVzW%osS-tc2ChQg|6w9)wZ<(wddxT4tn=*w>d4imre6VXsid6)wj$sySUe>OgJR8nSBK8A-Wb47 zuJdiE!qx3TMm{d>OYcK-??fhunpvd>-b0k7MEwheNC8y#E)d#3f9P1RQHRoZInT(X+9Gf>; z5i_nNRI@@73yK&Hksa>^77mViQC7EKiHD3yL)EC|78S5izL7&SjafN86c8+dYW4F* z{Vm+x?Ojf@z}oY#HwV*;6WO-M*I)0`w`F|sg39XlhAU0fc54+@-a?~NC*iV8Q%=Gq zTW*8DDY0##tEsK}i@QZFT=sOC3?+hxX1>;f*N}o% zNyQBcRh!DOsh)&Dy;=FnsuElWrTlu{Klyw$H@nA&r`@e03T3>n?q0~HR%_VrE|Jpq zQkUgSTMqnsLp##y5%^EG$zIthK_4G}+fK+%XE4B%(~<+aeu+wfAZc*dLn&|sPcB-6 zB3=&}{DF5oTSHJc1*>!qIA*ze_+OD?p7Z9&9MaMgaYntl3u>s=l#*L>w1Ui#j9V0c z#Nt#(8F2$>>Uv4b0y0)f?w>}D!;=iAPO>twuP9lsQV|1_P5Vb5hsWu>cYk=_jjww7 z{rxPRB`5#P%UU+BW{#6dw)j(Ru=XNDt>EwWaw;AvOik9|9s<7BNSo zO7p~2$~Jf$-x=~4nJUkSy~nj(jO8VCsBH*J_VZ8w;Rzl!FoB-VABtcm8Ut3FC<3)7 z=6tC2ucyni{*}BLOmB{FTu|-py}RhPCgq3SlQ7$U!GOJh1OCx~ORD-n+E~fxX$bR0 zf6m8PtV#s2C=z#O2&jw>V-bA=zVnVXTGSyApcqTH1#*gS_OIEl79x 
zXorVvR8r4!Waz|Crg>vAsAue+wH3_MxkXh=lruPox`8|gQq1!&e5a$f9|h&2M-4wYJ<-Z;sOi;%G{95k)eh4 zynL5@Stl?`L+DVAxNQZQPrA9#k05-B$cq7b2_N}zlO$yJ$4wGKEIA%aT~nGPNHDIU zm6!e0ibRkoT$=0%x)Oq>3Vlxx^J?cXZFbQ39>Jp5+xBR%m*W>_U0w!*6`;Unz(U8{An#R zurIxKpzb(mQBJ+M1NM*y{q>DMt%-fgdV23cd(w;@Ag9+k+fnFKDiQpDNNmT6Uh)}> zqJ>(7Q#r;9}=^I&0t0@Avjc9}aFh)2(>6^{n?h%}&r> zKG5teYZp23Ew&3p`>O8`&8-&4hTf|y3|AM)=P}eNuf^Bci!Z9{6z#Ofx>Cg;I($Ir z`*sfk6o|^eO@4yCN)82%5SPQwSakc zK0dzKiL=Y}_~Xe7?-&ew{oWD}>MWfOeaPrMjanH`UNE2&b>zx%nW0u|-?T1JXFv!E zB-R%+Xv!@IK2xCMBBV}2!a6eZy6+jA2_5tha|hOW?&!)qC)4wBI_pS6KproQbrL~A z^E*2-3{_ruX_OG=651Elddvv+n3Pnm>y7|y!kI9_3gNVSLR`}H4qGk@=%fR~2TMff zGqLi|z+Zn(40Lt|~3Gjp@h`>Srg=98M&xwJa0jtFg{%+`oAro_!rYe~yGA+g--RPOvIPFLjzbO6Ow|icheS`P9BipT3Ea z8$>p#7;b8Lr8<`dcW?8Ypze}R%O_(^F9y&%I2j$|)jh|Fo`Bv{)0@s4>emI{o^35! zHH$S-Ergf=+R`g0-iZ+q9a zoVc=Ne}&mEJre;AF6MULu<;$+Y{0nPabAQ}fQ*o2UBEbE{$^h0JI;?yuFNVSXqY|j z?lTiH(SA8+x3NH_s?3#ZtxS{wl=P<>DGAd8Z{6D36a^+hs#4l)#l|`0G9V^gDbgA^ zK(Jyh!eja?;A4R@p5KBJ9vzt7hq@p2eolOSzf5n&XBDeIskT>d<*V}7_C-Bx_R{&` zI9Mq++l_j@g0I)}>T5us5~Tr!ii13u<|(bHVAIopuvfUJ#BK+LbLK7;s~sQ*?KzMI zEw0GkrlgS~kGBj(7M+#jzh%f5YCNQDv2;bL&zH*t*Vx6vlw7~$SxgYnsZ`N2aFi}S zEp|TeKBP-A@Fa9A?}{@=Si=faFX0nQ?*K#y?OsVqnl9;)`QqoHTb)^&^XDz46u3_| ze-TKG)tjNCpiR2|Mp#iNm>tFn88iKv2kh7WjQ(8p?wj7XcIWEieRb7pKMpPy=U<-> z>pP7KRUf&dTW|eO7@96B)_V)l?wn4@o*-awVxFC0!dQ+HbXDL6zWs?85N>zWB-xgI zGZ?67-MZFlL#fA?lWMI3mk^&0CyuogYT%zlop`lS-PBK))5f><#o2H*tj+zw^6_kP zIXV&q>($CmKHhD>zRV=C2froX38P`S*MVR4b?=TXXBrmn(c1Wm^0;yGa6 zQydIp-v#K{Zal4oPuX;5EP@hj9GHmzV&nag1uwE2S8nI-x*I*chONh?Gxlfg`QZ7? 
zKdL;P*77^Bk>dyn$yWYW=wK63419Dpajf=S9@kp+VKfu(Rsq4}6~T3qMlLHOzOsz_ zE%hy-SB+~ei$w+)VKFJ>&*)@twHPI*aLd6)TYpgW4sDHKu@iAPc*M&sBaMQ zJ0dGCF6*uXB*hwB#igTQM8J@i7a_z4CA?umUk53mg*iQnFrct41qPn3p`tR$FoAMubr0sVIafB*&`wvcICC>5=+ z$)=$;8VNf^VJ>tNP|G7G)EJ3l8;0?Qa+;Mu=PPun2Dqbr8*Ti;z;Zny_#JqOU3`pN zHJ!r4dj*WXtLIq=B{&GOc+40kQuG3g%t3Bs@~T9b-06t|s&xxknozFV;Zj)Exl%Q| z1HXfBBGFHmZabuO=YmIphLQT%c5~A(plcs`+}MnNbt-;R2cMpQ^KXZpK`@&1)HG_3 z=IiIP;o`O#92Trv)pBJ=uWID@?~&y#aF5sfLe&V>m=dQXf=MB89PR3&`wnTn;XG#| z%GSInS8(gV6khn(3DtAh9&i?6CtANyxKsU$!EEw&aZ`z3MjylR&2{&s+rGc8+zgN4 zK7#X&d^T(3a9@isySJCVMvQobZ>F=-g@o-UqHVS7qO`H_%(+KP)tiD`3dk%9EJu!a z9C~iDt*F>RX~9Xv?ok-{>FmHK!p;xDfwaU4%>7{quzl+;X{Y}EkF(OinoQi}?|&4A zON8)SI!jQ*P*TMSRm?N`M+A3vX)f8Y1XRw{sfkALzejYH$fEqs)Tw2hA=A{G0_bx_ zjc9we%@-{5f}O( zGrl*^#sXVhHKjdHz<66qyNEdSwAc-^_fbnLGupDks> zgi))DCqTJpr5yUpz_wHI_E9zHXU0FZ!7jSy_`a1ZsE#P=Q;QfW{yw~wtz?Uez1HNt zoey)kFUB0+A3&n-U)!_gI`Kzt`K9%2k1m_H@vMFAgpI?HzE!Wcnyp;P*2swoeJipM zdcUWC5yY5BngfvQ=*5AF3e+g&l@)KB=`5QBOMY6krXwoYJCs#-Z~i+V`6Ju&?fKi{ z#cGWmyZdd?N1SR?(@fsQnA(%hfZ|ne|UuL1f#29wLQc_M> zjH6v1Y@JZsJ`1XG=+)@RTPdk7c%5>Ci?+Y zaju(HMc0h1fVY3=#rcPGwKtzVcIO0 z3kX`P2|Fw`sw(j5VeC~zi=)^VvL|8-k?gtqtXIZ#{R}_sl>6?a|8ahIIvu-<*PvVJ zzg-8b$!d`t++?TCX0x0p6C1f=@0oD|ty0h3cTacdh|zO$p=G1RN;gl6D(0yf2raap z(GEK&%=AT^$;ItT3*L2LUu0xUm~3JBG3_TiNFbqsH8ve05e_=C6kM}h!VFWwkjh3k zx^OYea1NvU+ThQKe+krzWQ&MaxbRkVEyOoZ5%j!O*cCm>SUt6N#Z`7rp9(b4g0|DI zm%u*m7v07sJ(o@Y!FOi_M<1K3Zas0^;jxmkQE%i+n`T}G0ZUH{)eWFs70#^5UP0Mq z{3>}xi;I@)<7B~74snaEy}8XV7LX!5K-`+korJC`bW0)#&XiEw`jki=kNMUonKq7@* z3k8!4>m^<^^(a^sP4l#L5(Ls`k_5->2wB9FL-i@qWVPyco%e@L#d`U;sz3I}7voVp zJ%4i!lLYO0t5ME-A)B=W%{PYI`nrFRBDE-t=?QJLNK8i{K*|#G$}}CPq*3&GR8DXqFsS&VrAigHRU|B^ekpK=!H zOeOD4SKyDm<`f+-(5YZK8CHixOdp+V>lO2OiLO23l+O5)iWAzfGmDE|E8mZA+2ZWi z^^3IMYrKFx**oc+Wan1QF|l-#F@wxzM!-|P!4rz&tis7BqxwIdo1MLTA8dy}ARv@& z=Q&nOZ+auY?gp0yicncg4s^0g4$85t55wF)H#N%}w0Yh0yL#%6Xx zLD~`E_gbk1Z~Z`SiZ1=_Hkg!bzq{+Hbc_T`x^_~-Ua81M;Zy3Ch)Sw7yEM)?>x-fAH~Wv2JM)5rR1*qQp@x|7E3#P63I 
zSN-IpdDx89s+Vi0JGno{6r4%?B}fXKy8E0T{Y)vMyC2gh_O0U9nth8VPF;?i|KmTw z170?GCV4_G zX4w!8ldiaDdmexv)EQG4B_-K*%}{&9`AfG5a)r-UGh+x_OWU8&*F~6~#3~@iH3Tx< zYQIy9X(8eeUIZ{rjQi{n6bhYz2J(Re8NAsq4-SL)#uSvapQ=LR-M~dTewOIvGku6cHd?-Ou<+!_6 z4@f&wHNi;~nbag*NSXQjB8|}4Jaf6uip1WU1)@n1EDFLLCfSTBBp*&q>^8ub&hnR} zG|71;p)$>+ZL{=3PPRpp7e$fJtofX#a7|~Yx6`q<$;!kqxf`u}Fb%*%$CN$_l09=a zaig8l@=98tyCJgWTHbkkIZOJ`xkDi~hbZCMO;c$bk^uqRTqsk`WEKu!MN!@i#XFC{ zVN8MB$DhFv_3M)7Y-2kYmCoR7bM>cvcRGC;RF|{xsqy%D%&gU@HS-6qm7`i7EN*50 z$A4tjZp5%m&wc*Rg{S9E6=q&Au{?2ri?7LIUJ!xGp^dcgk!EEz4_p)}2Q*H;RgZ0{ zvh2`Tn9Gy^NNJG^7tW*>9ct9F;K=nnDN~1Mr=6~>q}i16*(~MwOgp2u&BZpMd_5s8 zo86QxMo~A@1)-<*7Do9qpl_b2Q&6F6XB{efFj5leCge%rZ~Jad7czfTzN!Ca^wG}H z8$sxwa%$d$c4zc{AKhIK22Z!0tDE}M&C{s)_5SKGZGGC@jmWKPo*1>3E*0*9g{ z;NC(LI?+;Y2=^Bo{g2-o`bpL@C~aAf~CUoVGYG^rgkqSe~&BqxJ(rxo_cDUFuH*pudMB~ z4chBgqWvmd!1^+-mLsYdpE(Y?JRJ(wk61UzVip4~2jIzn$-5wPWG#=4e9Tc4dOE_V zj1O0~^Y9w4y}Pw@JJ<}9LG$C{e%!fxK4y4rV_a%}^>WO_z@osHH7No&CJoh1}4@?XR1M>x>ZJaOK3|lFwp1C44w39-$Ioz&n0UXDARC$2{1FT~p2y;-#%uN z+Me2@{?6EiPbc`^rF7#Ju7gz3Hk&B>?TgT*5ndgGtM%#sdBs8}36?JY>R+veRXFxG z|3(MEPGjkNz{M@PI>O2s)e=-yfzRdy~E1KB243@ByAXU|W%aC_IFToA~0Q`Fut0+w&Avz7}DyZ7U(qZQ?dO zdQY*lQJlx99?rPFSeQ(#%H z+k-;kpT#hK53P%03H+HE3p39~{V*iiBRfz4@YoCs;I*LZLC}IQAZ9C}Nu>_K-3j)C z-~%%a2p@8TBv+Kv@#~=8I=l)8%`3Iubi)4Zt-ng2E$6H=U0&M9T1MOTYF^`P<%p8q z$YU_m2;J9wAKf5W9t3+)B&7m9WWGjmFNlF!h!+N(VXU0u#`zUD%&W{hD=u6f$VD*8 zmodN=h*=4QB6w*X*i*by{9FuibB(Z_#P^7`ARg|ML1_b+s5Do+)}-Kdn^q>9-DDX@!lqZQybB*VPw-h{v zrK!+Z7MPxAV$adawBV_3^dH=G#+@;)&fd~7!!lXwc?dF!rn$ohDqZ6%+mD|57l$e8 zR-4tMt!+R5xCRO$Wk9jG8|=4WcTfm&sRG*?B09!~+IpA^#`kb%KFB@Jv03SY5rpvn zE#dyiI$ijwELloRnC*!ao8V3gya=?89NkvBdB7B0TQ_s5M0tg-LC4`f!n6t(^Dca~ zwz+K$|MtkjQBnNG0dzacA0o;se!-as=L3ajM#Vf}5^Lr0FZ-@9TkrvQc9m`l7mnld z*wEBx;^DCO?T27*=XI3aKBUWUo2Twatu{{Xz01m7>$V>sevVDR-aI6zU=V&z38W-h z7^?4@HgB}#LfB|Q$!I}U0IdnMHnC85BKITkc{rY7ktd+#=4u!F!Uf9x%k-R3?OOEP zceYVG7=aXE&M$OBdTe|O;aW#%rlk|GK>-v|PMq3cnPVP&$Vm)#Sp7l7-B{==-^h*$ 
zn%_1hZubO_!L(2jz1h$fL3%I*(Eifx(6tFBr(R&!5_N&QFyZ&;1#GwoaYExXO|7QdJKl!s% ziAA5@`X_wtSN8K)-~UzH`TjS2|3j^L`uqET{#Sk|FyR|O_{?Nqj$^(Qgm!p}xRw70 znGJUA8+_^~twy_2t>JrHs=^ZV!b&+nh#KmTh#{{?v${Ivjh003CGuWkSU literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v1_v0.20.0_rubygems_with_settings.dump b/meilisearch-http/tests/assets/v1_v0.20.0_rubygems_with_settings.dump new file mode 100644 index 0000000000000000000000000000000000000000..83436d4e72edd40d3ee56eac9ce6fbe49deb36bb GIT binary patch literal 7055 zcmV;A8*t=0d6h6F{EK zd(SyH@u+#$rjxWyhhH^S`RK!Z7(Pa+@ISYj?fxG>lCeizac(%{ipfWeORMCggngj? z?0uF^o7PG4D6g|u75+KZ?%el3(9fGeUQV*tSu?o$?VCZF&a=VQpk9tv(`?=hE(Xhd zvcF+mXK9;Fu3KytOz=zQFPVGE{FN|Q+72Z*+VS6nT^Ey6E!~weLn9sYCbV4C`8=&x zpJppG#R=d3?qV<*{W`0gyehGY4FwxAG@LBwi|yuL^TFFc_xSzLKMyne(+_x#kmFYC zlljkL{tGFzWZK%0{~XN|{Q>*5`TRf6|IPe2S=;92wD|x+aE|{qGY9;yjb%Ub|M&5E zGpN(@4io?RvWOxuXjh90RC{05lfd_rtQkMPg)TgaL|whk=Xtxr&uQD%`Doc@_&Ix> zj@vS8HaoOcn-({d>b@+hbkaN-1V+yrMA*2!-t2f)E{oz~&}3;no~5HA+k6#!64eM_ zFXurW)`wUW=_-iIZo_cOI$yLw7A}H-TpaFuF*xWEw!B&k369&e)#9%s;H%$;Usq*S zt{`_;Z{F?zZJt*3({!ALJPD`W9N-76LYw^W)VfRm;=4V^`}1GnBewZ3yZkpl`oHhx z^9He>wo8P1NbWIGIGb#|+cS7Sb`cl>xw>(OU#yiPq}RibR~sI=sXHQhGx#zdBP7bp zj#9b|yLdDB82O3=1y&g#!CvgJ%JmMb{CvwQn|%(``0elBZkgp~%Sl&*&#HD75_MxQ zZ|O%dfV~QO~=4{9p3QL&cjk=5|9+yItBI z{B3}+tkc-LE7)qrymLx{#LKUj+48;C-dd(Q!@drc_q6rQ$=?j#{>9k{<3|zpaW!7f zv$AcjCzE_VjOB7Z%4)QBj2%GG&M@O8a}*4kODSj=yz)*P5(d#1gOE%*;S8r>@S0mr z!Qh76Yr#ATgY1pLFe{8?hJZmyD}<+EP{tXhNf=aL3{p66AWoE6Qr2;8Xc!#l-Y60V z-5Z1UQXB6nv81^cMpH0&#FA2mmeZy;2IUY;LQ-N$YYRs~!=R0D&XO?L-WU)`o?A!4 z;F0|HECzeUK-rubD@YhzUkpkzg`OFTK454yA(kQrg(M3cM!?{E&qFvDlZrAA1|a-- zjf)sCEYi?0@M9bGZ9zEcxDuX1OvY;|9R&m*3U8dJ2?X!0)XHKO7#EaiLQ^hD8Nma) zqJW@*=q=UKTV=Gco&v%O$InXyK!~AG+B2HW@!nbuZjx{toC3meVHGKy#L(E_uxKE9 zi?wn}7|WTafKZwV5;;i-ophc!6TQ`1Nu}YxB}J{BV^W@1x(LF+t2>%|<-O$^{WpTc z4G}GND2st8RpUuS2=nsHY!suV8tnk4Zd-NWe$MwLm@%OG}|G1D>f_# zq(TvFheO9Cx<&)xt!IKb6TKDNI486OlYbCmPe!(eNQPDsfZDV#X0xYSzE6kGIGY{#XR-jgU6R?!NIcj6Kv zDlDmn2o 
zW{9DoA?PXB@Zn+Y+9<*$M<75fk=8JUMUesmAcE*E*A6OS9Y3$u9<_vUq*_7*Wmn!3 zG4xk!m{Mh=BY`lQlS)(JQMWrxF%3k2wPs)@?d5s%=TRRpO{yg%oUwME(-sL-f3;?o z*|26q0U?p96lRhfjK?@NuY0RCOn9j2&aa#KcvYH_CPE-ey%$8C>o3+CrJ90%5kP1L zvr9<{DTf}XBBq4yDb^gtm-Q%IJPCwW@!I-%5IS&xClajgDb`R4m?r2Pr-YMXoEy?i zn4#fZd7AgrJ;j<2d0b4Eq`Wqoankh_8Sa<^>(LyC?yuKcgf#+^)Frz3rVNrCDE)uD5X&jVClL_5ZumM7H<6-+L2G)j^PKt9F`g!My*X_KUxkO(KIG%=id3N|QKfpUZ*-*itLlSaI&QH+3MK z^`v`xwHl%lj~DhRAbRUH3JG{kXU?o&Mi5#EuP8=bo1yY3+G#7vrng>8>p&yipD`UB z2yLXHext3n)aJh~WO~x?8NgxC~J9vynPPV^ZTgD7ZVTwwia`FU~aN!0< zfmkBmBW`=^wUqHHm!^c170wZEV07KRf0}#RQituY*CIT^sy!vI;penCFB2}@aVFu6 ziMP$!{(23w>EU9}Ea`X8LWIaWFB2}_tcA=r#Ak4N3$|fOTItT+HrIiWf_X|f@v!>9 zIU^=^Z^72#u_Y!HMX;UG%$#4=ci|Qxr<5VCVcOn;ZDcG?wd|hzrZ^~AUgbzw;Gh_% zKCNkc>$X=4{)uGAJ=`fRsO*q&;XJKEwY_EAdS|(DBm?EcJ&zR0tOyhfuneM)LD}PsW>p0ttE{<>|$ zlVs-HBOo0J&5flbw(#)Cj!{AM*6napAIcqqOaO$_f>RunfSK7_~-qy>;7K7+K`SdGiry6NLgLvBPCM=Oxhwy554Vwe=hZijvsE z89{iq%Y`dmLMlsCY}Z?{kr3hj6hX-eDL}>NuRXeO#fv$nhzsMcw_+O(?neP|<~ExS z1l+r#fKcHQxtGMtcD}b_YvzQr=N@C}FnB|`ehLg?Xv0NsT8-*^3$|8&^0p7)*B{kh-J^kF}n_kT?u#{Ij0 z1K9iD|0Rs%eEt4M#WmB97zf4iAK(A=eSF>=y@~2-U{F#GE*=sRfpM4Bm(6`z&o7(F z-C+9~tE<812%qHo#k1tLs*{gn%jApDa&WQt8mz0qL!w=p#2B1GkL(E$|cs(ca9ero9;#d3V$UpHal1dZcx<;5zg%4Aewm(QxZYMR%1Z1bx+pQiJCm^^DYy-ds4 zyK3}RHf}FEcw=mQIB`3xtL1c7E!$*OE$bv*EQ)*_1!S0P0tF?w)q6wEFy!pSkPBmX zhP=%XBDXc>b5gAKMm>y2nBmaV5Y5%?ZSpED@<}{>hkKkB#V8%$g<(s=p^~h8mDg3d z4&?oz{w|If5yKBh4Eumj#h6rrZ2^}>dv_U>;~>PsresmoExOf3PdcxbP4XE`K%ExJ z^R%2Uk)p|qPJaV{msM5V<%q~tJHwn8`KV4Yl+7`LoId>Xm!CfD-z@Vs3*YY^s7opa zhj;*(ot|OlcQ|mdY-TrmZ}1B<0q%m2Nz!Cl=CB@!_PeZH2Rc&!RrYx{uIkA<=8(Q; z*LAOiI_Nq-*>#TkMQ{5)%j)WGc{v$fHmh>H2cb^yvSc#43SV4x?XHRn;d9jGbTVm@ z*gP^V@_LcgF@Hm#Ef?zyZrZBOka_KW20Yynv3Xv#JM*BF5J!P0PX(UVW`~nQGPiB9 zejVZl$j{Sem2AI@84_7`C)tl9>g9F^$VLKF_$m+DGD@3#e36X680cgOxqEp(NN2M4~N<-mV@_!u{JS~f9h5T4 zUC$EZ(jAP2J{c;G^PQ1s7Il_Ekg95))nohwcW^EI5EAI6aBKrbtel&33>7l#C(Mqv{(Xbeh6T{(mLw1_h(`r@h 
zlXsKP7f9fAiAgHkZJO=_Io0aHTWWb|8QvGJLhX2l+x2ib1oZx8#^sSb(gE=S0J!F2mO$C|U$-vyLD5e2^=Gc^-C17I5X^#kk3Ixi%1R ziQBw^T=2s{VMBFlPPEvWlCP@KDBlm0>jWEd&MDG)W2#TEm$dF^%-$0YTywiqYedy= zmWyC-E~`b>t#I5F)pUB`bUCyP!+}&w%T;_ia2P>o);nC}rqfDUaqI$5 zb=q4mc8oM+mtf1oUI~mnCUv{xfz$gIJ8xz_JOp}J zGo2PS0B1YHUsMIUkWb5-#d1{S&Fo;zpFez`ym4V5JoyZg;a3scI@E{rZuZad%(Wv z#QAM|V$Gs!-&IztHKcBbUH(1flhZoQ z!@DJRs;t-huKq=rAKki64A@Swjym@uf1MT6I-O+6mu+6;ZN67-NHsWIOED+cQg&In zxxG6W(X@zv1!9~fpO)2qk-@hlaZtLhJ&eseiq|-R4@37j%+@6EK!}^$aKI;Vjv#G! zo9PKj63=+Mf5vda9jC~JI@X<&^_0=OcD|bC<54x=NBL0&vr{fn*}%~wHOF^J-WA+2 zgvzW9P8dZWI$T!o2$oIXoU-Py;5jWX;Uxn*5nUDOO^s4{o~`y(`4fE8IfBo#^1uG& zFW)iRYv+bpDXK82#Uzw?+kQFizlBzt-6(Bl6?kM^r;DQ{wx)~>oZN=Lsa3Dfc-Xb+ z!x~(74AAE*n6EVYv-SRY8LEK_ff7-6BR|PrFD9>tuyY9rtqIE<(M7HIS(hbMy(1?+ zJmz-<^JKj#%nr;VYcbb1=v9jcRqX3tSdK`(`KQ18(;xr%%|HC}|NPs3{P(~A;iv=O{OjNT z>F@vXn?L;LmW;VqhcSD4&?MVkYiVG+F7tYPgHmCyFW81nA#I{r`#pa7eYe5@QxMj# zL#n_0_*Y-Pj728Y;q#A>dMWC*;yAzh9xVbD#@ z21g|8?~^v4BdYTf?1fNWf>w&qomlLKMS&7hpUjpZ_)GYrNlb!J7{5ba(@Ee(WmRRV^z>)pe}#tudp+3p;V*Ob`(R*lEYx?69>HWVrClP%(-o!+(2?Xq3g*^}YR zknD4~w6L%gS9)i0p5uZOTohz3uCdQo(b}&q{~fdePU{|J05(88Qlwy5gX;!=9#&?M z4dWcua7fp!ZH7D^=G}Xkuv@=cN5TKLcWt|k+d%YJh&&X#K%B^uFHwpw9oz8lWVkq$XQ{Un-*O^>G0|@SA)&72T|B2CC zMsFEMgl%x~umi>0?Onns7(U1g-`Wd^S1XyNvduo2hwO?fRi6mE^v~xvQ|lto(u~DK zk!)q!A+^E-P-T#UpL*7g72*jiMDarLe90FOYdQtF8}S5q09r8J0#pTpj?|V;ghy}U zx@?saQMd1H*by{6YlujXY4SPBVLn1K%CBUC9LlFE*_%{2@sD3+5QbI?m#0aR3G|1u zY=dF=dz8b+n0T&4fdkVNn-0Cz-77>55EWJ{fjkQRQWI(ufQkZ9HHK{~_nZV?mwg>p zAm8(Yz$P7G3YsVB(g2DXR)eMsKAPwhFlg&nMcY&VO6V7khQN||X^L!Cj_&*^1%Hx= z%H#)TP}D$Jvu=s405VBK#pcA^3*Fj`Vr7~5!-Jj0&jvk-KeNEcQotvZ1q{8=Pwe&I z7jm^qHR-1D|JAGHte+1ky~=92md|e@^o{hg`W>!!9&@HXYl}R>ppve`w)TZys6<-~+6CBZ?7Ti0VXP_5}g? 
zM{0nVFANo{tB~w!BUbFj^y>PW<9iwu5%{yNJ%z7i&r>^+rrVKT>8>(y{%F;)TJyU!EVTdQE*RRDotJ9{3t^#kLzSQ+>@tZ4dyN=b#uO<~10h zxiY0#DgLg(!W1aW0g}BL1^qYdy!8^z3EUaLpe9W$tn?`KC%xj%E4TsHZ&i2HE?-J$ zfnKwzds?tTzh&3<+YU;wP#=xO_G2L^j5pfpAY<|F5?oi7oB?$E>4$aoBl~lF>YoOG zy|;3$`LqorUQJUcHp!VR+lLYfVees|YtYy849?7`b}zF&J!_Hy?a2~eaWwb6Aza|^ zSDhSN%9&4W8Yvgxfa%vs+lE{)!PRb5$q}kg*||sfdEX5$A@;5LP2JBJZqxy!V9oAa z_lcUBHsq?;E7p&vgLV`Kb^?^h^}3Du?jmrKEHD=)kZByT=CZO@rc*i&&5dB`1@O>N zK|f0KG~*?PrEe!CdS5nLvE$1d6(!o37!n zUK>4)Wlh*aaJmj-n3v)Cr+F=apJ^@6DLlbIC0{{i4!MIoH#j3{sGj8xj_J+005tRt=0el literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v2_v0.21.1_movies.dump b/meilisearch-http/tests/assets/v2_v0.21.1_movies.dump new file mode 100644 index 0000000000000000000000000000000000000000..ee7bf060002289a81667300d9cd9e80add4fa6c6 GIT binary patch literal 11202 zcmb7KQ+FKwDMv2WRf)?zxyXv(`LA z8VL{ae}KO5UUAryO#c0AO5Na|bwyB|zJ=vR{Gtsjw=t}whMi`ck_^fY9*M5FeEgKO zQ2*7N#I!T-Gr!a1vgMLY1!)uvXKIwUbZr0^x?s2VB{QcBhKHi4?uj*lSPZf$YrJCjZq@=a;;P zTgcbDS;L9s%!yOm!s;--x^~A+Gd;dI#ZNmlgoo2zd$r8+b;tT!t+*A7TIQI3G_2s*I zlL?lO9tE8ibgsYD{|pm6xl(bNmmT~mKY5bJze810I|)n}(4!R_lVLO@b~ikxs{GBc zqYddzqV1?oHA@gVU*OwJUXvBx>LiNhWQT^ch|Z#cXFo{>#S)AB`Wyxq=dChw$_QFbqrw9jl0 z$uQW#=4DgkQj2CbA)JXk{HUxu!YGzoH=+2?x&H$}O@`}|*ZwU{N8f7#F{<>qK#?&sw7$fzB-zh}-=M$iZq z2mysA)*4pjPhSj_+IX|5z-ympDHI3ms}{zmnJ$NKZBsKrHUu(S8suNcVM(cTm)sd2 zE_rHicfK=TRxB?r`L)Zfs`Qd-X|Cy?sT!^TBL!3W*UmEXSCjLB{`BdL<`SUHP|B1g zzzSyOnnk@}wg~eC)bcl2GXXt=CQUZ%kvuu%ydRc4R<6j?{~%aR6i|H}nTk`+o~z49 zJcYBWqfu1yR}vrXPNydy#K@cYLot&CYp!i>>3-9R|25=GI%-Zc78zdO}NJ2U8^?2JX;=B(Cp2BbqJ_r)7hMuj^SS`s8)HQGdoLZLD zI8%D*2V;A{?fOZKmuWb%#H(iL*+#fJ>{=VW(7POOrk`@v^U9v|W=_h9rBU)IZjB(; zF865O_xG#whQ~69t$l|Pgh3?{Gm@AM85YK>wYz-zWh1QAS4C3}7!MO$1PGT8Cd(P! 
zZJBT2r{TvR{}#i8#$N-BQ0CzNh<++!ys8cSFcN##OP|8kgmF8TW^x5fhz^x$^$R_rCMrKCM~6aN3b} zkqD)6?6c^tXbPeKwUJEB38vbmifs7xJ!PjQx#W`aR3l1YSibp^2ifJEqsAEvJD)Sp z#1WbAxV#Gh%5k(xEWrYdh8Z$iwAO1^6cL1%6#$T^8!4ix@forNEN+b?IjnI*8`N$uuvPkN==$w6gGo8?bOOZIZ?VDgYRGH4x6yj9h%rJXEH7a=X8 z{)shFOQ@U6_6oWK=iT`54U>F}?I@}{s?vR-Woe(e7}(qJqBuf$zvGJZn47^h5Qz&_##3lRHuxLPUbH6M_rx>@r#+cBBJg;hV8)t}M!^ zjx7MP0z@&%lu^s5i#xCt&_J7zOZJFJ)l=MJHK`UV&+D{&q~n(Q`F`x1BxEee1UA8e z5qOp9(QuXfoD7M56iMmqk+lPl7v}gtk~m4W5ig;VYr>ijtWNf7L*`q~aQN^BakLn~ z_6w;a`7I^?r%n~|Mi+Pq`sh`rNJz+1VkoiC-jm|%tih#h#^zocYMSxw3D4*cv6)Cc zJp}YEPL3M62t7v=Na*J?`CWOyaM45Jk5tyc@2x*#{Ins&WW(YqHOcZay5)HB>&R}X zb0pSr?&Fc(aWR2A6?6NxzAxOZ&?h+lLLMK8Y#G*rWlU6&$3&C5LA>o;=6%dkeeCRj znAL{Iu&)?dau(Q+9o%uGw*Bn_<(;O!1B*w@e7ra$fg~bp|9Jm$EQ|$06(b zwO>|(ji&6r9bSCRUdifJ`tsV~w*7sN`W124j0#op25E@&O!Q%KgUBf5&e1ZYaCLI1 z{v3t?LW!n6Y8nwn8`40Au&mG4DyiD~Ow;)8uzYNzOrbdsywht11&Px z&9|$5V8!V2!l1@tFb_`&xm^GqBFr>g|&H3%Meug^sxdn4| z=KT5fpYztA@%Gl$;R(yG#mgtF-=S&&18~!`&340ZCR6HZd`UiJ|H$5KG4_ju?J)ZA z(#(S=Gg+yL;sfm=1V@O`+-WfNt>9>_q`LKZ(5Tti2%}hw!yuQU`B`sS zz>3yr#{otpIaZd6)@c7s@s+P7KL4JL;zxw;ivzcec=H0jeHJTfbEz%B!3Yay2iJ|k zIF87T<_2CE-cHe;gY(A7H)k`h2e6NI?{FJv5*O_YKy6aWrP%;g`60(8+jt~RG-q~H z#DxB6wRH9u-NGo8A21LCi=A(!{MzRu!@(rV-e09WHD}fQiBAI~pwdq84cl~IY;wD7 zo!=e)$;@aOZ=xt~^KRy7b>jW({p9ygqEhbG4p7_p(I$O%Y&#oojhMcXp_OLrv?a%? z?sBY3y9ebJN!S4GG6j9mv}BAn4G9OVj69G%IFp7ikrFw~R6NaEmC~qjipQJY@sG<2 zSzM!8jSJCzN*Z_I1`T)wKHBYsubMHKK*D4WkdP7>gOTGa_0HBQ_3?4$@yYvrVPNFp zUBKJxj4yBU;^yO!QoLx!W^pXu%vb74Mrx2sY5%?R2M6R^6wcm{-dbxd#y&?k@}UfL zHONlcqgjPCQJ7@kHC$?~y(D7hx$s7wnG;BpedLlGrnDwug9DRBqS~gQ-5g45tnZJy z9nOHOvxFVEGbaLiIa!Z5Cek*SYl6;{byzSYMJ2_ys4HFmy7i$Cg7Ny>oe$&D-p$L6 z%xh98&Bczf#$54VjYMr+JNnqc51rA6XTNsLF+P|D0}|>q zM0A8niyL}b-A0CAUdho0B2YUJ=jP0I+i|SSh;pykJSI_iqZ$<^JCMpHn7nSYsT4)M z{?iQI>bBIUPX<~-lm?B8DwPOLpVsR(Wk_QtgBQd^p?0GbsbU#jrc;;Qt&ViAh* zQh25!-O?Jl2P8Y(78e6p>J$~P6D!FGI#JuFjAb#LCoC}CAIOao&qS?IumRR+K%?l! 
zdsi^Y_7?8gxg#v@j`>XqwizctEh85Yl;CIp5XTf$m2a?-d7|&bduZnyhzQV-Zem4SICTHc0^&j;Zy6uUj-{BItsrR)w+i6jCs= zvpPz4s)*0fVpwJ+W3#k@!|E+j3|^#Q{8=vcI0AI57|)wsJ-(pEj%8}e$EL1#mhabb z-IQl($GPx=2v20Ca+Fc41iVQ4U-r!cXPe<&9!|UuJFqi zb=@oo8WE*afOF}c@<>+S;xjMnNT&t~jVH1Ed%sY)eKwU;?Ym>C$C zA^ZXDSaN2|Z;Hu<)R)tJ!PY_5c-paTMqKhwUQ}>q-z5=tue1B0tq5Ch0$ng0b zK|VvH6+qKG0KF2|+yWzflFP0HW{Rr8g;Fu_AKZBKq-#9LgKY?38NLCx7m4O-2bKIJ zG`|r0sw=$8{q4@_@w8EYK+FllQ@$^; zPCL&F-3~y50Ujb6##9^UW}$4s`kuc4k|GXP9QrL75bw~ri#BKBQKfEm*gbxwc=1RX z{e0b{L|{zDiw!m-n}7{AI2Usyex$&bvPmv)HXevb{ll@`4&&WMcRq$5Kkm*Gyg1ZehwmcwJn-VNr{sMSRT^YtSgCQ?6gt#$e&InWt@wX;}s` zaAj>{Vjk)?`STW*5l#6q{*nfz9tU;JI?V(;LPqfV$2G0jq>`!qYon~By;|1saEkyu zKY~g?@!x#b;Tl#zIHqoLEINCHkvn|!afgXyf{a0CXa2R&mowwY{+-^H%$v)X1D{p< z#8wPQ{K-sy;-nk=oy(z1)2fm1Ai%Vas4XlURI`y#Uv>MbBj~wU5aCaT?TOp-fM+r) zcg?xjgZs{xPi)1JHA?lb4^|Z1!~msGTWsCjLzAmMnA?q!0HDS0<=Ugr{Y#DWtl17~ za!@@^0r)2xx;T)L{$?#soQOQy8(Z_{Ci<3ws(y8kchA28LV)9xS;%=qd*H}tUnD?Q zP^8gq85g}>$O35R>Oc4ed>prij)oU=?WeV{+VoWG4I}VqavGT8_aXiD z+!{Ll_Xte9GISt5WgQKu%sZaIm}?}2^y&6$G9(Lm$D~dZNsL^5s)Hc@yLC1DOkmWy zQN)w_%KPx&^w=5>p(&QhfHjeHAz~6A-ma{T<|7*UqQ?mtUq|BbOzSdh+>DC_1*c=m9VTmPn_YGT3AWvOpX4f{m1+;oy^XBCmNr z3N)C|2Ja|_cj9}?*_}5|KWcZ+&Je+|Hd$^Xlal0+eQP1k(qe-LLg z(49D4;Y05fQymh* z{ucA^dpO}5<8dLRTe0?mrI5(V0Wa^%o#&2HX@IU&6%obBN zpk(kr6-uRD2vMulS*C_kz@Hx+wt|}_gU9WdvD_)|r57%Fi!5^;F!oo6vqc79O2Nu$ zzS_aP>5jK!YJt1ah`sOl6@ByOPw|5P%zksFi5K6f4|&~r^f(LkqTU*qeBaE(-%&owFD&96Sr$Hv?k zh9(w$jC^mm$=vn$$zpzzEvCi@{#N|-#>w?j9eV%m{t5*GlEAAuc)UJXX*z;yg^@j4 zoR1%9%6zMopM1rWGbl-9gh$2u!V#Z4X-2GoYzJ~6Jj!uq5YVT2Ccek;bc_DUOOhA* zgoo$dkVRu4{SQ9fEt5^wc^Zjvm%OEhw#pJoc{WQ!vE0g^Bfj&@Ps~v6$RjTf#VfQk zeBLf4nK-`fE>1$^I|b4Y7aKNK10zx+?x35a`d!?`o}oAWzPg=lwT)=zeV)s$mb;;j zo%`}KF5GgDXf5voy#>01uB#xS0Rs6&Bvse2!TiglAhw#Mz0^u(Emy$3_z^Enu5}{r z0lZCRL^o1)3~g4R+rK*a1X`bgUT`~aJ2*-YMgD9grLAeF?_{JWCGhh=^TX*G<&uJD zi_WtSgz)qLU-xEpc@~2nJIS3`N%Sx1qM)^L!S+m=5I1Y(DOJhY=0qDZZ9tkwt0Qsv zY4$f@fH(;X3|JS8_~tW9d>?Li6gub+*u|RoP=u-WK7^$#@m*di+l-hQmiaGMzeA^9 z|D2^#=|@wdq63a&zM>6++c=x 
zHcixphcIOkB}QSg>V?dbFrwH;u8HW7@J~B`GyE566MQgQ!YnmC`{IB`eCtf=AhW{y zd;ZiPdf(G@HizNwAG6k`*xVkwnP(bD$r*&Tr{66ByI~PCTFa6fTcHvwN|D7jRZQ=j zqhr=tk#tE<{Bg%;%Dx!mbJvq)-Y0^qt`Zw{CbL5Jxs#@nTUJe;8+vpmXGswV+~hc{ z9b1LIhW$LT`%rO&X9FSOQ1d*-qoL~5Gzuq3g_s{`K^VbjrTDeOC7UsJ zKmapw5XR5=y|h-t6VFqXb+>p87l)|EnSAAGu}&{ic8O&KmA`?|*6FU%cauQKZO+VD zq060Dv2MmX-3F^xt!b^|Z(=xDsrqS{$G_NVr@~bpRdOIqp2TtmuZF!Zv6cRC3;{z6 zPmls(jb{8l?^3psk*?yVGVl>7-WW7eNG>axW&JldD5Tt;?FWX~%uc~H=h%;NRtjXg zKiK@?lLIjEk>XG8{zAr|7{%C=I5-o0_hwEk%iQ`H_X3G=5lFWhxDJ;mm9^d;4Eg?r zl%Osjg!2mrk7-`=&V6q69PS@H9vfiddgUt84qPRdRj`Bk9Fw|8Tmg+JG3|V1xgeVs z5-loTmt~hxL}V{gj;1-Cgngv8r%Z2^wlMuM>wl6G!GTR9G6KM#Z#Ls zMv$8og+2y&QxA%ymS!N2^Qj8nXB+q{lrRJ;Dn0%iXk#=xkgCDTMAsjkGR~@G z=I8?m$GXshPZg-w+s&_$A1qZumOqn%Dmao=deGzV?cKE~cvYA&;o|49)2q5|eZF;B z@9cEv@oJVCd}EnBq!j7eIGZ2sh}$>}*~uVpdZ@3ItjW|u?ax`q3YUV#(n$vCVT%pP zO;S~hu=njKB$TAd1OIX0-369L(j*8bqoudIHN%T%b6dya)v5<|UHc|gmjU~n^Vs2;ih^1jgeGDs!O)QhEx7O> z3M-0_Sol`Tn=m*fMz3vJ_qwO$XO)~P*L{1vmSan&?Wy9PIJ(3_3`ZmD}7;|duB*ZH`)U~@<1NCUO;8JTsbKT z8i%yHq~S!tfAwrx*}6UT%rC6dLlthM?H|KfImdXwdRm$U}G#Q zl7}z&ldg<2hb1x~(B!9uAnrNdjL{gjz3b1wiOgsj*RC9@2Z4_yZjOxTggrFe_i<^5 z+gIXIhy6;HRMma}&=AH`|Wt#0rV#(|I|{K4#uopX35#dKoYa9Rs{( zxYCb}u=M04BJtQatDA;cqy$_hN&AXePIE2`;an%$_E;uoj7?*&W+xB3OV_HF_$zAr zoyHv5gg31yUT~Fy{cy!Zn<;cTw5@X$n;EiJT(EBg2aQjo66QBuCKcXdK2q?}_u;d( zCn7?sTS(sKPRxT>z$WzAm|sKaNbQIs9d!?K){K3oKghKre2TmH=*ZV zQgk7%19xO<@o$A^~VAB(_q!tQ;wP7 z6dd5(v}vBC{k!epJO;JQCj(N?mQjn!KV5~_Q~EMhPX~Bx(W$U+JDm5bMWp~;tU=MK zKK@i|Bj_pZH=0F2hV+_&fkj5_>JAHR;^?EjcZJX5k}Tk*yLImsVv`H4Gcu<9rW0DM zo#VvM8H~ZQm?u&S0X>M9@H>tmY8Jm0KeilC?%bL)Yf=y&%$g;XBv8u%Kyvtdh1@z6w?k%a& zswi5q{Rw2JW939Bslq5ZU-ri#4vV*P_7u@ZLPGACG6HESzbFo8Vk=ZLs^>6#$X=~{ zO&d4FJb!+EHm%R8W(X@2wT=3c%TJQ!J;lXh^#~i~@0@JmpJb4UnKs>fZsjnG*4>KS zPS)h8brD7KWNEdb2WR1Bj|If0lPrPi^k&^Wwch+>So>;4n;wiU?~=UUCg_;gDN8{T zkxJVAs<4KHaF!5;(K}~zLG~&bT0?OLRqT3Fdz-be`_>+xAlFCmhpv zwWErglr?eI@Js%sr7KP==k$%*-UsQDWQLx+bKj4x$VHpwYM|iLO$M_--Bl9j&4T1} zE40oNC&DCBuL3WnS%sAYJUKy>=-z|0zS}*!;J+b#gf7b17IlP9x3z 
z;RzolAW`SEOKaVHU*Y}~HU64eLR4e{OC!QT2JtM8CpS}=B3qvm{p|sPp$QM<>mji- zBfk5ik?4SRR1*EqAOWQvhs(xP(L?nE;0L%OR*kD;2L3|~oL&?!*zDO6H6?Fjdo}fK zsOaKt?yc=|?%?LI-np#JGTt-V!AJ#$q~w)5TB>#PpOpoJXCf69EiB*3Nj$~tsAG`D zC-xY~V^b993NsDcx&o~Ux0z4TlY}(8iANM)QmG%eMioMT#!MIu54RDzf1z_wymVlp zqegq`rH0HTQ)3*sNhNp0XYl%)yzFfyjQa{-2HgF_3Za>s69LRKjnw;^ko!3lEB;}^ z#$G(M4o`8qfrMt?>amyk;IqEaQg?OVy16njEw5FwT=sx^H8mG`G&yrO!x|wm@kB6_ z{^8Q{xx{dAUODov6>EpTrS?S>s|>6z>Le~S zn7xAH(aw*hC4}*lEV2X7jvqq4Goip>CLX)J>A;u{Xy_vGtgzQtbD<+h1G&axho7x5zzjGjt+|D=xYJ3$Bq88bFmG?WbXiBdWtC3TD zo>(mgh;s&ZfmZFBJgV&PrVzmRHdj?zgR5hU)>Js?MgPY{Chiv68@gflziMX>_6Ffx zQx)+eCPi&gXy}fs+?>Qo6xdvtrv363;WQw_imS?@Bx7ZNhD5S$z{<%}^y<(ZVxz#N zP2)mpvxG$^=bLzZH6~}^(NuDlw~4OrGrIm+qG06 zi=YE{r*kg5lo40L<5(=8F2UY8Ov`HA=b?JJNGTMm`bT6c;#tSgI|H2`n9Ac4k{j2_ zHTADx1Ql(m=&y_>RxKONfXn345d)S+i|uH1j9j|It^0O@$HwZ253(7y3H~ak$Vb!s zv}*bN5u-cEP1kN5dq_n$Olp!{8Tx@YRX3Zd6s#A-UV|}3(aomtq3h$cR&eyWK;x-l zEa53mVXxXPqC35B((#|hsS+-0AIM5Av+f#(@nt`J{J3z4)UH^ zYAdZI8YtqA;rf7@5VF)_0x>*<+Zr zzUMHzrA6rAoRqLy7;B-f;F6qhO`r%UhB6{xGypM>{=76Iiie+Aka>8vV)+l2=r3?h z)M-r%-gF-7@2NQvKY;SCkM}KI1iY8qi_FV5N|(kioAek4oN!^Z=@(fCoEXC(y7-ar znSq`jW&yS{xFLBp2*wtoYv-G%N2>TnqoT~(E}|tr5!-2SdlOZz4%3?#WkLi(SWBh> zn5oZctIXnQh3#d8AAzBhZtR0UH?`%*?a~y-llGQL{pjIc=GcC@E|%W!KePK-wd& z*ovJxD`p~9C*K;;jhOdP=27WihEozp&}nAk1MNMO<_C8v+XxUAHiN6}7iPh>(SWL#j~?y1mpHvwiHEz*6}j59Sl#YTcFJO7igzWdxXgUc zq%Gt>S?LK#hIw?nd2~QX0OtIv6Ve0Okn&(W>mwTRS z^vqOs+e~wdmx)tqz(v>qq!h%1O8uNYD72zTqNB%g?Zlo~3IA?8Rf!88HCGCA?@cG` zEvTNMnN+S6!aeb7KhIb?V>-GN?+EI!*t_JE*5Z!7uiyHt{Q`xu^C`5oB0Xd`=6u1$ zUOj(=3Ks_WzV48E4{m*Zglbi+T$X=)O609s*!X_!Jn?+i8}sc;S$x)u9g`Foo*>UA zp`oxcJDqfi{khkZXN4#1B=TJD+0I}&A-v>AqG?L2lFz1)5-UwSL{GQ~D%6+CaL?9J z)RFd0sS0YTQN|PEQ=OsEA8L}lRS0(XX0@eY)56HGjVSq~F-}b+<Gi+qWzb74nTN1iT5f?nKk9cS2ebP3e~oL!S4P^v#h1XDY0o+54bvvotDLjo zDA6DY^E=n(56Dmn{_nd^4U1Ka9OH3SZG{ou7Uy#0Eb!`K_ zT|q6+; zZE?gtO8`6Fsk3+KLots8dOLYSJNE!QB<{)AEe!b3iNH9#j8U#Gt?19SKTTHAdeBuN z<#av~ek{OMC~)#*v|c7I=yQ@O-Q<24>GRU9b=vpih;+z2;kg0aGj!qGwqX9>)sM?0 
eYsQ||u@^OhjYI!0^4_uc1=+~yk_GV(0^)y1K^w0C literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v2_v0.21.1_movies_with_settings.dump b/meilisearch-http/tests/assets/v2_v0.21.1_movies_with_settings.dump new file mode 100644 index 0000000000000000000000000000000000000000..03483a2646364a1aa71b267a474cf5877049592c GIT binary patch literal 11289 zcmV+!EauZ6iwFP!000001MPj;a^pzWUSInuU|qQWKY~9f`wVSn{tmg0eQu&o7sfaD>Coh&$ zMgMr~zHfd1FM4@~|NQTAwONt1k<%zwhmA_PA?l*sZ8z<5vsD|`+cl?JX}8}mD(~Ze z>!(Vs(rC4}{Qo1a@8G|q>@1LBn(RI$D)hho3^2IWXuQGyX1iGj|7-PHvr%ca!T)-* zS^vqZ{Qlej|DON9?0C-3$69G8^-^E%eB9~9p7eK0J8CN9sV8Uj(Li`fs;qO7WU&{@ z(DD*XSVvwMdWmbq%G~b#_z8$G7wr-UQl2zuNrXLb(KqLlb_N4E-DxyzD zUQDbBy|J5Gv~_&>q=vLd5Dow5s#N-PBds*IvM@_BV)HnY3DI7}(vY3FK;*IMb}z8J z&`O917JU)ZhE+zii>2e)LYoWgG_?{b17e;lnAC|Sr!pjp(}QAqclw&%o0g+*_&fb23L z-HN51`g5=gjE+cMEj6-^gzu}cMDIWx@Q8UOYvv`gY}4ya@J>#gjV)ir4y~=JZbO^H zlzup;d(%)R32xLYSwR*%xQm{c_-an#13?eLgGZ3qp|l|~5QQw9dA^SYNNV`? zgq4UXamX4Ah}FolNo@WL?K>kgBNDxB2xA~)n<$wi^dhA3{)FW{R@vC)M`iJlsPvSD zV2o1T{n65u3DNRntd$EfpCetLH5sgOPzxG}vL2~Mu;?-8Z=^HFcwzwtla?aQH< zU)V46^LBSWO{e!8MTs0^t5GkAQnk92r9OPsgNWC3{D#c3O}n=)q$i*|WZQ`CL)bp*=Ad<01d?9(EpBsVjVoQ$b6ISk2Oo;IGB>vS{kglhmZ5;*? zNztx#Pu!>Hu-0E>bZdVypMm@_KiLrp%qhPPNv3r|Oz110mWAO?6$vps1&NMklTS}# z{f^a;tiO{f>$X-tsPSU^<@(Jv-ie-PXOe#~l*B`Cl=(2>ngqC&?nt2 zAC{S^7_Kx}fPhO8`<}L1f}Tdi#KYleNy3QzU>#o2I#A?hXq91{?^3@&xlGC}*@I6d z{eYfJTTNUMPsl(KAQ{n9oQyyP?VLzOAG-61+yP1H6M4J2-o8uZpa&#Uj5FU)O8iD_ zKZUOhY?z7pnnZ-eS|bQYBJlk8IT3l19e=7;j_zL1tATUEe35xpLFCi^$qsC@YZSax4--kT7h(nz#qMSiI`0`A@{oxnP3%lgtrBmX!!_U(=(^kQfKJhpK z%vf{s+PRk&aO_qQHSG7DL|Zr2;DjA|4B zG&v)XjldQer}-+cE+k%w2oQ&^2uiG6Abi5;c&Po9(H(Yxf#;N&PlSyVj|A6|aJr0t z*|URGu4JpM`APZ@ZDSAZ5IKDnF;F7d9@5GgOSBdj{MHfVB4KdSivo1ckum}jg8+c#tgWj&yqtp` zOMQ6{H(#%t*Qd26dBjhx(2l0Z*9VQec5Uia)kbcXyOn0QTPZrU0xUj&md3e5J|KI( z%GAE}0|5ouf0ZCV1%l%k#>hk~bmy2c;gB4R1L#TafXF)@`&v6dSxW-;!htY}iP>O; zK}EzB;9Ufwus&W$A9<;XloB9LqML+b5r9=1%OY?kEJ(UK8W~W;camaB)+q!ObOf%W>p*cBQU(U((Ah*+J_YB5xxy?M5`DBZktn+J)D39! 
zgt5|ug%T9Y;@F5+^7q2w_sCT@CPE!Gl7~^S_ZnZtjf^|vCMrg!K*MVgr)HouWCX#tE=qM2S zN8*psP3rGoSLdJ7P+go{&(70_*S0fk-5%dR4jdB3MWIQ*0*8zc zLm8`;EIeQ;bs`5(8gb;5I}myW`0C9>{T}R|9Q7w*Gmbw!Kb}o)uS6}re0Y8h4m0=m zjVP

( + pub fn new( store: TaskStore, - performer: Arc

, + performers: Vec>, mut config: SchedulerConfig, - ) -> Result>> - where - P: TaskPerformer, - { + ) -> Result>> { let (notifier, rcv) = watch::channel(()); let debounce_time = config.debounce_duration_sec; @@ -247,11 +249,11 @@ impl Scheduler { } let this = Self { - jobs: VecDeque::new(), + snapshots: VecDeque::new(), tasks: TaskQueue::default(), store, - processing: Vec::new(), + processing: Processing::Nothing, next_fetched_task_id: 0, config, notifier, @@ -264,7 +266,7 @@ impl Scheduler { let update_loop = UpdateLoop::new( this.clone(), - performer, + performers, debounce_time.filter(|&v| v > 0).map(Duration::from_secs), rcv, ); @@ -283,9 +285,13 @@ impl Scheduler { self.tasks.insert(task); } + pub fn register_snapshot(&mut self, job: SnapshotJob) { + self.snapshots.push_back(job); + } + /// Clears the processing list, this method should be called when the processing of a batch is finished. pub fn finish(&mut self) { - self.processing.clear(); + self.processing = Processing::Nothing; } pub fn notify(&self) { @@ -293,13 +299,27 @@ impl Scheduler { } fn notify_if_not_empty(&self) { - if !self.jobs.is_empty() || !self.tasks.is_empty() { + if !self.snapshots.is_empty() || !self.tasks.is_empty() { self.notify(); } } - pub async fn update_tasks(&self, tasks: Vec) -> Result> { - self.store.update_tasks(tasks).await + pub async fn update_tasks(&self, content: BatchContent) -> Result { + match content { + BatchContent::DocumentAddtitionBatch(tasks) => { + let tasks = self.store.update_tasks(tasks).await?; + Ok(BatchContent::DocumentAddtitionBatch(tasks)) + } + BatchContent::IndexUpdate(t) => { + let mut tasks = self.store.update_tasks(vec![t]).await?; + Ok(BatchContent::IndexUpdate(tasks.remove(0))) + } + BatchContent::Dump(t) => { + let mut tasks = self.store.update_tasks(vec![t]).await?; + Ok(BatchContent::Dump(tasks.remove(0))) + } + other => Ok(other), + } } pub async fn get_task(&self, id: TaskId, filter: Option) -> Result { @@ -318,16 +338,16 @@ impl Scheduler { 
pub async fn get_processing_tasks(&self) -> Result> { let mut tasks = Vec::new(); - for id in self.processing.iter() { - let task = self.store.get_task(*id, None).await?; + for id in self.processing.ids() { + let task = self.store.get_task(id, None).await?; tasks.push(task); } Ok(tasks) } - pub async fn schedule_job(&mut self, job: Job) { - self.jobs.push_back(job); + pub async fn schedule_snapshot(&mut self, job: SnapshotJob) { + self.snapshots.push_back(job); self.notify(); } @@ -353,106 +373,163 @@ impl Scheduler { } /// Prepare the next batch, and set `processing` to the ids in that batch. - pub async fn prepare(&mut self) -> Result { + pub async fn prepare(&mut self) -> Result { // If there is a job to process, do it first. - if let Some(job) = self.jobs.pop_front() { + if let Some(job) = self.snapshots.pop_front() { // There is more work to do, notify the update loop self.notify_if_not_empty(); - return Ok(Pending::Job(job)); + let batch = Batch::new(None, BatchContent::Snapshot(job)); + return Ok(batch); } + // Try to fill the queue with pending tasks. self.fetch_pending_tasks().await?; - make_batch(&mut self.tasks, &mut self.processing, &self.config); + self.processing = make_batch(&mut self.tasks, &self.config); log::debug!("prepared batch with {} tasks", self.processing.len()); - if !self.processing.is_empty() { - let ids = std::mem::take(&mut self.processing); + if !self.processing.is_nothing() { + let (processing, mut content) = self + .store + .get_processing_tasks(std::mem::take(&mut self.processing)) + .await?; - let (ids, mut tasks) = self.store.get_pending_tasks(ids).await?; - - // The batch id is the id of the first update it contains - let id = match tasks.first() { + // The batch id is the id of the first update it contains. At this point we must have a + // valid batch that contains at least 1 task. + let id = match content.first() { Some(Task { id, .. 
}) => *id, _ => panic!("invalid batch"), }; - tasks.iter_mut().for_each(|t| { - t.events.push(TaskEvent::Batched { - batch_id: id, - timestamp: OffsetDateTime::now_utc(), - }) + content.push_event(TaskEvent::Batched { + batch_id: id, + timestamp: OffsetDateTime::now_utc(), }); - self.processing = ids; + self.processing = processing; - let batch = Batch { - id, - created_at: OffsetDateTime::now_utc(), - tasks, - }; + let batch = Batch::new(Some(id), content); // There is more work to do, notify the update loop self.notify_if_not_empty(); - Ok(Pending::Batch(batch)) + Ok(batch) } else { - Ok(Pending::Nothing) + Ok(Batch::empty()) } } } -#[derive(Debug)] -pub enum Pending { - Batch(Batch), - Job(Job), +#[derive(Debug, Default)] +pub enum Processing { + DocumentAdditions(Vec), + IndexUpdate(TaskId), + Dump(TaskId), + /// Variant used when there is nothing to process. + #[default] Nothing, } -fn make_batch(tasks: &mut TaskQueue, processing: &mut Vec, config: &SchedulerConfig) { - processing.clear(); +enum ProcessingIter<'a> { + Many(slice::Iter<'a, TaskId>), + Single(Option), +} - let mut doc_count = 0; - tasks.head_mut(|list| match list.peek().copied() { - Some(PendingTask { - kind: TaskType::Other, - id, - }) => { - processing.push(id); - list.pop(); +impl<'a> Iterator for ProcessingIter<'a> { + type Item = TaskId; + + fn next(&mut self) -> Option { + match self { + ProcessingIter::Many(iter) => iter.next().copied(), + ProcessingIter::Single(val) => val.take(), } - Some(PendingTask { kind, .. }) => loop { - match list.peek() { - Some(pending) if pending.kind == kind => { - // We always need to process at least one task for the scheduler to make progress. - if processing.len() >= config.max_batch_size.unwrap_or(usize::MAX).max(1) { - break; - } - let pending = list.pop().unwrap(); - processing.push(pending.id); + } +} - // We add the number of documents to the count if we are scheduling document additions and - // stop adding if we already have enough. 
- // - // We check that bound only after adding the current task to the batch, so that a batch contains at least one task. - match pending.kind { - TaskType::DocumentUpdate { number } - | TaskType::DocumentAddition { number } => { - doc_count += number; +impl Processing { + fn is_nothing(&self) -> bool { + matches!(self, Processing::Nothing) + } - if doc_count >= config.max_documents_per_batch.unwrap_or(usize::MAX) { + pub fn ids(&self) -> impl Iterator + '_ { + match self { + Processing::DocumentAdditions(v) => ProcessingIter::Many(v.iter()), + Processing::IndexUpdate(id) | Processing::Dump(id) => ProcessingIter::Single(Some(*id)), + Processing::Nothing => ProcessingIter::Single(None), + } + } + + pub fn len(&self) -> usize { + match self { + Processing::DocumentAdditions(v) => v.len(), + Processing::IndexUpdate(_) | Processing::Dump(_) => 1, + Processing::Nothing => 0, + } + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +fn make_batch(tasks: &mut TaskQueue, config: &SchedulerConfig) -> Processing { + let mut doc_count = 0; + tasks + .head_mut(|list| match list.peek().copied() { + Some(PendingTask { + kind: TaskType::IndexUpdate, + id, + }) => { + list.pop(); + Processing::IndexUpdate(id) + } + Some(PendingTask { + kind: TaskType::Dump, + id, + }) => { + list.pop(); + Processing::Dump(id) + } + Some(PendingTask { kind, .. }) => { + let mut task_list = Vec::new(); + loop { + match list.peek() { + Some(pending) if pending.kind == kind => { + // We always need to process at least one task for the scheduler to make progress. + if task_list.len() >= config.max_batch_size.unwrap_or(usize::MAX).max(1) + { break; } + let pending = list.pop().unwrap(); + task_list.push(pending.id); + + // We add the number of documents to the count if we are scheduling document additions and + // stop adding if we already have enough. + // + // We check that bound only after adding the current task to the batch, so that a batch contains at least one task. 
+ match pending.kind { + TaskType::DocumentUpdate { number } + | TaskType::DocumentAddition { number } => { + doc_count += number; + + if doc_count + >= config.max_documents_per_batch.unwrap_or(usize::MAX) + { + break; + } + } + _ => (), + } } - _ => (), + _ => break, } } - _ => break, + Processing::DocumentAdditions(task_list) } - }, - None => (), - }); + None => Processing::Nothing, + }) + .unwrap_or(Processing::Nothing) } #[cfg(test)] diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index c20d2151b..cb5ba671a 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -4,14 +4,12 @@ use meilisearch_error::ResponseError; use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; -use tokio::sync::oneshot; use uuid::Uuid; use super::batch::BatchId; use crate::{ index::{Settings, Unchecked}, - index_resolver::{error::IndexResolverError, IndexUid}, - snapshot::SnapshotJob, + index_resolver::IndexUid, }; pub type TaskId = u64; @@ -110,33 +108,6 @@ impl Task { } } -/// A job is like a volatile priority `Task`. -/// It should be processed as fast as possible and is not stored on disk. -/// This means, when Meilisearch is closed all your unprocessed jobs will disappear. 
-#[derive(Debug, derivative::Derivative)] -#[derivative(PartialEq)] -pub enum Job { - Dump { - #[derivative(PartialEq = "ignore")] - ret: oneshot::Sender, IndexResolverError>>, - path: PathBuf, - }, - Snapshot(#[derivative(PartialEq = "ignore")] SnapshotJob), - Empty, -} - -impl Default for Job { - fn default() -> Self { - Self::Empty - } -} - -impl Job { - pub fn take(&mut self) -> Self { - std::mem::take(self) - } -} - #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] #[cfg_attr(test, derive(proptest_derive::Arbitrary))] pub enum DocumentDeletion { diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index bde0f6360..f580c8e26 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -9,7 +9,9 @@ use log::debug; use milli::heed::{Env, RwTxn}; use time::OffsetDateTime; +use super::batch::BatchContent; use super::error::TaskError; +use super::scheduler::Processing; use super::task::{Task, TaskContent, TaskId}; use super::Result; use crate::index_resolver::IndexUid; @@ -122,19 +124,44 @@ impl TaskStore { } } - pub async fn get_pending_tasks(&self, ids: Vec) -> Result<(Vec, Vec)> { + /// This methods takes a `Processing` which contains the next task ids to process, and returns + /// the coresponding tasks along with the ownership to the passed processing. + /// + /// We need get_processing_tasks to take ownership over `Processing` because we need it to be + /// valid for 'static. + pub async fn get_processing_tasks( + &self, + processing: Processing, + ) -> Result<(Processing, BatchContent)> { let store = self.store.clone(); let tasks = tokio::task::spawn_blocking(move || -> Result<_> { - let mut tasks = Vec::new(); let txn = store.rtxn()?; - for id in ids.iter() { - let task = store - .get(&txn, *id)? 
- .ok_or(TaskError::UnexistingTask(*id))?; - tasks.push(task); - } - Ok((ids, tasks)) + let content = match processing { + Processing::DocumentAdditions(ref ids) => { + let mut tasks = Vec::new(); + + for id in ids.iter() { + let task = store + .get(&txn, *id)? + .ok_or(TaskError::UnexistingTask(*id))?; + tasks.push(task); + } + BatchContent::DocumentAddtitionBatch(tasks) + } + Processing::IndexUpdate(id) => { + let task = store.get(&txn, id)?.ok_or(TaskError::UnexistingTask(id))?; + BatchContent::IndexUpdate(task) + } + Processing::Dump(id) => { + let task = store.get(&txn, id)?.ok_or(TaskError::UnexistingTask(id))?; + debug_assert!(matches!(task.content, TaskContent::Dump { .. })); + BatchContent::Dump(task) + } + Processing::Nothing => unreachable!(), + }; + + Ok((processing, content)) }) .await??; @@ -231,7 +258,7 @@ impl TaskStore { #[cfg(test)] pub mod test { - use crate::tasks::task_store::store::test::tmp_env; + use crate::tasks::{scheduler::Processing, task_store::store::test::tmp_env}; use super::*; @@ -280,12 +307,12 @@ pub mod test { } } - pub async fn get_pending_tasks( + pub async fn get_processing_tasks( &self, - tasks: Vec, - ) -> Result<(Vec, Vec)> { + tasks: Processing, + ) -> Result<(Processing, BatchContent)> { match self { - Self::Real(s) => s.get_pending_tasks(tasks).await, + Self::Real(s) => s.get_processing_tasks(tasks).await, Self::Mock(m) => unsafe { m.get("get_pending_task").call(tasks) }, } } diff --git a/meilisearch-lib/src/tasks/update_loop.rs b/meilisearch-lib/src/tasks/update_loop.rs index b09811721..01e88755a 100644 --- a/meilisearch-lib/src/tasks/update_loop.rs +++ b/meilisearch-lib/src/tasks/update_loop.rs @@ -7,33 +7,29 @@ use tokio::time::interval_at; use super::batch::Batch; use super::error::Result; -use super::scheduler::Pending; -use super::{Scheduler, TaskPerformer}; +use super::{BatchHandler, Scheduler}; use crate::tasks::task::TaskEvent; /// The update loop sequentially performs batches of updates by asking the scheduler 
for a batch, /// and handing it to the `TaskPerformer`. -pub struct UpdateLoop { +pub struct UpdateLoop { scheduler: Arc>, - performer: Arc

, + performers: Vec>, notifier: Option>, debounce_duration: Option, } -impl

UpdateLoop

-where - P: TaskPerformer + Send + Sync + 'static, -{ +impl UpdateLoop { pub fn new( scheduler: Arc>, - performer: Arc

, + performers: Vec>, debuf_duration: Option, notifier: watch::Receiver<()>, ) -> Self { Self { scheduler, - performer, + performers, debounce_duration: debuf_duration, notifier: Some(notifier), } @@ -59,34 +55,29 @@ where } async fn process_next_batch(&self) -> Result<()> { - let pending = { self.scheduler.write().await.prepare().await? }; - match pending { - Pending::Batch(mut batch) => { - for task in &mut batch.tasks { - task.events - .push(TaskEvent::Processing(OffsetDateTime::now_utc())); - } + let mut batch = { self.scheduler.write().await.prepare().await? }; + let performer = self + .performers + .iter() + .find(|p| p.accept(&batch)) + .expect("No performer found for batch") + .clone(); - batch.tasks = { - self.scheduler - .read() - .await - .update_tasks(batch.tasks) - .await? - }; + batch + .content + .push_event(TaskEvent::Processing(OffsetDateTime::now_utc())); - let performer = self.performer.clone(); + batch.content = { + self.scheduler + .read() + .await + .update_tasks(batch.content) + .await? + }; - let batch = performer.process_batch(batch).await; + let batch = performer.process_batch(batch).await; - self.handle_batch_result(batch).await?; - } - Pending::Job(job) => { - let performer = self.performer.clone(); - performer.process_job(job).await; - } - Pending::Nothing => (), - } + self.handle_batch_result(batch, performer).await?; Ok(()) } @@ -96,13 +87,17 @@ where /// When a task is processed, the result of the process is pushed to its event list. The /// `handle_batch_result` make sure that the new state is saved to the store. /// The tasks are then removed from the processing queue. 
- async fn handle_batch_result(&self, mut batch: Batch) -> Result<()> { + async fn handle_batch_result( + &self, + mut batch: Batch, + performer: Arc, + ) -> Result<()> { let mut scheduler = self.scheduler.write().await; - let tasks = scheduler.update_tasks(batch.tasks).await?; + let content = scheduler.update_tasks(batch.content).await?; scheduler.finish(); drop(scheduler); - batch.tasks = tasks; - self.performer.finish(&batch).await; + batch.content = content; + performer.finish(&batch).await; Ok(()) } } From 60a8249de61c979edcb83f6c0c9c9782d472ab13 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 19 May 2022 14:44:24 +0200 Subject: [PATCH 022/185] add dump batch handler --- meilisearch-http/src/routes/dump.rs | 6 +- meilisearch-lib/src/dump/actor.rs | 7 +- meilisearch-lib/src/dump/error.rs | 16 +- meilisearch-lib/src/dump/handle_impl.rs | 2 +- meilisearch-lib/src/dump/mod.rs | 182 ++++++------------ meilisearch-lib/src/index_controller/error.rs | 4 +- meilisearch-lib/src/index_controller/mod.rs | 32 +-- .../src/tasks/batch_handlers/dump_handler.rs | 92 +++++++++ .../src/tasks/batch_handlers/mod.rs | 1 + 9 files changed, 166 insertions(+), 176 deletions(-) create mode 100644 meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs diff --git a/meilisearch-http/src/routes/dump.rs b/meilisearch-http/src/routes/dump.rs index b58552f27..7d32fdda5 100644 --- a/meilisearch-http/src/routes/dump.rs +++ b/meilisearch-http/src/routes/dump.rs @@ -45,8 +45,8 @@ async fn get_dump_status( meilisearch: GuardedData, MeiliSearch>, path: web::Path, ) -> Result { - let res = meilisearch.dump_info(path.dump_uid.clone()).await?; + todo!(); - debug!("returns: {:?}", res); - Ok(HttpResponse::Ok().json(res)) + // debug!("returns: {:?}", res); + // Ok(HttpResponse::Ok().json(res)) } diff --git a/meilisearch-lib/src/dump/actor.rs b/meilisearch-lib/src/dump/actor.rs index 48fc077ca..b7f615e44 100644 --- a/meilisearch-lib/src/dump/actor.rs +++ b/meilisearch-lib/src/dump/actor.rs @@ -9,7 
+9,7 @@ use time::macros::format_description; use time::OffsetDateTime; use tokio::sync::{mpsc, oneshot, RwLock}; -use super::error::{DumpActorError, Result}; +use super::error::{DumpError, Result}; use super::{DumpInfo, DumpJob, DumpMsg, DumpStatus}; use crate::tasks::Scheduler; use crate::update_file_store::UpdateFileStore; @@ -106,7 +106,7 @@ impl DumpActor { let _lock = match self.lock.try_lock() { Some(lock) => lock, None => { - ret.send(Err(DumpActorError::DumpAlreadyRunning)) + ret.send(Err(DumpError::DumpAlreadyRunning)) .expect("Dump actor is dead"); return; } @@ -123,7 +123,6 @@ impl DumpActor { dump_path: self.dump_path.clone(), db_path: self.analytics_path.clone(), update_file_store: self.update_file_store.clone(), - scheduler: self.scheduler.clone(), uid: uid.clone(), update_db_size: self.update_db_size, index_db_size: self.index_db_size, @@ -155,7 +154,7 @@ impl DumpActor { async fn handle_dump_info(&self, uid: String) -> Result { match self.dump_infos.read().await.get(&uid) { Some(info) => Ok(info.clone()), - _ => Err(DumpActorError::DumpDoesNotExist(uid)), + _ => Err(DumpError::DumpDoesNotExist(uid)), } } } diff --git a/meilisearch-lib/src/dump/error.rs b/meilisearch-lib/src/dump/error.rs index f72b6d1dd..7931a8d75 100644 --- a/meilisearch-lib/src/dump/error.rs +++ b/meilisearch-lib/src/dump/error.rs @@ -3,10 +3,10 @@ use meilisearch_error::{internal_error, Code, ErrorCode}; use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError}; -pub type Result = std::result::Result; +pub type Result = std::result::Result; #[derive(thiserror::Error, Debug)] -pub enum DumpActorError { +pub enum DumpError { #[error("A dump is already processing. 
You must wait until the current process is finished before requesting another dump.")] DumpAlreadyRunning, #[error("Dump `{0}` not found.")] @@ -18,7 +18,7 @@ pub enum DumpActorError { } internal_error!( - DumpActorError: milli::heed::Error, + DumpError: milli::heed::Error, std::io::Error, tokio::task::JoinError, tokio::sync::oneshot::error::RecvError, @@ -29,13 +29,13 @@ internal_error!( TaskError ); -impl ErrorCode for DumpActorError { +impl ErrorCode for DumpError { fn error_code(&self) -> Code { match self { - DumpActorError::DumpAlreadyRunning => Code::DumpAlreadyInProgress, - DumpActorError::DumpDoesNotExist(_) => Code::DumpNotFound, - DumpActorError::Internal(_) => Code::Internal, - DumpActorError::IndexResolver(e) => e.error_code(), + DumpError::DumpAlreadyRunning => Code::DumpAlreadyInProgress, + DumpError::DumpDoesNotExist(_) => Code::DumpNotFound, + DumpError::Internal(_) => Code::Internal, + DumpError::IndexResolver(e) => e.error_code(), } } } diff --git a/meilisearch-lib/src/dump/handle_impl.rs b/meilisearch-lib/src/dump/handle_impl.rs index 16a312e70..9577b3663 100644 --- a/meilisearch-lib/src/dump/handle_impl.rs +++ b/meilisearch-lib/src/dump/handle_impl.rs @@ -1,7 +1,7 @@ use tokio::sync::{mpsc, oneshot}; use super::error::Result; -use super::{DumpActorHandle, DumpInfo, DumpMsg}; +use super::{DumpActorHandle, DumpMsg}; #[derive(Clone)] pub struct DumpActorHandleImpl { diff --git a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index bc717b35e..59b51a601 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -1,32 +1,30 @@ use std::fs::File; use std::path::{Path, PathBuf}; -use std::sync::Arc; use anyhow::bail; -use log::info; +use log::{info, trace}; +use meilisearch_auth::AuthController; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; -pub use actor::DumpActor; -pub use handle_impl::*; -pub use message::DumpMsg; use tempfile::TempDir; -use tokio::sync::RwLock; +use 
tokio::fs::create_dir_all; -use crate::compression::from_tar_gz; +use crate::analytics; +use crate::compression::{from_tar_gz, to_tar_gz}; +use crate::dump::error::DumpError; use crate::options::IndexerOpts; -use crate::tasks::Scheduler; use crate::update_file_store::UpdateFileStore; use error::Result; use self::loaders::{v2, v3, v4}; -mod actor; +// mod actor; mod compat; pub mod error; -mod handle_impl; +// mod handle_impl; mod loaders; -mod message; +// mod message; const META_FILE_NAME: &str = "metadata.json"; @@ -51,18 +49,6 @@ impl Metadata { } } -#[async_trait::async_trait] -#[cfg_attr(test, mockall::automock)] -pub trait DumpActorHandle { - /// Start the creation of a dump - /// Implementation: [handle_impl::DumpActorHandleImpl::create_dump] - async fn create_dump(&self) -> Result; - - /// Return the status of an already created dump - /// Implementation: [handle_impl::DumpActorHandleImpl::dump_info] - async fn dump_info(&self, uid: String) -> Result; -} - #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct MetadataV1 { @@ -159,49 +145,6 @@ pub enum DumpStatus { Failed, } -#[derive(Debug, Serialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct DumpInfo { - pub uid: String, - pub status: DumpStatus, - #[serde(skip_serializing_if = "Option::is_none")] - pub error: Option, - #[serde(with = "time::serde::rfc3339")] - started_at: OffsetDateTime, - #[serde( - skip_serializing_if = "Option::is_none", - with = "time::serde::rfc3339::option" - )] - finished_at: Option, -} - -impl DumpInfo { - pub fn new(uid: String, status: DumpStatus) -> Self { - Self { - uid, - status, - error: None, - started_at: OffsetDateTime::now_utc(), - finished_at: None, - } - } - - pub fn with_error(&mut self, error: String) { - self.status = DumpStatus::Failed; - self.finished_at = Some(OffsetDateTime::now_utc()); - self.error = Some(error); - } - - pub fn done(&mut self) { - self.finished_at = Some(OffsetDateTime::now_utc()); - self.status = 
DumpStatus::Done; - } - - pub fn dump_already_in_progress(&self) -> bool { - self.status == DumpStatus::InProgress - } -} - pub fn load_dump( dst_path: impl AsRef, src_path: impl AsRef, @@ -313,76 +256,59 @@ fn persist_dump(dst_path: impl AsRef, tmp_dst: TempDir) -> anyhow::Result< } pub struct DumpJob { - dump_path: PathBuf, - db_path: PathBuf, - update_file_store: UpdateFileStore, - scheduler: Arc>, - uid: String, - update_db_size: usize, - index_db_size: usize, + pub dump_path: PathBuf, + pub db_path: PathBuf, + pub update_file_store: UpdateFileStore, + pub uid: String, + pub update_db_size: usize, + pub index_db_size: usize, } impl DumpJob { - async fn run(self) -> Result<()> { - // trace!("Performing dump."); - // - // create_dir_all(&self.dump_path).await?; - // - // let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; - // let temp_dump_path = temp_dump_dir.path().to_owned(); - // - // let meta = MetadataVersion::new_v4(self.index_db_size, self.update_db_size); - // let meta_path = temp_dump_path.join(META_FILE_NAME); - // let mut meta_file = File::create(&meta_path)?; - // serde_json::to_writer(&mut meta_file, &meta)?; - // analytics::copy_user_id(&self.db_path, &temp_dump_path); - // - // create_dir_all(&temp_dump_path.join("indexes")).await?; - // - // let (sender, receiver) = oneshot::channel(); - // - // self.scheduler - // .write() - // .await - // .schedule_job(Job::Dump { - // ret: sender, - // path: temp_dump_path.clone(), - // }) - // .await; - // - // // wait until the job has started performing before finishing the dump process - // let sender = receiver.await??; - // - // AuthController::dump(&self.db_path, &temp_dump_path)?; - // - // //TODO(marin): this is not right, the scheduler should dump itself, not do it here... 
+ pub async fn run(self) -> Result<()> { + trace!("Performing dump."); + + create_dir_all(&self.dump_path).await?; + + let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; + let temp_dump_path = temp_dump_dir.path().to_owned(); + + let meta = MetadataVersion::new_v4(self.index_db_size, self.update_db_size); + let meta_path = temp_dump_path.join(META_FILE_NAME); + let mut meta_file = File::create(&meta_path)?; + serde_json::to_writer(&mut meta_file, &meta)?; + analytics::copy_user_id(&self.db_path, &temp_dump_path); + + create_dir_all(&temp_dump_path.join("indexes")).await?; + + AuthController::dump(&self.db_path, &temp_dump_path)?; + // TODO: Dump indexes and updates + + //TODO(marin): this is not right, the scheduler should dump itself, not do it here... // self.scheduler // .read() // .await // .dump(&temp_dump_path, self.update_file_store.clone()) // .await?; - // - // let dump_path = tokio::task::spawn_blocking(move || -> Result { - // // for now we simply copy the updates/updates_files - // // FIXME: We may copy more files than necessary, if new files are added while we are - // // performing the dump. We need a way to filter them out. - // - // let temp_dump_file = tempfile::NamedTempFile::new_in(&self.dump_path)?; - // to_tar_gz(temp_dump_path, temp_dump_file.path()) - // .map_err(|e| DumpActorError::Internal(e.into()))?; - // - // let dump_path = self.dump_path.join(self.uid).with_extension("dump"); - // temp_dump_file.persist(&dump_path)?; - // - // Ok(dump_path) - // }) - // .await??; - // - // // notify the update loop that we are finished performing the dump. - // let _ = sender.send(()); - // - // info!("Created dump in {:?}.", dump_path); - // + + let dump_path = tokio::task::spawn_blocking(move || -> Result { + // for now we simply copy the updates/updates_files + // FIXME: We may copy more files than necessary, if new files are added while we are + // performing the dump. We need a way to filter them out. 
+ + let temp_dump_file = tempfile::NamedTempFile::new_in(&self.dump_path)?; + to_tar_gz(temp_dump_path, temp_dump_file.path()) + .map_err(|e| DumpError::Internal(e.into()))?; + + let dump_path = self.dump_path.join(self.uid).with_extension("dump"); + temp_dump_file.persist(&dump_path)?; + + Ok(dump_path) + }) + .await??; + + info!("Created dump in {:?}.", dump_path); + Ok(()) } } diff --git a/meilisearch-lib/src/index_controller/error.rs b/meilisearch-lib/src/index_controller/error.rs index 11ef03d73..529887b6a 100644 --- a/meilisearch-lib/src/index_controller/error.rs +++ b/meilisearch-lib/src/index_controller/error.rs @@ -6,7 +6,7 @@ use tokio::task::JoinError; use super::DocumentAdditionFormat; use crate::document_formats::DocumentFormatError; -use crate::dump::error::DumpActorError; +use crate::dump::error::DumpError; use crate::index::error::IndexError; use crate::tasks::error::TaskError; use crate::update_file_store::UpdateFileStoreError; @@ -28,7 +28,7 @@ pub enum IndexControllerError { #[error("{0}")] TaskError(#[from] TaskError), #[error("{0}")] - DumpError(#[from] DumpActorError), + DumpError(#[from] DumpError), #[error("{0}")] DocumentFormatError(#[from] DocumentFormatError), #[error("A {0} payload is missing.")] diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index b73402d56..14f262a51 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -13,13 +13,13 @@ use futures::StreamExt; use milli::update::IndexDocumentsMethod; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; -use tokio::sync::{mpsc, RwLock}; +use tokio::sync::RwLock; use tokio::task::spawn_blocking; use tokio::time::sleep; use uuid::Uuid; use crate::document_formats::{read_csv, read_json, read_ndjson}; -use crate::dump::{self, load_dump, DumpActor, DumpActorHandle, DumpActorHandleImpl, DumpInfo}; +use crate::dump::load_dump; use crate::index::{ Checked, Document, 
IndexMeta, IndexStats, SearchQuery, SearchResult, Settings, Unchecked, }; @@ -75,7 +75,6 @@ pub struct IndexController { scheduler: Arc>, task_store: TaskStore, dump_path: PathBuf, - dump_handle: dump::DumpActorHandleImpl, pub update_file_store: UpdateFileStore, } @@ -85,7 +84,6 @@ impl Clone for IndexController { Self { index_resolver: self.index_resolver.clone(), scheduler: self.scheduler.clone(), - dump_handle: self.dump_handle.clone(), update_file_store: self.update_file_store.clone(), task_store: self.task_store.clone(), dump_path: self.dump_path.clone(), @@ -228,23 +226,6 @@ impl IndexControllerBuilder { let dump_path = self .dump_dst .ok_or_else(|| anyhow::anyhow!("Missing dump directory path"))?; - let dump_handle = { - let analytics_path = &db_path; - let (sender, receiver) = mpsc::channel(10); - let actor = DumpActor::new( - receiver, - update_file_store.clone(), - scheduler.clone(), - dump_path.clone(), - analytics_path, - index_size, - task_store_size, - ); - - tokio::task::spawn_local(actor.run()); - - DumpActorHandleImpl { sender } - }; if self.schedule_snapshot { let snapshot_period = self @@ -269,7 +250,6 @@ impl IndexControllerBuilder { Ok(IndexController { index_resolver, scheduler, - dump_handle, dump_path, update_file_store, task_store, @@ -633,14 +613,6 @@ where indexes, }) } - - pub async fn create_dump(&self) -> Result { - Ok(self.dump_handle.create_dump().await?) - } - - pub async fn dump_info(&self, uid: String) -> Result { - Ok(self.dump_handle.dump_info(uid).await?) 
- } } pub async fn get_arc_ownership_blocking(mut item: Arc) -> T { diff --git a/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs b/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs new file mode 100644 index 000000000..c0ef70ba8 --- /dev/null +++ b/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs @@ -0,0 +1,92 @@ +use std::path::{Path, PathBuf}; + +use log::{error, trace}; +use time::{macros::format_description, OffsetDateTime}; + +use crate::dump::DumpJob; +use crate::tasks::batch::{Batch, BatchContent}; +use crate::tasks::BatchHandler; +use crate::update_file_store::UpdateFileStore; + +pub struct DumpHandler { + update_file_store: UpdateFileStore, + dump_path: PathBuf, + db_path: PathBuf, + update_db_size: usize, + index_db_size: usize, +} + +/// Generate uid from creation date +fn generate_uid() -> String { + OffsetDateTime::now_utc() + .format(format_description!( + "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" + )) + .unwrap() +} + +impl DumpHandler { + pub fn new( + update_file_store: UpdateFileStore, + dump_path: impl AsRef, + db_path: impl AsRef, + index_db_size: usize, + update_db_size: usize, + ) -> Self { + Self { + update_file_store, + dump_path: dump_path.as_ref().into(), + db_path: db_path.as_ref().into(), + index_db_size, + update_db_size, + } + } + + async fn create_dump(&self) { + let uid = generate_uid(); + + let task = DumpJob { + dump_path: self.dump_path.clone(), + db_path: self.db_path.clone(), + update_file_store: self.update_file_store.clone(), + uid: uid.clone(), + update_db_size: self.update_db_size, + index_db_size: self.index_db_size, + }; + + let task_result = tokio::task::spawn_local(task.run()).await; + + match task_result { + Ok(Ok(())) => { + trace!("Dump succeed"); + } + Ok(Err(e)) => { + error!("Dump failed: {}", e); + } + Err(_) => { + error!("Dump panicked. 
Dump status set to failed"); + } + }; + } +} + +#[async_trait::async_trait] +impl BatchHandler for DumpHandler { + fn accept(&self, batch: &Batch) -> bool { + matches!(batch.content, BatchContent::Dump { .. }) + } + + async fn process_batch(&self, batch: Batch) -> Batch { + match batch.content { + BatchContent::Dump { .. } => { + self.create_dump().await; + batch + } + _ => unreachable!("invalid batch content for dump"), + } + } + + async fn finish(&self, _: &Batch) { + () + } +} diff --git a/meilisearch-lib/src/tasks/batch_handlers/mod.rs b/meilisearch-lib/src/tasks/batch_handlers/mod.rs index 0e94c76f1..f72c1b760 100644 --- a/meilisearch-lib/src/tasks/batch_handlers/mod.rs +++ b/meilisearch-lib/src/tasks/batch_handlers/mod.rs @@ -1,2 +1,3 @@ +pub mod dump_handler; pub mod empty_handler; mod index_resolver_handler; From 414d0907ced804dd2dcb1f1e82c80633013ff84e Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 19 May 2022 14:51:04 +0200 Subject: [PATCH 023/185] register dump handler --- meilisearch-http/src/task.rs | 2 +- meilisearch-lib/src/index_controller/mod.rs | 34 +++++++++++++-------- meilisearch-lib/src/tasks/mod.rs | 2 +- meilisearch-lib/src/tasks/task.rs | 7 +---- 4 files changed, 25 insertions(+), 20 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 5a8542ff8..3febe002f 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -218,7 +218,7 @@ impl From for TaskView { TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }), ), - TaskContent::Dump { path: _ } => (TaskType::Dump, None), + TaskContent::Dump => (TaskType::Dump, None), }; // An event always has at least one event: "Created" diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 14f262a51..de4426f81 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -27,7 +27,9 @@ use crate::options::{IndexerOpts, 
SchedulerConfig}; use crate::snapshot::{load_snapshot, SnapshotService}; use crate::tasks::error::TaskError; use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskId}; -use crate::tasks::{BatchHandler, EmptyBatchHandler, Scheduler, TaskFilter, TaskStore}; +use crate::tasks::{ + BatchHandler, DumpHandler, EmptyBatchHandler, Scheduler, TaskFilter, TaskStore, +}; use error::Result; use self::error::IndexControllerError; @@ -74,7 +76,6 @@ pub struct IndexController { pub index_resolver: Arc>, scheduler: Arc>, task_store: TaskStore, - dump_path: PathBuf, pub update_file_store: UpdateFileStore, } @@ -86,7 +87,6 @@ impl Clone for IndexController { scheduler: self.scheduler.clone(), update_file_store: self.update_file_store.clone(), task_store: self.task_store.clone(), - dump_path: self.dump_path.clone(), } } } @@ -218,15 +218,28 @@ impl IndexControllerBuilder { update_file_store.clone(), )?); - let task_store = TaskStore::new(meta_env)?; - let handlers: Vec> = - vec![index_resolver.clone(), Arc::new(EmptyBatchHandler)]; - let scheduler = Scheduler::new(task_store.clone(), handlers, scheduler_config)?; - let dump_path = self .dump_dst .ok_or_else(|| anyhow::anyhow!("Missing dump directory path"))?; + let dump_handler = Arc::new(DumpHandler::new( + update_file_store.clone(), + dump_path, + db_path.as_ref().clone(), + index_size, + task_store_size, + )); + let task_store = TaskStore::new(meta_env)?; + + // register all the batch handlers for use with the scheduler. 
+ let handlers: Vec> = vec![ + index_resolver.clone(), + dump_handler, + // dummy handler to catch all empty batches + Arc::new(EmptyBatchHandler), + ]; + let scheduler = Scheduler::new(task_store.clone(), handlers, scheduler_config)?; + if self.schedule_snapshot { let snapshot_period = self .snapshot_interval @@ -250,7 +263,6 @@ impl IndexControllerBuilder { Ok(IndexController { index_resolver, scheduler, - dump_path, update_file_store, task_store, }) @@ -408,9 +420,7 @@ where } pub async fn register_dump_task(&self) -> Result { - let content = TaskContent::Dump { - path: self.dump_path.clone(), - }; + let content = TaskContent::Dump; let task = self.task_store.register(None, content).await?; self.scheduler.read().await.notify(); Ok(task) diff --git a/meilisearch-lib/src/tasks/mod.rs b/meilisearch-lib/src/tasks/mod.rs index bc01c4901..faa35f2da 100644 --- a/meilisearch-lib/src/tasks/mod.rs +++ b/meilisearch-lib/src/tasks/mod.rs @@ -1,6 +1,6 @@ use async_trait::async_trait; -pub use batch_handlers::empty_handler::EmptyBatchHandler; +pub use batch_handlers::{dump_handler::DumpHandler, empty_handler::EmptyBatchHandler}; pub use scheduler::Scheduler; pub use task_store::TaskFilter; diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index cb5ba671a..41a536a1e 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -1,5 +1,3 @@ -use std::path::PathBuf; - use meilisearch_error::ResponseError; use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; use serde::{Deserialize, Serialize}; @@ -142,10 +140,7 @@ pub enum TaskContent { IndexUpdate { primary_key: Option, }, - Dump { - #[cfg_attr(test, proptest(value = "PathBuf::from(\".\")"))] - path: PathBuf, - }, + Dump, } #[cfg(test)] From 56eb2907c9b8809b663111f3bb38492a9d7660b1 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 19 May 2022 14:59:59 +0200 Subject: [PATCH 024/185] dump indexes --- meilisearch-lib/src/dump/mod.rs | 16 +++++++++++--- 
meilisearch-lib/src/index_controller/mod.rs | 1 + .../src/tasks/batch_handlers/dump_handler.rs | 22 ++++++++++++++++--- 3 files changed, 33 insertions(+), 6 deletions(-) diff --git a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index 59b51a601..05deb8a40 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -1,5 +1,6 @@ use std::fs::File; use std::path::{Path, PathBuf}; +use std::sync::Arc; use anyhow::bail; use log::{info, trace}; @@ -13,6 +14,9 @@ use tokio::fs::create_dir_all; use crate::analytics; use crate::compression::{from_tar_gz, to_tar_gz}; use crate::dump::error::DumpError; +use crate::index_resolver::index_store::IndexStore; +use crate::index_resolver::meta_store::IndexMetaStore; +use crate::index_resolver::IndexResolver; use crate::options::IndexerOpts; use crate::update_file_store::UpdateFileStore; use error::Result; @@ -255,16 +259,21 @@ fn persist_dump(dst_path: impl AsRef, tmp_dst: TempDir) -> anyhow::Result< Ok(()) } -pub struct DumpJob { +pub struct DumpJob { pub dump_path: PathBuf, pub db_path: PathBuf, pub update_file_store: UpdateFileStore, pub uid: String, pub update_db_size: usize, pub index_db_size: usize, + pub index_resolver: Arc>, } -impl DumpJob { +impl DumpJob +where + U: IndexMetaStore, + I: IndexStore, +{ pub async fn run(self) -> Result<()> { trace!("Performing dump."); @@ -281,8 +290,9 @@ impl DumpJob { create_dir_all(&temp_dump_path.join("indexes")).await?; + // TODO: this is blocking!! AuthController::dump(&self.db_path, &temp_dump_path)?; - // TODO: Dump indexes and updates + self.index_resolver.dump(&self.dump_path).await?; //TODO(marin): this is not right, the scheduler should dump itself, not do it here... 
// self.scheduler diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index de4426f81..f89ebec4e 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -228,6 +228,7 @@ impl IndexControllerBuilder { db_path.as_ref().clone(), index_size, task_store_size, + index_resolver.clone(), )); let task_store = TaskStore::new(meta_env)?; diff --git a/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs b/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs index c0ef70ba8..057cf274f 100644 --- a/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs +++ b/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs @@ -1,15 +1,20 @@ use std::path::{Path, PathBuf}; +use std::sync::Arc; use log::{error, trace}; use time::{macros::format_description, OffsetDateTime}; use crate::dump::DumpJob; +use crate::index_resolver::index_store::IndexStore; +use crate::index_resolver::meta_store::IndexMetaStore; +use crate::index_resolver::IndexResolver; use crate::tasks::batch::{Batch, BatchContent}; use crate::tasks::BatchHandler; use crate::update_file_store::UpdateFileStore; -pub struct DumpHandler { +pub struct DumpHandler { update_file_store: UpdateFileStore, + index_resolver: Arc>, dump_path: PathBuf, db_path: PathBuf, update_db_size: usize, @@ -25,13 +30,18 @@ fn generate_uid() -> String { .unwrap() } -impl DumpHandler { +impl DumpHandler +where + U: IndexMetaStore + Send + Sync + 'static, + I: IndexStore + Send + Sync + 'static, +{ pub fn new( update_file_store: UpdateFileStore, dump_path: impl AsRef, db_path: impl AsRef, index_db_size: usize, update_db_size: usize, + index_resolver: Arc>, ) -> Self { Self { update_file_store, @@ -39,6 +49,7 @@ impl DumpHandler { db_path: db_path.as_ref().into(), index_db_size, update_db_size, + index_resolver, } } @@ -52,6 +63,7 @@ impl DumpHandler { uid: uid.clone(), update_db_size: self.update_db_size, index_db_size: self.index_db_size, 
+ index_resolver: self.index_resolver.clone(), }; let task_result = tokio::task::spawn_local(task.run()).await; @@ -71,7 +83,11 @@ impl DumpHandler { } #[async_trait::async_trait] -impl BatchHandler for DumpHandler { +impl BatchHandler for DumpHandler +where + U: IndexMetaStore + Send + Sync + 'static, + I: IndexStore + Send + Sync + 'static, +{ fn accept(&self, batch: &Batch) -> bool { matches!(batch.content, BatchContent::Dump { .. }) } From 57fde30b918f6c8bb9775dd46a78ca26344614d0 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 19 May 2022 20:18:43 +0200 Subject: [PATCH 025/185] handle dump --- meilisearch-http/src/task.rs | 6 +- meilisearch-lib/src/dump/message.rs | 1 - meilisearch-lib/src/dump/mod.rs | 46 +++++--- meilisearch-lib/src/index_controller/mod.rs | 22 ++-- .../src/tasks/batch_handlers/dump_handler.rs | 103 +++--------------- meilisearch-lib/src/tasks/mod.rs | 2 +- meilisearch-lib/src/tasks/scheduler.rs | 6 - meilisearch-lib/src/tasks/task.rs | 20 +++- meilisearch-lib/src/tasks/task_store/mod.rs | 24 ++-- 9 files changed, 94 insertions(+), 136 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 3febe002f..f10d7e110 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -82,6 +82,8 @@ enum TaskDetails { }, #[serde(rename_all = "camelCase")] ClearAll { deleted_documents: Option }, + #[serde(rename_all = "camelCase")] + Dump { dump_uid: String }, } /// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for @@ -218,7 +220,9 @@ impl From for TaskView { TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }), ), - TaskContent::Dump => (TaskType::Dump, None), + TaskContent::Dump { uid } => { + (TaskType::Dump, Some(TaskDetails::Dump { dump_uid: uid })) + } }; // An event always has at least one event: "Created" diff --git a/meilisearch-lib/src/dump/message.rs b/meilisearch-lib/src/dump/message.rs index 6c9dded9f..8ebeb3b57 100644 --- 
a/meilisearch-lib/src/dump/message.rs +++ b/meilisearch-lib/src/dump/message.rs @@ -1,7 +1,6 @@ use tokio::sync::oneshot; use super::error::Result; -use super::DumpInfo; pub enum DumpMsg { CreateDump { diff --git a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index 05deb8a40..b14b356fd 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -5,10 +5,12 @@ use std::sync::Arc; use anyhow::bail; use log::{info, trace}; use meilisearch_auth::AuthController; +use milli::heed::Env; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use tempfile::TempDir; +use time::macros::format_description; use tokio::fs::create_dir_all; use crate::analytics; @@ -18,6 +20,7 @@ use crate::index_resolver::index_store::IndexStore; use crate::index_resolver::meta_store::IndexMetaStore; use crate::index_resolver::IndexResolver; use crate::options::IndexerOpts; +use crate::tasks::TaskStore; use crate::update_file_store::UpdateFileStore; use error::Result; @@ -259,22 +262,31 @@ fn persist_dump(dst_path: impl AsRef, tmp_dst: TempDir) -> anyhow::Result< Ok(()) } -pub struct DumpJob { +/// Generate uid from creation date +pub fn generate_uid() -> String { + OffsetDateTime::now_utc() + .format(format_description!( + "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" + )) + .unwrap() +} + +pub struct DumpHandler { pub dump_path: PathBuf, pub db_path: PathBuf, pub update_file_store: UpdateFileStore, - pub uid: String, - pub update_db_size: usize, + pub task_store_size: usize, pub index_db_size: usize, + pub env: Arc, pub index_resolver: Arc>, } -impl DumpJob +impl DumpHandler where - U: IndexMetaStore, - I: IndexStore, + U: IndexMetaStore + Sync + Send + 'static, + I: IndexStore + Sync + Send + 'static, { - pub async fn run(self) -> Result<()> { + pub async fn run(&self, uid: String) -> Result<()> { trace!("Performing dump."); 
create_dir_all(&self.dump_path).await?; @@ -282,7 +294,7 @@ where let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; let temp_dump_path = temp_dump_dir.path().to_owned(); - let meta = MetadataVersion::new_v4(self.index_db_size, self.update_db_size); + let meta = MetadataVersion::new_v4(self.index_db_size, self.task_store_size); let meta_path = temp_dump_path.join(META_FILE_NAME); let mut meta_file = File::create(&meta_path)?; serde_json::to_writer(&mut meta_file, &meta)?; @@ -292,25 +304,25 @@ where // TODO: this is blocking!! AuthController::dump(&self.db_path, &temp_dump_path)?; + TaskStore::dump( + self.env.clone(), + &self.dump_path, + self.update_file_store.clone(), + ) + .await?; self.index_resolver.dump(&self.dump_path).await?; - //TODO(marin): this is not right, the scheduler should dump itself, not do it here... - // self.scheduler - // .read() - // .await - // .dump(&temp_dump_path, self.update_file_store.clone()) - // .await?; - + let dump_path = self.dump_path.clone(); let dump_path = tokio::task::spawn_blocking(move || -> Result { // for now we simply copy the updates/updates_files // FIXME: We may copy more files than necessary, if new files are added while we are // performing the dump. We need a way to filter them out. 
- let temp_dump_file = tempfile::NamedTempFile::new_in(&self.dump_path)?; + let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?; to_tar_gz(temp_dump_path, temp_dump_file.path()) .map_err(|e| DumpError::Internal(e.into()))?; - let dump_path = self.dump_path.join(self.uid).with_extension("dump"); + let dump_path = dump_path.join(uid).with_extension("dump"); temp_dump_file.persist(&dump_path)?; Ok(dump_path) diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index f89ebec4e..1eb61d9f0 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -19,7 +19,7 @@ use tokio::time::sleep; use uuid::Uuid; use crate::document_formats::{read_csv, read_json, read_ndjson}; -use crate::dump::load_dump; +use crate::dump::{self, load_dump, DumpHandler}; use crate::index::{ Checked, Document, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings, Unchecked, }; @@ -27,9 +27,7 @@ use crate::options::{IndexerOpts, SchedulerConfig}; use crate::snapshot::{load_snapshot, SnapshotService}; use crate::tasks::error::TaskError; use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskId}; -use crate::tasks::{ - BatchHandler, DumpHandler, EmptyBatchHandler, Scheduler, TaskFilter, TaskStore, -}; +use crate::tasks::{BatchHandler, EmptyBatchHandler, Scheduler, TaskFilter, TaskStore}; use error::Result; use self::error::IndexControllerError; @@ -222,14 +220,15 @@ impl IndexControllerBuilder { .dump_dst .ok_or_else(|| anyhow::anyhow!("Missing dump directory path"))?; - let dump_handler = Arc::new(DumpHandler::new( - update_file_store.clone(), + let dump_handler = Arc::new(DumpHandler { dump_path, - db_path.as_ref().clone(), - index_size, + db_path: db_path.as_ref().into(), + update_file_store: update_file_store.clone(), task_store_size, - index_resolver.clone(), - )); + index_db_size: index_size, + env: meta_env.clone(), + index_resolver: index_resolver.clone(), + }); let 
task_store = TaskStore::new(meta_env)?; // register all the batch handlers for use with the scheduler. @@ -421,7 +420,8 @@ where } pub async fn register_dump_task(&self) -> Result { - let content = TaskContent::Dump; + let uid = dump::generate_uid(); + let content = TaskContent::Dump { uid }; let task = self.task_store.register(None, content).await?; self.scheduler.read().await.notify(); Ok(task) diff --git a/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs b/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs index 057cf274f..fc506522f 100644 --- a/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs +++ b/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs @@ -1,101 +1,34 @@ -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use log::{error, trace}; -use time::{macros::format_description, OffsetDateTime}; - -use crate::dump::DumpJob; +use crate::dump::DumpHandler; use crate::index_resolver::index_store::IndexStore; use crate::index_resolver::meta_store::IndexMetaStore; -use crate::index_resolver::IndexResolver; use crate::tasks::batch::{Batch, BatchContent}; +use crate::tasks::task::{Task, TaskContent, TaskEvent, TaskResult}; use crate::tasks::BatchHandler; -use crate::update_file_store::UpdateFileStore; - -pub struct DumpHandler { - update_file_store: UpdateFileStore, - index_resolver: Arc>, - dump_path: PathBuf, - db_path: PathBuf, - update_db_size: usize, - index_db_size: usize, -} - -/// Generate uid from creation date -fn generate_uid() -> String { - OffsetDateTime::now_utc() - .format(format_description!( - "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" - )) - .unwrap() -} - -impl DumpHandler -where - U: IndexMetaStore + Send + Sync + 'static, - I: IndexStore + Send + Sync + 'static, -{ - pub fn new( - update_file_store: UpdateFileStore, - dump_path: impl AsRef, - db_path: impl AsRef, - index_db_size: usize, - update_db_size: usize, - 
index_resolver: Arc>, - ) -> Self { - Self { - update_file_store, - dump_path: dump_path.as_ref().into(), - db_path: db_path.as_ref().into(), - index_db_size, - update_db_size, - index_resolver, - } - } - - async fn create_dump(&self) { - let uid = generate_uid(); - - let task = DumpJob { - dump_path: self.dump_path.clone(), - db_path: self.db_path.clone(), - update_file_store: self.update_file_store.clone(), - uid: uid.clone(), - update_db_size: self.update_db_size, - index_db_size: self.index_db_size, - index_resolver: self.index_resolver.clone(), - }; - - let task_result = tokio::task::spawn_local(task.run()).await; - - match task_result { - Ok(Ok(())) => { - trace!("Dump succeed"); - } - Ok(Err(e)) => { - error!("Dump failed: {}", e); - } - Err(_) => { - error!("Dump panicked. Dump status set to failed"); - } - }; - } -} #[async_trait::async_trait] impl BatchHandler for DumpHandler where - U: IndexMetaStore + Send + Sync + 'static, - I: IndexStore + Send + Sync + 'static, + U: IndexMetaStore + Sync + Send + 'static, + I: IndexStore + Sync + Send + 'static, { fn accept(&self, batch: &Batch) -> bool { matches!(batch.content, BatchContent::Dump { .. }) } - async fn process_batch(&self, batch: Batch) -> Batch { - match batch.content { - BatchContent::Dump { .. } => { - self.create_dump().await; + async fn process_batch(&self, mut batch: Batch) -> Batch { + match &batch.content { + BatchContent::Dump(Task { + content: TaskContent::Dump { uid }, + .. 
+ }) => { + match self.run(uid.clone()).await { + Ok(_) => { + batch + .content + .push_event(TaskEvent::succeeded(TaskResult::Other)); + } + Err(e) => batch.content.push_event(TaskEvent::failed(e.into())), + } batch } _ => unreachable!("invalid batch content for dump"), diff --git a/meilisearch-lib/src/tasks/mod.rs b/meilisearch-lib/src/tasks/mod.rs index faa35f2da..bc01c4901 100644 --- a/meilisearch-lib/src/tasks/mod.rs +++ b/meilisearch-lib/src/tasks/mod.rs @@ -1,6 +1,6 @@ use async_trait::async_trait; -pub use batch_handlers::{dump_handler::DumpHandler, empty_handler::EmptyBatchHandler}; +pub use batch_handlers::empty_handler::EmptyBatchHandler; pub use scheduler::Scheduler; pub use task_store::TaskFilter; diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index f3018b782..1b3fd6daa 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; use std::collections::{hash_map::Entry, BinaryHeap, HashMap, VecDeque}; use std::ops::{Deref, DerefMut}; -use std::path::Path; use std::slice; use std::sync::Arc; use std::time::Duration; @@ -13,7 +12,6 @@ use tokio::sync::{watch, RwLock}; use crate::options::SchedulerConfig; use crate::snapshot::SnapshotJob; -use crate::update_file_store::UpdateFileStore; use super::batch::{Batch, BatchContent}; use super::error::Result; @@ -276,10 +274,6 @@ impl Scheduler { Ok(this) } - pub async fn dump(&self, path: &Path, file_store: UpdateFileStore) -> Result<()> { - self.store.dump(path, file_store).await - } - fn register_task(&mut self, task: Task) { assert!(!task.is_finished()); self.tasks.insert(task); diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index 41a536a1e..0e0aa8af2 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -62,6 +62,22 @@ pub enum TaskEvent { }, } +impl TaskEvent { + pub fn succeeded(result: TaskResult) -> Self { + 
Self::Succeded { + result, + timestamp: OffsetDateTime::now_utc(), + } + } + + pub fn failed(error: ResponseError) -> Self { + Self::Failed { + error, + timestamp: OffsetDateTime::now_utc(), + } + } +} + /// A task represents an operation that Meilisearch must do. /// It's stored on disk and executed from the lowest to highest Task id. /// Everytime a new task is created it has a higher Task id than the previous one. @@ -140,7 +156,9 @@ pub enum TaskContent { IndexUpdate { primary_key: Option, }, - Dump, + Dump { + uid: String, + }, } #[cfg(test)] diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index f580c8e26..610a5bdeb 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -204,13 +204,14 @@ impl TaskStore { } pub async fn dump( - &self, + env: Arc, dir_path: impl AsRef, update_file_store: UpdateFileStore, ) -> Result<()> { + let store = Self::new(env)?; let update_dir = dir_path.as_ref().join("updates"); let updates_file = update_dir.join("data.jsonl"); - let tasks = self.list_tasks(None, None, None).await?; + let tasks = store.list_tasks(None, None, None).await?; let dir_path = dir_path.as_ref().to_path_buf(); tokio::task::spawn_blocking(move || -> Result<()> { @@ -287,6 +288,14 @@ pub mod test { Ok(Self::Real(TaskStore::new(env)?)) } + pub async fn dump( + env: Arc, + path: impl AsRef, + update_file_store: UpdateFileStore, + ) -> Result<()> { + TaskStore::dump(env, path, update_file_store).await + } + pub fn mock(mocker: Mocker) -> Self { Self::Mock(Arc::new(mocker)) } @@ -329,17 +338,6 @@ pub mod test { } } - pub async fn dump( - &self, - path: impl AsRef, - update_file_store: UpdateFileStore, - ) -> Result<()> { - match self { - Self::Real(s) => s.dump(path, update_file_store).await, - Self::Mock(m) => unsafe { m.get("dump").call((path, update_file_store)) }, - } - } - pub async fn register( &self, index_uid: Option, From 
4778884105e273711535a0be434045476c6bfb0f Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 19 May 2022 20:19:34 +0200 Subject: [PATCH 026/185] remove dump status route --- meilisearch-http/src/routes/dump.rs | 27 +-------------------------- 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/meilisearch-http/src/routes/dump.rs b/meilisearch-http/src/routes/dump.rs index 7d32fdda5..55469b0b4 100644 --- a/meilisearch-http/src/routes/dump.rs +++ b/meilisearch-http/src/routes/dump.rs @@ -2,7 +2,6 @@ use actix_web::{web, HttpRequest, HttpResponse}; use log::debug; use meilisearch_error::ResponseError; use meilisearch_lib::MeiliSearch; -use serde::{Deserialize, Serialize}; use serde_json::json; use crate::analytics::Analytics; @@ -11,10 +10,7 @@ use crate::extractors::sequential_extractor::SeqHandler; use crate::task::SummarizedTaskView; pub fn configure(cfg: &mut web::ServiceConfig) { - cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump)))) - .service( - web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))), - ); + cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump)))); } pub async fn create_dump( @@ -29,24 +25,3 @@ pub async fn create_dump( debug!("returns: {:?}", res); Ok(HttpResponse::Accepted().json(res)) } - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -struct DumpStatusResponse { - status: String, -} - -#[derive(Deserialize)] -struct DumpParam { - dump_uid: String, -} - -async fn get_dump_status( - meilisearch: GuardedData, MeiliSearch>, - path: web::Path, -) -> Result { - todo!(); - - // debug!("returns: {:?}", res); - // Ok(HttpResponse::Ok().json(res)) -} From 61035a3ea465a6066577b37ce873433f113fb6a7 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 23 May 2022 10:54:49 +0200 Subject: [PATCH 027/185] create dump v5 --- meilisearch-http/src/task.rs | 11 +++++----- meilisearch-lib/src/dump/loaders/v3.rs | 1 + meilisearch-lib/src/dump/loaders/v4.rs | 5 +++-- 
meilisearch-lib/src/dump/mod.rs | 28 ++++++++++++++++---------- 4 files changed, 27 insertions(+), 18 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index f10d7e110..397fed618 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -24,7 +24,7 @@ enum TaskType { DocumentDeletion, SettingsUpdate, ClearAll, - Dump, + DumpCreation, } impl From for TaskType { @@ -44,7 +44,7 @@ impl From for TaskType { TaskContent::IndexDeletion => TaskType::IndexDeletion, TaskContent::IndexCreation { .. } => TaskType::IndexCreation, TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate, - TaskContent::Dump { .. } => TaskType::Dump, + TaskContent::Dump { .. } => TaskType::DumpCreation, _ => unreachable!("unexpected task type"), } } @@ -220,9 +220,10 @@ impl From for TaskView { TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }), ), - TaskContent::Dump { uid } => { - (TaskType::Dump, Some(TaskDetails::Dump { dump_uid: uid })) - } + TaskContent::Dump { uid } => ( + TaskType::DumpCreation, + Some(TaskDetails::Dump { dump_uid: uid }), + ), }; // An event always has at least one event: "Created" diff --git a/meilisearch-lib/src/dump/loaders/v3.rs b/meilisearch-lib/src/dump/loaders/v3.rs index 0a2ea438b..8e76b67e0 100644 --- a/meilisearch-lib/src/dump/loaders/v3.rs +++ b/meilisearch-lib/src/dump/loaders/v3.rs @@ -66,6 +66,7 @@ pub fn load_dump( index_db_size, meta_env_size, indexing_options, + "V5", ) } diff --git a/meilisearch-lib/src/dump/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs index c898f83b1..7f0ade714 100644 --- a/meilisearch-lib/src/dump/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -19,10 +19,11 @@ pub fn load_dump( index_db_size: usize, meta_env_size: usize, indexing_options: &IndexerOpts, + version: &str, ) -> anyhow::Result<()> { info!( - "Loading dump from {}, dump database version: {}, dump version: V4", - meta.dump_date, meta.db_version + "Loading dump from {}, dump 
database version: {}, dump version: {}", + meta.dump_date, meta.db_version, version ); let mut options = EnvOpenOptions::new(); diff --git a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index b14b356fd..084fbd63f 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -69,6 +69,8 @@ pub enum MetadataVersion { V2(Metadata), V3(Metadata), V4(Metadata), + // V5 is forward compatible with V4 but not backward compatible. + V5(Metadata), } impl MetadataVersion { @@ -80,6 +82,7 @@ impl MetadataVersion { meta_env_size: usize, indexing_options: &IndexerOpts, ) -> anyhow::Result<()> { + let version = self.version(); match self { MetadataVersion::V1(_meta) => { anyhow::bail!("The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.") @@ -100,46 +103,49 @@ impl MetadataVersion { meta_env_size, indexing_options, )?, - MetadataVersion::V4(meta) => v4::load_dump( + MetadataVersion::V4(meta) | MetadataVersion::V5(meta) => v4::load_dump( meta, src, dst, index_db_size, meta_env_size, indexing_options, + version, )?, } Ok(()) } - pub fn new_v4(index_db_size: usize, update_db_size: usize) -> Self { + pub fn new_v5(index_db_size: usize, update_db_size: usize) -> Self { let meta = Metadata::new(index_db_size, update_db_size); - Self::V4(meta) + Self::V5(meta) } pub fn db_version(&self) -> &str { match self { Self::V1(meta) => &meta.db_version, - Self::V2(meta) | Self::V3(meta) | Self::V4(meta) => &meta.db_version, + Self::V2(meta) | Self::V3(meta) | Self::V4(meta) | Self::V5(meta) => &meta.db_version, } } - pub fn version(&self) -> &str { + pub fn version(&self) -> &'static str { match self { MetadataVersion::V1(_) => "V1", MetadataVersion::V2(_) => "V2", MetadataVersion::V3(_) => "V3", MetadataVersion::V4(_) => "V4", + MetadataVersion::V5(_) => "V5", } } pub fn dump_date(&self) -> Option<&OffsetDateTime> { match self { 
MetadataVersion::V1(_) => None, - MetadataVersion::V2(meta) | MetadataVersion::V3(meta) | MetadataVersion::V4(meta) => { - Some(&meta.dump_date) - } + MetadataVersion::V2(meta) + | MetadataVersion::V3(meta) + | MetadataVersion::V4(meta) + | MetadataVersion::V5(meta) => Some(&meta.dump_date), } } } @@ -294,7 +300,7 @@ where let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; let temp_dump_path = temp_dump_dir.path().to_owned(); - let meta = MetadataVersion::new_v4(self.index_db_size, self.task_store_size); + let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size); let meta_path = temp_dump_path.join(META_FILE_NAME); let mut meta_file = File::create(&meta_path)?; serde_json::to_writer(&mut meta_file, &meta)?; @@ -306,11 +312,11 @@ where AuthController::dump(&self.db_path, &temp_dump_path)?; TaskStore::dump( self.env.clone(), - &self.dump_path, + &temp_dump_path, self.update_file_store.clone(), ) .await?; - self.index_resolver.dump(&self.dump_path).await?; + self.index_resolver.dump(&temp_dump_path).await?; let dump_path = self.dump_path.clone(); let dump_path = tokio::task::spawn_blocking(move || -> Result { From f0aceb4fba7a0598874803bf0fbc99db605e7486 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 23 May 2022 15:58:02 +0200 Subject: [PATCH 028/185] remove unused files --- meilisearch-lib/src/dump/actor.rs | 190 ------------------------ meilisearch-lib/src/dump/handle_impl.rs | 26 ---- meilisearch-lib/src/dump/message.rs | 13 -- 3 files changed, 229 deletions(-) delete mode 100644 meilisearch-lib/src/dump/actor.rs delete mode 100644 meilisearch-lib/src/dump/handle_impl.rs delete mode 100644 meilisearch-lib/src/dump/message.rs diff --git a/meilisearch-lib/src/dump/actor.rs b/meilisearch-lib/src/dump/actor.rs deleted file mode 100644 index b7f615e44..000000000 --- a/meilisearch-lib/src/dump/actor.rs +++ /dev/null @@ -1,190 +0,0 @@ -use std::collections::HashMap; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - 
-use async_stream::stream; -use futures::{lock::Mutex, stream::StreamExt}; -use log::{error, trace}; -use time::macros::format_description; -use time::OffsetDateTime; -use tokio::sync::{mpsc, oneshot, RwLock}; - -use super::error::{DumpError, Result}; -use super::{DumpInfo, DumpJob, DumpMsg, DumpStatus}; -use crate::tasks::Scheduler; -use crate::update_file_store::UpdateFileStore; - -pub const CONCURRENT_DUMP_MSG: usize = 10; - -pub struct DumpActor { - inbox: Option>, - update_file_store: UpdateFileStore, - scheduler: Arc>, - dump_path: PathBuf, - analytics_path: PathBuf, - lock: Arc>, - dump_infos: Arc>>, - update_db_size: usize, - index_db_size: usize, -} - -/// Generate uid from creation date -fn generate_uid() -> String { - OffsetDateTime::now_utc() - .format(format_description!( - "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" - )) - .unwrap() -} - -impl DumpActor { - pub fn new( - inbox: mpsc::Receiver, - update_file_store: UpdateFileStore, - scheduler: Arc>, - dump_path: impl AsRef, - analytics_path: impl AsRef, - index_db_size: usize, - update_db_size: usize, - ) -> Self { - let dump_infos = Arc::new(RwLock::new(HashMap::new())); - let lock = Arc::new(Mutex::new(())); - Self { - inbox: Some(inbox), - scheduler, - update_file_store, - dump_path: dump_path.as_ref().into(), - analytics_path: analytics_path.as_ref().into(), - dump_infos, - lock, - index_db_size, - update_db_size, - } - } - - pub async fn run(mut self) { - trace!("Started dump actor."); - - let mut inbox = self - .inbox - .take() - .expect("Dump Actor must have a inbox at this point."); - - let stream = stream! 
{ - loop { - match inbox.recv().await { - Some(msg) => yield msg, - None => break, - } - } - }; - - stream - .for_each_concurrent(Some(CONCURRENT_DUMP_MSG), |msg| self.handle_message(msg)) - .await; - - error!("Dump actor stopped."); - } - - async fn handle_message(&self, msg: DumpMsg) { - use DumpMsg::*; - - match msg { - CreateDump { ret } => { - let _ = self.handle_create_dump(ret).await; - } - DumpInfo { ret, uid } => { - let _ = ret.send(self.handle_dump_info(uid).await); - } - } - } - - async fn handle_create_dump(&self, ret: oneshot::Sender>) { - let uid = generate_uid(); - let info = DumpInfo::new(uid.clone(), DumpStatus::InProgress); - - let _lock = match self.lock.try_lock() { - Some(lock) => lock, - None => { - ret.send(Err(DumpError::DumpAlreadyRunning)) - .expect("Dump actor is dead"); - return; - } - }; - - self.dump_infos - .write() - .await - .insert(uid.clone(), info.clone()); - - ret.send(Ok(info)).expect("Dump actor is dead"); - - let task = DumpJob { - dump_path: self.dump_path.clone(), - db_path: self.analytics_path.clone(), - update_file_store: self.update_file_store.clone(), - uid: uid.clone(), - update_db_size: self.update_db_size, - index_db_size: self.index_db_size, - }; - - let task_result = tokio::task::spawn_local(task.run()).await; - - let mut dump_infos = self.dump_infos.write().await; - let dump_infos = dump_infos - .get_mut(&uid) - .expect("dump entry deleted while lock was acquired"); - - match task_result { - Ok(Ok(())) => { - dump_infos.done(); - trace!("Dump succeed"); - } - Ok(Err(e)) => { - dump_infos.with_error(e.to_string()); - error!("Dump failed: {}", e); - } - Err(_) => { - dump_infos.with_error("Unexpected error while performing dump.".to_string()); - error!("Dump panicked. 
Dump status set to failed"); - } - }; - } - - async fn handle_dump_info(&self, uid: String) -> Result { - match self.dump_infos.read().await.get(&uid) { - Some(info) => Ok(info.clone()), - _ => Err(DumpError::DumpDoesNotExist(uid)), - } - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_generate_uid() { - let current = OffsetDateTime::now_utc(); - - let uid = generate_uid(); - let (date, time) = uid.split_once('-').unwrap(); - - let date = time::Date::parse( - date, - &format_description!("[year repr:full][month repr:numerical][day padding:zero]"), - ) - .unwrap(); - let time = time::Time::parse( - time, - &format_description!( - "[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" - ), - ) - .unwrap(); - let datetime = time::PrimitiveDateTime::new(date, time); - let datetime = datetime.assume_utc(); - - assert!(current - datetime < time::Duration::SECOND); - } -} diff --git a/meilisearch-lib/src/dump/handle_impl.rs b/meilisearch-lib/src/dump/handle_impl.rs deleted file mode 100644 index 9577b3663..000000000 --- a/meilisearch-lib/src/dump/handle_impl.rs +++ /dev/null @@ -1,26 +0,0 @@ -use tokio::sync::{mpsc, oneshot}; - -use super::error::Result; -use super::{DumpActorHandle, DumpMsg}; - -#[derive(Clone)] -pub struct DumpActorHandleImpl { - pub sender: mpsc::Sender, -} - -#[async_trait::async_trait] -impl DumpActorHandle for DumpActorHandleImpl { - async fn create_dump(&self) -> Result { - let (ret, receiver) = oneshot::channel(); - let msg = DumpMsg::CreateDump { ret }; - let _ = self.sender.send(msg).await; - receiver.await.expect("IndexActor has been killed") - } - - async fn dump_info(&self, uid: String) -> Result { - let (ret, receiver) = oneshot::channel(); - let msg = DumpMsg::DumpInfo { ret, uid }; - let _ = self.sender.send(msg).await; - receiver.await.expect("IndexActor has been killed") - } -} diff --git a/meilisearch-lib/src/dump/message.rs b/meilisearch-lib/src/dump/message.rs deleted file mode 
100644 index 8ebeb3b57..000000000 --- a/meilisearch-lib/src/dump/message.rs +++ /dev/null @@ -1,13 +0,0 @@ -use tokio::sync::oneshot; - -use super::error::Result; - -pub enum DumpMsg { - CreateDump { - ret: oneshot::Sender>, - }, - DumpInfo { - uid: String, - ret: oneshot::Sender>, - }, -} From 8743d73973130a4416287c0e97fe654c2c5669d8 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 23 May 2022 16:01:43 +0200 Subject: [PATCH 029/185] move DumpHandler to own module --- meilisearch-lib/src/dump/handler.rs | 89 ++++++++++++++++++++++++++ meilisearch-lib/src/dump/mod.rs | 98 ++--------------------------- 2 files changed, 95 insertions(+), 92 deletions(-) create mode 100644 meilisearch-lib/src/dump/handler.rs diff --git a/meilisearch-lib/src/dump/handler.rs b/meilisearch-lib/src/dump/handler.rs new file mode 100644 index 000000000..b168e162a --- /dev/null +++ b/meilisearch-lib/src/dump/handler.rs @@ -0,0 +1,89 @@ +use std::{fs::File, path::PathBuf, sync::Arc}; + +use log::{info, trace}; +use meilisearch_auth::AuthController; +use milli::heed::Env; +use time::{macros::format_description, OffsetDateTime}; +use tokio::fs::create_dir_all; + +use crate::analytics; +use crate::compression::to_tar_gz; +use crate::dump::error::{DumpError, Result}; +use crate::dump::{MetadataVersion, META_FILE_NAME}; +use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore, IndexResolver}; +use crate::tasks::TaskStore; +use crate::update_file_store::UpdateFileStore; + +/// Generate uid from creation date +pub fn generate_uid() -> String { + OffsetDateTime::now_utc() + .format(format_description!( + "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" + )) + .unwrap() +} + +pub struct DumpHandler { + pub dump_path: PathBuf, + pub db_path: PathBuf, + pub update_file_store: UpdateFileStore, + pub task_store_size: usize, + pub index_db_size: usize, + pub env: Arc, + pub index_resolver: 
Arc>, +} + +impl DumpHandler +where + U: IndexMetaStore + Sync + Send + 'static, + I: IndexStore + Sync + Send + 'static, +{ + pub async fn run(&self, uid: String) -> Result<()> { + trace!("Performing dump."); + + create_dir_all(&self.dump_path).await?; + + let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; + let temp_dump_path = temp_dump_dir.path().to_owned(); + + let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size); + let meta_path = temp_dump_path.join(META_FILE_NAME); + // TODO: blocking + let mut meta_file = File::create(&meta_path)?; + serde_json::to_writer(&mut meta_file, &meta)?; + analytics::copy_user_id(&self.db_path, &temp_dump_path); + + create_dir_all(&temp_dump_path.join("indexes")).await?; + + // TODO: this is blocking!! + AuthController::dump(&self.db_path, &temp_dump_path)?; + TaskStore::dump( + self.env.clone(), + &temp_dump_path, + self.update_file_store.clone(), + ) + .await?; + self.index_resolver.dump(&temp_dump_path).await?; + + let dump_path = self.dump_path.clone(); + let dump_path = tokio::task::spawn_blocking(move || -> Result { + // for now we simply copy the updates/updates_files + // FIXME: We may copy more files than necessary, if new files are added while we are + // performing the dump. We need a way to filter them out. 
+ + let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?; + to_tar_gz(temp_dump_path, temp_dump_file.path()) + .map_err(|e| DumpError::Internal(e.into()))?; + + let dump_path = dump_path.join(uid).with_extension("dump"); + temp_dump_file.persist(&dump_path)?; + + Ok(dump_path) + }) + .await??; + + info!("Created dump in {:?}.", dump_path); + + Ok(()) + } +} diff --git a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index 084fbd63f..c80554301 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -1,37 +1,24 @@ use std::fs::File; -use std::path::{Path, PathBuf}; -use std::sync::Arc; +use std::path::Path; use anyhow::bail; -use log::{info, trace}; -use meilisearch_auth::AuthController; -use milli::heed::Env; +use log::info; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use tempfile::TempDir; -use time::macros::format_description; -use tokio::fs::create_dir_all; -use crate::analytics; -use crate::compression::{from_tar_gz, to_tar_gz}; -use crate::dump::error::DumpError; -use crate::index_resolver::index_store::IndexStore; -use crate::index_resolver::meta_store::IndexMetaStore; -use crate::index_resolver::IndexResolver; +use crate::compression::from_tar_gz; use crate::options::IndexerOpts; -use crate::tasks::TaskStore; -use crate::update_file_store::UpdateFileStore; -use error::Result; use self::loaders::{v2, v3, v4}; -// mod actor; +pub use handler::{generate_uid, DumpHandler}; + mod compat; pub mod error; -// mod handle_impl; +mod handler; mod loaders; -// mod message; const META_FILE_NAME: &str = "metadata.json"; @@ -268,79 +255,6 @@ fn persist_dump(dst_path: impl AsRef, tmp_dst: TempDir) -> anyhow::Result< Ok(()) } -/// Generate uid from creation date -pub fn generate_uid() -> String { - OffsetDateTime::now_utc() - .format(format_description!( - "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" - 
)) - .unwrap() -} - -pub struct DumpHandler { - pub dump_path: PathBuf, - pub db_path: PathBuf, - pub update_file_store: UpdateFileStore, - pub task_store_size: usize, - pub index_db_size: usize, - pub env: Arc, - pub index_resolver: Arc>, -} - -impl DumpHandler -where - U: IndexMetaStore + Sync + Send + 'static, - I: IndexStore + Sync + Send + 'static, -{ - pub async fn run(&self, uid: String) -> Result<()> { - trace!("Performing dump."); - - create_dir_all(&self.dump_path).await?; - - let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; - let temp_dump_path = temp_dump_dir.path().to_owned(); - - let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size); - let meta_path = temp_dump_path.join(META_FILE_NAME); - let mut meta_file = File::create(&meta_path)?; - serde_json::to_writer(&mut meta_file, &meta)?; - analytics::copy_user_id(&self.db_path, &temp_dump_path); - - create_dir_all(&temp_dump_path.join("indexes")).await?; - - // TODO: this is blocking!! - AuthController::dump(&self.db_path, &temp_dump_path)?; - TaskStore::dump( - self.env.clone(), - &temp_dump_path, - self.update_file_store.clone(), - ) - .await?; - self.index_resolver.dump(&temp_dump_path).await?; - - let dump_path = self.dump_path.clone(); - let dump_path = tokio::task::spawn_blocking(move || -> Result { - // for now we simply copy the updates/updates_files - // FIXME: We may copy more files than necessary, if new files are added while we are - // performing the dump. We need a way to filter them out. 
- - let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?; - to_tar_gz(temp_dump_path, temp_dump_file.path()) - .map_err(|e| DumpError::Internal(e.into()))?; - - let dump_path = dump_path.join(uid).with_extension("dump"); - temp_dump_file.persist(&dump_path)?; - - Ok(dump_path) - }) - .await??; - - info!("Created dump in {:?}.", dump_path); - - Ok(()) - } -} - #[cfg(test)] mod test { use nelson::Mocker; From 7b47e4e87a4c4d80c718fb2b2c3175a598a5bb50 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 23 May 2022 16:30:06 +0200 Subject: [PATCH 030/185] snapshot batch handler --- meilisearch-lib/src/index_controller/mod.rs | 5 ++- meilisearch-lib/src/snapshot.rs | 2 +- .../src/tasks/batch_handlers/mod.rs | 1 + .../tasks/batch_handlers/snapshot_handler.rs | 31 +++++++++++++++++++ meilisearch-lib/src/tasks/mod.rs | 1 + meilisearch-lib/src/tasks/scheduler.rs | 6 +--- 6 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 meilisearch-lib/src/tasks/batch_handlers/snapshot_handler.rs diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 1eb61d9f0..039bd8dfa 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -27,7 +27,9 @@ use crate::options::{IndexerOpts, SchedulerConfig}; use crate::snapshot::{load_snapshot, SnapshotService}; use crate::tasks::error::TaskError; use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskId}; -use crate::tasks::{BatchHandler, EmptyBatchHandler, Scheduler, TaskFilter, TaskStore}; +use crate::tasks::{ + BatchHandler, EmptyBatchHandler, Scheduler, SnapshotHandler, TaskFilter, TaskStore, +}; use error::Result; use self::error::IndexControllerError; @@ -235,6 +237,7 @@ impl IndexControllerBuilder { let handlers: Vec> = vec![ index_resolver.clone(), dump_handler, + Arc::new(SnapshotHandler), // dummy handler to catch all empty batches Arc::new(EmptyBatchHandler), ]; diff --git 
a/meilisearch-lib/src/snapshot.rs b/meilisearch-lib/src/snapshot.rs index 6dda0f3e8..527195729 100644 --- a/meilisearch-lib/src/snapshot.rs +++ b/meilisearch-lib/src/snapshot.rs @@ -38,7 +38,7 @@ impl SnapshotService { meta_env_size: self.meta_env_size, index_size: self.index_size, }; - self.scheduler.write().await.register_snapshot(snapshot_job); + self.scheduler.write().await.schedule_snapshot(snapshot_job); sleep(self.snapshot_period).await; } } diff --git a/meilisearch-lib/src/tasks/batch_handlers/mod.rs b/meilisearch-lib/src/tasks/batch_handlers/mod.rs index f72c1b760..9199e872d 100644 --- a/meilisearch-lib/src/tasks/batch_handlers/mod.rs +++ b/meilisearch-lib/src/tasks/batch_handlers/mod.rs @@ -1,3 +1,4 @@ pub mod dump_handler; pub mod empty_handler; mod index_resolver_handler; +pub mod snapshot_handler; diff --git a/meilisearch-lib/src/tasks/batch_handlers/snapshot_handler.rs b/meilisearch-lib/src/tasks/batch_handlers/snapshot_handler.rs new file mode 100644 index 000000000..2948fb4ff --- /dev/null +++ b/meilisearch-lib/src/tasks/batch_handlers/snapshot_handler.rs @@ -0,0 +1,31 @@ +use crate::tasks::batch::{Batch, BatchContent}; +use crate::tasks::BatchHandler; + +pub struct SnapshotHandler; + +#[async_trait::async_trait] +impl BatchHandler for SnapshotHandler { + fn accept(&self, batch: &Batch) -> bool { + match batch.content { + BatchContent::Snapshot(_) => true, + _ => false, + } + } + + async fn process_batch(&self, batch: Batch) -> Batch { + match batch.content { + BatchContent::Snapshot(job) => { + if let Err(e) = job.run().await { + log::error!("snapshot error: {e}"); + } + } + _ => unreachable!(), + } + + Batch::empty() + } + + async fn finish(&self, _: &Batch) { + () + } +} diff --git a/meilisearch-lib/src/tasks/mod.rs b/meilisearch-lib/src/tasks/mod.rs index bc01c4901..4c51ec207 100644 --- a/meilisearch-lib/src/tasks/mod.rs +++ b/meilisearch-lib/src/tasks/mod.rs @@ -1,6 +1,7 @@ use async_trait::async_trait; pub use 
batch_handlers::empty_handler::EmptyBatchHandler; +pub use batch_handlers::snapshot_handler::SnapshotHandler; pub use scheduler::Scheduler; pub use task_store::TaskFilter; diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 1b3fd6daa..6089efd7f 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -279,10 +279,6 @@ impl Scheduler { self.tasks.insert(task); } - pub fn register_snapshot(&mut self, job: SnapshotJob) { - self.snapshots.push_back(job); - } - /// Clears the processing list, this method should be called when the processing of a batch is finished. pub fn finish(&mut self) { self.processing = Processing::Nothing; @@ -340,7 +336,7 @@ impl Scheduler { Ok(tasks) } - pub async fn schedule_snapshot(&mut self, job: SnapshotJob) { + pub fn schedule_snapshot(&mut self, job: SnapshotJob) { self.snapshots.push_back(job); self.notify(); } From 0f9c134114051432090f395ff1f5ae2cc33eb121 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 24 May 2022 16:47:10 +0200 Subject: [PATCH 031/185] fix tests --- meilisearch-http/tests/auth/authorization.rs | 1 - meilisearch-lib/src/dump/mod.rs | 118 ------------------- meilisearch-lib/src/index_controller/mod.rs | 15 +-- meilisearch-lib/src/index_resolver/mod.rs | 50 ++++---- meilisearch-lib/src/tasks/scheduler.rs | 33 ++---- 5 files changed, 45 insertions(+), 172 deletions(-) diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 30df2dd2d..25f32eb12 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -45,7 +45,6 @@ pub static AUTHORIZATIONS: Lazy hashset!{"stats.get", "*"}, ("GET", "/stats") => hashset!{"stats.get", "*"}, ("POST", "/dumps") => hashset!{"dumps.create", "*"}, - ("GET", "/dumps/0/status") => hashset!{"dumps.get", "*"}, ("GET", "/version") => hashset!{"version", "*"}, } }); diff --git 
a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index c80554301..ab1c63d6d 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -254,121 +254,3 @@ fn persist_dump(dst_path: impl AsRef, tmp_dst: TempDir) -> anyhow::Result< Ok(()) } - -#[cfg(test)] -mod test { - use nelson::Mocker; - use once_cell::sync::Lazy; - - use super::*; - use crate::index_resolver::error::IndexResolverError; - use crate::options::SchedulerConfig; - use crate::tasks::error::Result as TaskResult; - use crate::tasks::task::{Task, TaskId}; - use crate::tasks::{BatchHandler, TaskFilter, TaskStore}; - use crate::update_file_store::UpdateFileStore; - - fn setup() { - static SETUP: Lazy<()> = Lazy::new(|| { - if cfg!(windows) { - std::env::set_var("TMP", "."); - } else { - std::env::set_var("TMPDIR", "."); - } - }); - - // just deref to make sure the env is setup - *SETUP - } - - #[actix_rt::test] - async fn test_dump_normal() { - setup(); - - let tmp = tempfile::tempdir().unwrap(); - - let mocker = Mocker::default(); - let update_file_store = UpdateFileStore::mock(mocker); - - let mut performer = BatchHandler::new(); - performer - .expect_process_job() - .once() - .returning(|j| match j { - Job::Dump { ret, .. 
} => { - let (sender, _receiver) = oneshot::channel(); - ret.send(Ok(sender)).unwrap(); - } - _ => unreachable!(), - }); - let performer = Arc::new(performer); - let mocker = Mocker::default(); - mocker - .when::<(&Path, UpdateFileStore), TaskResult<()>>("dump") - .then(|_| Ok(())); - mocker - .when::<(Option, Option, Option), TaskResult>>( - "list_tasks", - ) - .then(|_| Ok(Vec::new())); - let store = TaskStore::mock(mocker); - let config = SchedulerConfig::default(); - - let scheduler = Scheduler::new(store, performer, config).unwrap(); - - let task = DumpJob { - dump_path: tmp.path().into(), - // this should do nothing - update_file_store, - db_path: tmp.path().into(), - uid: String::from("test"), - update_db_size: 4096 * 10, - index_db_size: 4096 * 10, - scheduler, - }; - - task.run().await.unwrap(); - } - - #[actix_rt::test] - async fn error_performing_dump() { - let tmp = tempfile::tempdir().unwrap(); - - let mocker = Mocker::default(); - let file_store = UpdateFileStore::mock(mocker); - - let mocker = Mocker::default(); - mocker - .when::<(Option, Option, Option), TaskResult>>( - "list_tasks", - ) - .then(|_| Ok(Vec::new())); - let task_store = TaskStore::mock(mocker); - let mut performer = BatchHandler::new(); - performer - .expect_process_job() - .once() - .returning(|job| match job { - Job::Dump { ret, .. 
} => drop(ret.send(Err(IndexResolverError::BadlyFormatted( - "blabla".to_string(), - )))), - _ => unreachable!(), - }); - let performer = Arc::new(performer); - - let scheduler = Scheduler::new(task_store, performer, SchedulerConfig::default()).unwrap(); - - let task = DumpJob { - dump_path: tmp.path().into(), - // this should do nothing - db_path: tmp.path().into(), - update_file_store: file_store, - uid: String::from("test"), - update_db_size: 4096 * 10, - index_db_size: 4096 * 10, - scheduler, - }; - - assert!(task.run().await.is_err()); - } -} diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 039bd8dfa..b34523fd5 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -664,13 +664,11 @@ mod test { index_resolver: Arc>, task_store: TaskStore, update_file_store: UpdateFileStore, - dump_handle: DumpActorHandleImpl, scheduler: Arc>, ) -> Self { IndexController { index_resolver, task_store, - dump_handle, update_file_store, scheduler, } @@ -754,19 +752,12 @@ mod test { let task_store = TaskStore::mock(task_store_mocker); let scheduler = Scheduler::new( task_store.clone(), - index_resolver.clone(), + vec![index_resolver.clone()], SchedulerConfig::default(), ) .unwrap(); - let (sender, _) = mpsc::channel(1); - let dump_handle = DumpActorHandleImpl { sender }; - let index_controller = IndexController::mock( - index_resolver, - task_store, - update_file_store, - dump_handle, - scheduler, - ); + let index_controller = + IndexController::mock(index_resolver, task_store, update_file_store, scheduler); let r = index_controller .search(index_uid.to_owned(), query.clone()) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index f463cd24d..cc0308f9e 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -411,15 +411,21 @@ mod test { use nelson::Mocker; use 
proptest::prelude::*; - use crate::index::{ - error::{IndexError, Result as IndexResult}, - Checked, IndexMeta, IndexStats, Settings, + use crate::{ + index::{ + error::{IndexError, Result as IndexResult}, + Checked, IndexMeta, IndexStats, Settings, + }, + tasks::{batch::Batch, BatchHandler}, }; use index_store::MockIndexStore; use meta_store::MockIndexMetaStore; + // TODO: ignoring this test, it has become too complex to maintain, and rather implement + // handler logic test. proptest! { #[test] + #[ignore] fn test_process_task( task in any::().prop_filter("IndexUid should be Some", |s| s.index_uid.is_some()), index_exists in any::(), @@ -497,7 +503,7 @@ mod test { .then(move |_| result()); } } - TaskContent::Dump { path: _ } => { } + TaskContent::Dump { .. } => { } } mocker.when::<(), IndexResult>("stats") @@ -561,24 +567,26 @@ mod test { let update_file_store = UpdateFileStore::mock(mocker); let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store); - let batch = Batch { id: 1, created_at: OffsetDateTime::now_utc(), tasks: vec![task.clone()] }; - let result = index_resolver.process_batch(batch).await; + let batch = Batch { id: Some(1), created_at: OffsetDateTime::now_utc(), content: crate::tasks::batch::BatchContent::IndexUpdate(task.clone()) }; + if index_resolver.accept(&batch) { + let result = index_resolver.process_batch(batch).await; - // Test for some expected output scenarios: - // Index creation and deletion cannot fail because of a failed index op, since they - // don't perform index ops. - if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. }) - || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. 
})) - || (!index_exists && matches!(task.content, TaskContent::IndexDeletion - | TaskContent::DocumentDeletion(_) - | TaskContent::SettingsUpdate { is_deletion: true, ..} - | TaskContent::SettingsUpdate { allow_index_creation: false, ..} - | TaskContent::DocumentAddition { allow_index_creation: false, ..} - | TaskContent::IndexUpdate { .. } )) - { - assert!(matches!(result.tasks[0].events.last().unwrap(), TaskEvent::Failed { .. }), "{:?}", result); - } else { - assert!(matches!(result.tasks[0].events.last().unwrap(), TaskEvent::Succeded { .. }), "{:?}", result); + // Test for some expected output scenarios: + // Index creation and deletion cannot fail because of a failed index op, since they + // don't perform index ops. + if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. }) + || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. })) + || (!index_exists && matches!(task.content, TaskContent::IndexDeletion + | TaskContent::DocumentDeletion(_) + | TaskContent::SettingsUpdate { is_deletion: true, ..} + | TaskContent::SettingsUpdate { allow_index_creation: false, ..} + | TaskContent::DocumentAddition { allow_index_creation: false, ..} + | TaskContent::IndexUpdate { .. } )) + { + assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Failed { .. }), "{:?}", result); + } else { + assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeded { .. 
}), "{:?}", result); + } } }); } diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 6089efd7f..177cc0229 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -411,7 +411,7 @@ impl Scheduler { } } -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub enum Processing { DocumentAdditions(Vec), IndexUpdate(TaskId), @@ -586,31 +586,24 @@ mod test { queue.insert(gen_task(6, "test2", content.clone())); queue.insert(gen_task(7, "test1", content)); - let mut batch = Vec::new(); - let config = SchedulerConfig::default(); - make_batch(&mut queue, &mut batch, &config); - assert_eq!(batch, &[0, 4]); + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::DocumentAdditions(vec![0, 4])); - batch.clear(); - make_batch(&mut queue, &mut batch, &config); - assert_eq!(batch, &[1]); + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::DocumentAdditions(vec![1])); - batch.clear(); - make_batch(&mut queue, &mut batch, &config); - assert_eq!(batch, &[2]); + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::IndexUpdate(2)); - batch.clear(); - make_batch(&mut queue, &mut batch, &config); - assert_eq!(batch, &[3, 6]); + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::DocumentAdditions(vec![3, 6])); - batch.clear(); - make_batch(&mut queue, &mut batch, &config); - assert_eq!(batch, &[5]); + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::IndexUpdate(5)); - batch.clear(); - make_batch(&mut queue, &mut batch, &config); - assert_eq!(batch, &[7]); + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::DocumentAdditions(vec![7])); assert!(queue.is_empty()); } From 64654ef7c34665bac3310f4b2a2a8b3bec62498a Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 08:46:06 +0200 Subject: [PATCH 032/185] rename batch_handler to handler 
--- .../src/tasks/{batch_handlers => handlers}/dump_handler.rs | 0 .../src/tasks/{batch_handlers => handlers}/empty_handler.rs | 0 .../{batch_handlers => handlers}/index_resolver_handler.rs | 0 .../src/tasks/{batch_handlers => handlers}/mod.rs | 0 .../tasks/{batch_handlers => handlers}/snapshot_handler.rs | 0 meilisearch-lib/src/tasks/mod.rs | 6 +++--- 6 files changed, 3 insertions(+), 3 deletions(-) rename meilisearch-lib/src/tasks/{batch_handlers => handlers}/dump_handler.rs (100%) rename meilisearch-lib/src/tasks/{batch_handlers => handlers}/empty_handler.rs (100%) rename meilisearch-lib/src/tasks/{batch_handlers => handlers}/index_resolver_handler.rs (100%) rename meilisearch-lib/src/tasks/{batch_handlers => handlers}/mod.rs (100%) rename meilisearch-lib/src/tasks/{batch_handlers => handlers}/snapshot_handler.rs (100%) diff --git a/meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs b/meilisearch-lib/src/tasks/handlers/dump_handler.rs similarity index 100% rename from meilisearch-lib/src/tasks/batch_handlers/dump_handler.rs rename to meilisearch-lib/src/tasks/handlers/dump_handler.rs diff --git a/meilisearch-lib/src/tasks/batch_handlers/empty_handler.rs b/meilisearch-lib/src/tasks/handlers/empty_handler.rs similarity index 100% rename from meilisearch-lib/src/tasks/batch_handlers/empty_handler.rs rename to meilisearch-lib/src/tasks/handlers/empty_handler.rs diff --git a/meilisearch-lib/src/tasks/batch_handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs similarity index 100% rename from meilisearch-lib/src/tasks/batch_handlers/index_resolver_handler.rs rename to meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs diff --git a/meilisearch-lib/src/tasks/batch_handlers/mod.rs b/meilisearch-lib/src/tasks/handlers/mod.rs similarity index 100% rename from meilisearch-lib/src/tasks/batch_handlers/mod.rs rename to meilisearch-lib/src/tasks/handlers/mod.rs diff --git 
a/meilisearch-lib/src/tasks/batch_handlers/snapshot_handler.rs b/meilisearch-lib/src/tasks/handlers/snapshot_handler.rs similarity index 100% rename from meilisearch-lib/src/tasks/batch_handlers/snapshot_handler.rs rename to meilisearch-lib/src/tasks/handlers/snapshot_handler.rs diff --git a/meilisearch-lib/src/tasks/mod.rs b/meilisearch-lib/src/tasks/mod.rs index 4c51ec207..a8cb74aed 100644 --- a/meilisearch-lib/src/tasks/mod.rs +++ b/meilisearch-lib/src/tasks/mod.rs @@ -1,7 +1,7 @@ use async_trait::async_trait; -pub use batch_handlers::empty_handler::EmptyBatchHandler; -pub use batch_handlers::snapshot_handler::SnapshotHandler; +pub use handlers::empty_handler::EmptyBatchHandler; +pub use handlers::snapshot_handler::SnapshotHandler; pub use scheduler::Scheduler; pub use task_store::TaskFilter; @@ -14,8 +14,8 @@ use batch::Batch; use error::Result; pub mod batch; -mod batch_handlers; pub mod error; +mod handlers; mod scheduler; pub mod task; mod task_store; From 8349f38197e1c8e97ef4a11db9056119826e026e Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 09:25:42 +0200 Subject: [PATCH 033/185] remove unused file --- meilisearch-lib/src/index_resolver/message.rs | 37 ------------------- 1 file changed, 37 deletions(-) delete mode 100644 meilisearch-lib/src/index_resolver/message.rs diff --git a/meilisearch-lib/src/index_resolver/message.rs b/meilisearch-lib/src/index_resolver/message.rs deleted file mode 100644 index 25a0d64a9..000000000 --- a/meilisearch-lib/src/index_resolver/message.rs +++ /dev/null @@ -1,37 +0,0 @@ -use std::{collections::HashSet, path::PathBuf}; - -use tokio::sync::oneshot; -use uuid::Uuid; - -use crate::index::Index; -use super::error::Result; - -pub enum IndexResolverMsg { - Get { - uid: String, - ret: oneshot::Sender>, - }, - Delete { - uid: String, - ret: oneshot::Sender>, - }, - List { - ret: oneshot::Sender>>, - }, - Insert { - uuid: Uuid, - name: String, - ret: oneshot::Sender>, - }, - SnapshotRequest { - path: PathBuf, - ret: 
oneshot::Sender>>, - }, - GetSize { - ret: oneshot::Sender>, - }, - DumpRequest { - path: PathBuf, - ret: oneshot::Sender>>, - }, -} From 3c85b2986542e9e9dcb9a8ee2bd114dbb16dd0da Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 09:26:11 +0200 Subject: [PATCH 034/185] add doc to BatchHandler --- meilisearch-lib/src/tasks/mod.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/meilisearch-lib/src/tasks/mod.rs b/meilisearch-lib/src/tasks/mod.rs index a8cb74aed..d8bc25bb7 100644 --- a/meilisearch-lib/src/tasks/mod.rs +++ b/meilisearch-lib/src/tasks/mod.rs @@ -28,6 +28,9 @@ pub trait BatchHandler: Sync + Send + 'static { fn accept(&self, batch: &Batch) -> bool; /// Processes the `Task` batch returning the batch with the `Task` updated. + /// + /// It is ok for this function to panic if a batch is handed that hasn't been verified by + /// `accept` beforehand. async fn process_batch(&self, batch: Batch) -> Batch; /// `finish` is called when the result of `process` has been commited to the task store. 
This From 92d86ce6aa7c49547b402ef215804e2d13106227 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 09:26:35 +0200 Subject: [PATCH 035/185] add tests to IndexResolver BatchHandler --- .../tasks/handlers/index_resolver_handler.rs | 98 +++++++++++++++++++ meilisearch-lib/src/tasks/handlers/mod.rs | 30 ++++++ meilisearch-lib/src/update_file_store.rs | 4 +- 3 files changed, 130 insertions(+), 2 deletions(-) diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index 41a78a22b..38c079baa 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -56,3 +56,101 @@ where } } } + +#[cfg(test)] +mod test { + use crate::index_resolver::{index_store::MockIndexStore, meta_store::MockIndexMetaStore}; + use crate::tasks::{ + handlers::test::task_to_batch, + task::{Task, TaskContent}, + }; + use crate::update_file_store::{Result as FileStoreResult, UpdateFileStore}; + + use super::*; + use milli::update::IndexDocumentsMethod; + use nelson::Mocker; + use proptest::prelude::*; + use uuid::Uuid; + + proptest! 
{ + #[test] + fn test_accept_task( + task in any::(), + ) { + let batch = task_to_batch(task); + + let index_store = MockIndexStore::new(); + let meta_store = MockIndexMetaStore::new(); + let mocker = Mocker::default(); + let update_file_store = UpdateFileStore::mock(mocker); + let index_resolver = IndexResolver::new(meta_store, index_store, update_file_store); + + match batch.content { + BatchContent::DocumentAddtitionBatch(_) + | BatchContent::IndexUpdate(_) => assert!(index_resolver.accept(&batch)), + BatchContent::Dump(_) + | BatchContent::Snapshot(_) + | BatchContent::Empty => assert!(!index_resolver.accept(&batch)), + } + } + } + + #[actix_rt::test] + async fn finisher_called_on_document_update() { + let index_store = MockIndexStore::new(); + let meta_store = MockIndexMetaStore::new(); + let mocker = Mocker::default(); + let content_uuid = Uuid::new_v4(); + mocker + .when::>("delete") + .once() + .then(move |uuid| { + assert_eq!(uuid, content_uuid); + Ok(()) + }); + let update_file_store = UpdateFileStore::mock(mocker); + let index_resolver = IndexResolver::new(meta_store, index_store, update_file_store); + + let task = Task { + id: 1, + index_uid: None, + content: TaskContent::DocumentAddition { + content_uuid, + merge_strategy: IndexDocumentsMethod::ReplaceDocuments, + primary_key: None, + documents_count: 100, + allow_index_creation: true, + }, + events: Vec::new(), + }; + + let batch = task_to_batch(task); + + index_resolver.finish(&batch).await; + } + + #[actix_rt::test] + #[should_panic] + async fn panic_when_passed_unsupported_batch() { + let index_store = MockIndexStore::new(); + let meta_store = MockIndexMetaStore::new(); + let mocker = Mocker::default(); + let update_file_store = UpdateFileStore::mock(mocker); + let index_resolver = IndexResolver::new(meta_store, index_store, update_file_store); + + let task = Task { + id: 1, + index_uid: None, + content: TaskContent::Dump { + uid: String::from("hello"), + }, + events: Vec::new(), + }; + + let batch 
= task_to_batch(task); + + index_resolver.process_batch(batch).await; + } + + // TODO: test perform_batch. We need a Mocker for IndexResolver. +} diff --git a/meilisearch-lib/src/tasks/handlers/mod.rs b/meilisearch-lib/src/tasks/handlers/mod.rs index 9199e872d..f5fe8eaf2 100644 --- a/meilisearch-lib/src/tasks/handlers/mod.rs +++ b/meilisearch-lib/src/tasks/handlers/mod.rs @@ -2,3 +2,33 @@ pub mod dump_handler; pub mod empty_handler; mod index_resolver_handler; pub mod snapshot_handler; + +#[cfg(test)] +mod test { + use time::OffsetDateTime; + + use crate::tasks::{ + batch::{Batch, BatchContent}, + task::{Task, TaskContent}, + }; + + pub fn task_to_batch(task: Task) -> Batch { + let content = match task.content { + TaskContent::DocumentAddition { .. } => { + BatchContent::DocumentAddtitionBatch(vec![task]) + } + TaskContent::DocumentDeletion(_) + | TaskContent::SettingsUpdate { .. } + | TaskContent::IndexDeletion + | TaskContent::IndexCreation { .. } + | TaskContent::IndexUpdate { .. } => BatchContent::IndexUpdate(task), + TaskContent::Dump { .. } => BatchContent::Dump(task), + }; + + Batch { + id: Some(1), + created_at: OffsetDateTime::now_utc(), + content, + } + } +} diff --git a/meilisearch-lib/src/update_file_store.rs b/meilisearch-lib/src/update_file_store.rs index ec355a56e..3a60dfe26 100644 --- a/meilisearch-lib/src/update_file_store.rs +++ b/meilisearch-lib/src/update_file_store.rs @@ -26,7 +26,7 @@ pub struct UpdateFile { #[error("Error while persisting update to disk: {0}")] pub struct UpdateFileStoreError(Box); -type Result = std::result::Result; +pub type Result = std::result::Result; macro_rules! 
into_update_store_error { ($($other:path),*) => { @@ -249,7 +249,7 @@ mod test { pub async fn delete(&self, uuid: Uuid) -> Result<()> { match self { MockUpdateFileStore::Real(s) => s.delete(uuid).await, - MockUpdateFileStore::Mock(_) => todo!(), + MockUpdateFileStore::Mock(mocker) => unsafe { mocker.get("delete").call(uuid) }, } } } From 986a99296d8b41d98ca8a7ec015fcc53c8bcb39d Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 11:24:35 +0200 Subject: [PATCH 036/185] remove useless dump test --- meilisearch-http/tests/dumps/mod.rs | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 8395ec3aa..22625f17f 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -6,23 +6,6 @@ use serde_json::json; use self::data::GetDump; -#[actix_rt::test] -async fn get_unexisting_dump_status() { - let server = Server::new().await; - - let (response, code) = server.get_dump_status("foobar").await; - assert_eq!(code, 404); - - let expected_response = json!({ - "message": "Dump `foobar` not found.", - "code": "dump_not_found", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#dump_not_found" - }); - - assert_eq!(response, expected_response); -} - // all the following test are ignored on windows. 
See #2364 #[actix_rt::test] #[cfg_attr(target_os = "windows", ignore)] From 127171c8120b2b11c8e372fae140cff3c939b75a Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 14:10:17 +0200 Subject: [PATCH 037/185] impl Default on Processing --- meilisearch-lib/src/tasks/scheduler.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 177cc0229..8510ba771 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -411,16 +411,21 @@ impl Scheduler { } } -#[derive(Debug, Default, PartialEq)] +#[derive(Debug, PartialEq)] pub enum Processing { DocumentAdditions(Vec), IndexUpdate(TaskId), Dump(TaskId), /// Variant used when there is nothing to process. - #[default] Nothing, } +impl Default for Processing { + fn default() -> Self { + Self::Nothing + } +} + enum ProcessingIter<'a> { Many(slice::Iter<'a, TaskId>), Single(Option), From 49d8fadb520c9edff37a06c3d67bf86589c1b9ec Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 11:14:25 +0200 Subject: [PATCH 038/185] test dump handler --- meilisearch-lib/src/dump/handler.rs | 217 +++++++++++++----- meilisearch-lib/src/index_controller/mod.rs | 14 +- .../src/tasks/handlers/dump_handler.rs | 93 ++++++++ 3 files changed, 254 insertions(+), 70 deletions(-) diff --git a/meilisearch-lib/src/dump/handler.rs b/meilisearch-lib/src/dump/handler.rs index b168e162a..4adb7011a 100644 --- a/meilisearch-lib/src/dump/handler.rs +++ b/meilisearch-lib/src/dump/handler.rs @@ -1,18 +1,10 @@ -use std::{fs::File, path::PathBuf, sync::Arc}; +#[cfg(not(test))] +pub use real::DumpHandler; + +#[cfg(test)] +pub use test::MockDumpHandler as DumpHandler; -use log::{info, trace}; -use meilisearch_auth::AuthController; -use milli::heed::Env; use time::{macros::format_description, OffsetDateTime}; -use tokio::fs::create_dir_all; - -use crate::analytics; -use crate::compression::to_tar_gz; -use 
crate::dump::error::{DumpError, Result}; -use crate::dump::{MetadataVersion, META_FILE_NAME}; -use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore, IndexResolver}; -use crate::tasks::TaskStore; -use crate::update_file_store::UpdateFileStore; /// Generate uid from creation date pub fn generate_uid() -> String { @@ -23,67 +15,166 @@ pub fn generate_uid() -> String { .unwrap() } -pub struct DumpHandler { - pub dump_path: PathBuf, - pub db_path: PathBuf, - pub update_file_store: UpdateFileStore, - pub task_store_size: usize, - pub index_db_size: usize, - pub env: Arc, - pub index_resolver: Arc>, -} +mod real { + use std::{fs::File, path::PathBuf, sync::Arc}; -impl DumpHandler -where - U: IndexMetaStore + Sync + Send + 'static, - I: IndexStore + Sync + Send + 'static, -{ - pub async fn run(&self, uid: String) -> Result<()> { - trace!("Performing dump."); + use log::{info, trace}; + use meilisearch_auth::AuthController; + use milli::heed::Env; + use tokio::fs::create_dir_all; - create_dir_all(&self.dump_path).await?; + use crate::analytics; + use crate::compression::to_tar_gz; + use crate::dump::error::{DumpError, Result}; + use crate::dump::{MetadataVersion, META_FILE_NAME}; + use crate::index_resolver::{ + index_store::IndexStore, meta_store::IndexMetaStore, IndexResolver, + }; + use crate::tasks::TaskStore; + use crate::update_file_store::UpdateFileStore; - let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; - let temp_dump_path = temp_dump_dir.path().to_owned(); + pub struct DumpHandler { + dump_path: PathBuf, + db_path: PathBuf, + update_file_store: UpdateFileStore, + task_store_size: usize, + index_db_size: usize, + env: Arc, + index_resolver: Arc>, + } - let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size); - let meta_path = temp_dump_path.join(META_FILE_NAME); - // TODO: blocking - let mut meta_file = File::create(&meta_path)?; - serde_json::to_writer(&mut meta_file, &meta)?; - 
analytics::copy_user_id(&self.db_path, &temp_dump_path); + impl DumpHandler + where + U: IndexMetaStore + Sync + Send + 'static, + I: IndexStore + Sync + Send + 'static, + { + pub fn new( + dump_path: PathBuf, + db_path: PathBuf, + update_file_store: UpdateFileStore, + task_store_size: usize, + index_db_size: usize, + env: Arc, + index_resolver: Arc>, + ) -> Self { + Self { + dump_path, + db_path, + update_file_store, + task_store_size, + index_db_size, + env, + index_resolver, + } + } - create_dir_all(&temp_dump_path.join("indexes")).await?; + pub async fn run(&self, uid: String) -> Result<()> { + trace!("Performing dump."); - // TODO: this is blocking!! - AuthController::dump(&self.db_path, &temp_dump_path)?; - TaskStore::dump( - self.env.clone(), - &temp_dump_path, - self.update_file_store.clone(), - ) - .await?; - self.index_resolver.dump(&temp_dump_path).await?; + create_dir_all(&self.dump_path).await?; - let dump_path = self.dump_path.clone(); - let dump_path = tokio::task::spawn_blocking(move || -> Result { - // for now we simply copy the updates/updates_files - // FIXME: We may copy more files than necessary, if new files are added while we are - // performing the dump. We need a way to filter them out. 
+ let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; + let temp_dump_path = temp_dump_dir.path().to_owned(); - let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?; - to_tar_gz(temp_dump_path, temp_dump_file.path()) - .map_err(|e| DumpError::Internal(e.into()))?; + let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size); + let meta_path = temp_dump_path.join(META_FILE_NAME); + // TODO: blocking + let mut meta_file = File::create(&meta_path)?; + serde_json::to_writer(&mut meta_file, &meta)?; + analytics::copy_user_id(&self.db_path, &temp_dump_path); - let dump_path = dump_path.join(uid).with_extension("dump"); - temp_dump_file.persist(&dump_path)?; + create_dir_all(&temp_dump_path.join("indexes")).await?; - Ok(dump_path) - }) - .await??; + // TODO: this is blocking!! + AuthController::dump(&self.db_path, &temp_dump_path)?; + TaskStore::dump( + self.env.clone(), + &temp_dump_path, + self.update_file_store.clone(), + ) + .await?; + self.index_resolver.dump(&temp_dump_path).await?; - info!("Created dump in {:?}.", dump_path); + let dump_path = self.dump_path.clone(); + let dump_path = tokio::task::spawn_blocking(move || -> Result { + // for now we simply copy the updates/updates_files + // FIXME: We may copy more files than necessary, if new files are added while we are + // performing the dump. We need a way to filter them out. 
- Ok(()) + let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?; + to_tar_gz(temp_dump_path, temp_dump_file.path()) + .map_err(|e| DumpError::Internal(e.into()))?; + + let dump_path = dump_path.join(uid).with_extension("dump"); + temp_dump_file.persist(&dump_path)?; + + Ok(dump_path) + }) + .await??; + + info!("Created dump in {:?}.", dump_path); + + Ok(()) + } + } +} + +#[cfg(test)] +mod test { + use std::marker::PhantomData; + use std::path::PathBuf; + use std::sync::Arc; + + use milli::heed::Env; + use nelson::Mocker; + + use crate::dump::error::Result; + use crate::index_resolver::IndexResolver; + use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore}; + use crate::update_file_store::UpdateFileStore; + + use super::*; + + pub enum MockDumpHandler { + Real(super::real::DumpHandler), + Mock(Mocker, PhantomData<(U, I)>), + } + + impl MockDumpHandler { + pub fn mock(mocker: Mocker) -> Self { + Self::Mock(mocker, PhantomData) + } + } + + impl MockDumpHandler + where + U: IndexMetaStore + Sync + Send + 'static, + I: IndexStore + Sync + Send + 'static, + { + pub fn new( + dump_path: PathBuf, + db_path: PathBuf, + update_file_store: UpdateFileStore, + task_store_size: usize, + index_db_size: usize, + env: Arc, + index_resolver: Arc>, + ) -> Self { + Self::Real(super::real::DumpHandler::new( + dump_path, + db_path, + update_file_store, + task_store_size, + index_db_size, + env, + index_resolver, + )) + } + pub async fn run(&self, uid: String) -> Result<()> { + match self { + DumpHandler::Real(real) => real.run(uid).await, + DumpHandler::Mock(mocker, _) => unsafe { mocker.get("run").call(uid) }, + } + } } } diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index b34523fd5..30a6b6dc8 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -222,15 +222,15 @@ impl IndexControllerBuilder { .dump_dst .ok_or_else(|| 
anyhow::anyhow!("Missing dump directory path"))?; - let dump_handler = Arc::new(DumpHandler { + let dump_handler = Arc::new(DumpHandler::new( dump_path, - db_path: db_path.as_ref().into(), - update_file_store: update_file_store.clone(), + db_path.as_ref().into(), + update_file_store.clone(), task_store_size, - index_db_size: index_size, - env: meta_env.clone(), - index_resolver: index_resolver.clone(), - }); + index_size, + meta_env.clone(), + index_resolver.clone(), + )); let task_store = TaskStore::new(meta_env)?; // register all the batch handlers for use with the scheduler. diff --git a/meilisearch-lib/src/tasks/handlers/dump_handler.rs b/meilisearch-lib/src/tasks/handlers/dump_handler.rs index fc506522f..e826242f4 100644 --- a/meilisearch-lib/src/tasks/handlers/dump_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/dump_handler.rs @@ -39,3 +39,96 @@ where () } } + +#[cfg(test)] +mod test { + use crate::dump::error::{DumpError, Result as DumpResult}; + use crate::index_resolver::{index_store::MockIndexStore, meta_store::MockIndexMetaStore}; + use crate::tasks::handlers::test::task_to_batch; + + use super::*; + + use nelson::Mocker; + use proptest::prelude::*; + + proptest! { + #[test] + fn finish_does_nothing( + task in any::(), + ) { + let rt = tokio::runtime::Runtime::new().unwrap(); + let handle = rt.spawn(async { + let batch = task_to_batch(task); + + let mocker = Mocker::default(); + let dump_handler = DumpHandler::::mock(mocker); + + dump_handler.finish(&batch).await; + }); + + rt.block_on(handle).unwrap(); + } + + #[test] + fn test_handle_dump_success( + task in any::(), + ) { + let rt = tokio::runtime::Runtime::new().unwrap(); + let handle = rt.spawn(async { + let batch = task_to_batch(task); + let should_accept = matches!(batch.content, BatchContent::Dump { .. 
}); + + let mocker = Mocker::default(); + if should_accept { + mocker.when::>("run") + .once() + .then(|_| Ok(())); + } + + let dump_handler = DumpHandler::::mock(mocker); + + let accept = dump_handler.accept(&batch); + assert_eq!(accept, should_accept); + + if accept { + let batch = dump_handler.process_batch(batch).await; + let last_event = batch.content.first().unwrap().events.last().unwrap(); + assert!(matches!(last_event, TaskEvent::Succeded { .. })); + } + }); + + rt.block_on(handle).unwrap(); + } + + #[test] + fn test_handle_dump_error( + task in any::(), + ) { + let rt = tokio::runtime::Runtime::new().unwrap(); + let handle = rt.spawn(async { + let batch = task_to_batch(task); + let should_accept = matches!(batch.content, BatchContent::Dump { .. }); + + let mocker = Mocker::default(); + if should_accept { + mocker.when::>("run") + .once() + .then(|_| Err(DumpError::Internal("error".into()))); + } + + let dump_handler = DumpHandler::::mock(mocker); + + let accept = dump_handler.accept(&batch); + assert_eq!(accept, should_accept); + + if accept { + let batch = dump_handler.process_batch(batch).await; + let last_event = batch.content.first().unwrap().events.last().unwrap(); + assert!(matches!(last_event, TaskEvent::Failed { .. })); + } + }); + + rt.block_on(handle).unwrap(); + } + } +} From 3015265bde07a0b7428611fb429139fb540cffd2 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 14:37:10 +0200 Subject: [PATCH 039/185] remove useless dump errors --- meilisearch-lib/src/dump/error.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/meilisearch-lib/src/dump/error.rs b/meilisearch-lib/src/dump/error.rs index 7931a8d75..da9010347 100644 --- a/meilisearch-lib/src/dump/error.rs +++ b/meilisearch-lib/src/dump/error.rs @@ -7,10 +7,6 @@ pub type Result = std::result::Result; #[derive(thiserror::Error, Debug)] pub enum DumpError { - #[error("A dump is already processing. 
You must wait until the current process is finished before requesting another dump.")] - DumpAlreadyRunning, - #[error("Dump `{0}` not found.")] - DumpDoesNotExist(String), #[error("An internal error has occurred. `{0}`.")] Internal(Box), #[error("{0}")] @@ -32,8 +28,6 @@ internal_error!( impl ErrorCode for DumpError { fn error_code(&self) -> Code { match self { - DumpError::DumpAlreadyRunning => Code::DumpAlreadyInProgress, - DumpError::DumpDoesNotExist(_) => Code::DumpNotFound, DumpError::Internal(_) => Code::Internal, DumpError::IndexResolver(e) => e.error_code(), } From 6b2016b350d6ea7e8ffd79cb6f278578b5a9c7d7 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 14:39:07 +0200 Subject: [PATCH 040/185] remove typo in BatchContent variant --- meilisearch-lib/src/tasks/batch.rs | 8 ++++---- .../src/tasks/handlers/index_resolver_handler.rs | 8 ++++---- meilisearch-lib/src/tasks/handlers/mod.rs | 2 +- meilisearch-lib/src/tasks/scheduler.rs | 4 ++-- meilisearch-lib/src/tasks/task_store/mod.rs | 2 +- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/meilisearch-lib/src/tasks/batch.rs b/meilisearch-lib/src/tasks/batch.rs index 88c73e3de..d5116f750 100644 --- a/meilisearch-lib/src/tasks/batch.rs +++ b/meilisearch-lib/src/tasks/batch.rs @@ -8,7 +8,7 @@ pub type BatchId = u64; #[derive(Debug)] pub enum BatchContent { - DocumentAddtitionBatch(Vec), + DocumentsAdditionBatch(Vec), IndexUpdate(Task), Dump(Task), Snapshot(SnapshotJob), @@ -19,7 +19,7 @@ pub enum BatchContent { impl BatchContent { pub fn first(&self) -> Option<&Task> { match self { - BatchContent::DocumentAddtitionBatch(ts) => ts.first(), + BatchContent::DocumentsAdditionBatch(ts) => ts.first(), BatchContent::Dump(t) | BatchContent::IndexUpdate(t) => Some(t), BatchContent::Snapshot(_) | BatchContent::Empty => None, } @@ -27,7 +27,7 @@ impl BatchContent { pub fn push_event(&mut self, event: TaskEvent) { match self { - BatchContent::DocumentAddtitionBatch(ts) => { + 
BatchContent::DocumentsAdditionBatch(ts) => { ts.iter_mut().for_each(|t| t.events.push(event.clone())) } BatchContent::IndexUpdate(t) | BatchContent::Dump(t) => t.events.push(event), @@ -55,7 +55,7 @@ impl Batch { } pub fn len(&self) -> usize { match self.content { - BatchContent::DocumentAddtitionBatch(ref ts) => ts.len(), + BatchContent::DocumentsAdditionBatch(ref ts) => ts.len(), BatchContent::IndexUpdate(_) | BatchContent::Dump(_) | BatchContent::Snapshot(_) => 1, BatchContent::Empty => 0, } diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index 38c079baa..a744dcaad 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -14,14 +14,14 @@ where { fn accept(&self, batch: &Batch) -> bool { match batch.content { - BatchContent::DocumentAddtitionBatch(_) | BatchContent::IndexUpdate(_) => true, + BatchContent::DocumentsAdditionBatch(_) | BatchContent::IndexUpdate(_) => true, _ => false, } } async fn process_batch(&self, mut batch: Batch) -> Batch { match batch.content { - BatchContent::DocumentAddtitionBatch(ref mut tasks) => { + BatchContent::DocumentsAdditionBatch(ref mut tasks) => { *tasks = self .process_document_addition_batch(std::mem::take(tasks)) .await; @@ -45,7 +45,7 @@ where } async fn finish(&self, batch: &Batch) { - if let BatchContent::DocumentAddtitionBatch(ref tasks) = batch.content { + if let BatchContent::DocumentsAdditionBatch(ref tasks) = batch.content { for task in tasks { if let Some(content_uuid) = task.get_content_uuid() { if let Err(e) = self.file_store.delete(content_uuid).await { @@ -86,7 +86,7 @@ mod test { let index_resolver = IndexResolver::new(meta_store, index_store, update_file_store); match batch.content { - BatchContent::DocumentAddtitionBatch(_) + BatchContent::DocumentsAdditionBatch(_) | BatchContent::IndexUpdate(_) => assert!(index_resolver.accept(&batch)), 
BatchContent::Dump(_) | BatchContent::Snapshot(_) diff --git a/meilisearch-lib/src/tasks/handlers/mod.rs b/meilisearch-lib/src/tasks/handlers/mod.rs index f5fe8eaf2..6e28636ed 100644 --- a/meilisearch-lib/src/tasks/handlers/mod.rs +++ b/meilisearch-lib/src/tasks/handlers/mod.rs @@ -15,7 +15,7 @@ mod test { pub fn task_to_batch(task: Task) -> Batch { let content = match task.content { TaskContent::DocumentAddition { .. } => { - BatchContent::DocumentAddtitionBatch(vec![task]) + BatchContent::DocumentsAdditionBatch(vec![task]) } TaskContent::DocumentDeletion(_) | TaskContent::SettingsUpdate { .. } diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 8510ba771..cf3b14cd4 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -296,9 +296,9 @@ impl Scheduler { pub async fn update_tasks(&self, content: BatchContent) -> Result { match content { - BatchContent::DocumentAddtitionBatch(tasks) => { + BatchContent::DocumentsAdditionBatch(tasks) => { let tasks = self.store.update_tasks(tasks).await?; - Ok(BatchContent::DocumentAddtitionBatch(tasks)) + Ok(BatchContent::DocumentsAdditionBatch(tasks)) } BatchContent::IndexUpdate(t) => { let mut tasks = self.store.update_tasks(vec![t]).await?; diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index 610a5bdeb..a5227fe73 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -147,7 +147,7 @@ impl TaskStore { .ok_or(TaskError::UnexistingTask(*id))?; tasks.push(task); } - BatchContent::DocumentAddtitionBatch(tasks) + BatchContent::DocumentsAdditionBatch(tasks) } Processing::IndexUpdate(id) => { let task = store.get(&txn, id)?.ok_or(TaskError::UnexistingTask(id))?; From f58507379aaab938fa52e5a7d1c17ec31ce92342 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 14:50:14 +0200 Subject: [PATCH 041/185] fix dump priority in scheduler 
--- meilisearch-lib/src/tasks/scheduler.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index cf3b14cd4..8d44a5859 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -110,8 +110,8 @@ impl Ord for TaskList { (Some(lhs), Some(rhs)) => lhs.cmp(rhs), } } - (TaskListIdentifier::Index(_), TaskListIdentifier::Dump) => Ordering::Greater, - (TaskListIdentifier::Dump, TaskListIdentifier::Index(_)) => Ordering::Less, + (TaskListIdentifier::Index(_), TaskListIdentifier::Dump) => Ordering::Less, + (TaskListIdentifier::Dump, TaskListIdentifier::Index(_)) => Ordering::Greater, (TaskListIdentifier::Dump, TaskListIdentifier::Dump) => { unreachable!("There should be only one Dump task list") } From 74a1f88d8883ed9a78f29f74d18b7915f810b1f2 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 14:57:18 +0200 Subject: [PATCH 042/185] add test for dump processing order --- meilisearch-lib/src/tasks/scheduler.rs | 46 ++++++++++++++++---------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 8d44a5859..19265a911 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -536,10 +536,10 @@ mod test { use super::*; - fn gen_task(id: TaskId, index_uid: &str, content: TaskContent) -> Task { + fn gen_task(id: TaskId, index_uid: Option<&str>, content: TaskContent) -> Task { Task { id, - index_uid: Some(IndexUid::new_unchecked(index_uid)), + index_uid: index_uid.map(IndexUid::new_unchecked), content, events: vec![], } @@ -548,13 +548,13 @@ mod test { #[test] fn register_updates_multiples_indexes() { let mut queue = TaskQueue::default(); - queue.insert(gen_task(0, "test1", TaskContent::IndexDeletion)); - queue.insert(gen_task(1, "test2", TaskContent::IndexDeletion)); - queue.insert(gen_task(2, 
"test2", TaskContent::IndexDeletion)); - queue.insert(gen_task(3, "test2", TaskContent::IndexDeletion)); - queue.insert(gen_task(4, "test1", TaskContent::IndexDeletion)); - queue.insert(gen_task(5, "test1", TaskContent::IndexDeletion)); - queue.insert(gen_task(6, "test2", TaskContent::IndexDeletion)); + queue.insert(gen_task(0, Some("test1"), TaskContent::IndexDeletion)); + queue.insert(gen_task(1, Some("test2"), TaskContent::IndexDeletion)); + queue.insert(gen_task(2, Some("test2"), TaskContent::IndexDeletion)); + queue.insert(gen_task(3, Some("test2"), TaskContent::IndexDeletion)); + queue.insert(gen_task(4, Some("test1"), TaskContent::IndexDeletion)); + queue.insert(gen_task(5, Some("test1"), TaskContent::IndexDeletion)); + queue.insert(gen_task(6, Some("test2"), TaskContent::IndexDeletion)); let test1_tasks = queue .head_mut(|tasks| tasks.drain().map(|t| t.id).collect::>()) @@ -582,16 +582,28 @@ mod test { documents_count: 0, allow_index_creation: true, }; - queue.insert(gen_task(0, "test1", content.clone())); - queue.insert(gen_task(1, "test2", content.clone())); - queue.insert(gen_task(2, "test2", TaskContent::IndexDeletion)); - queue.insert(gen_task(3, "test2", content.clone())); - queue.insert(gen_task(4, "test1", content.clone())); - queue.insert(gen_task(5, "test1", TaskContent::IndexDeletion)); - queue.insert(gen_task(6, "test2", content.clone())); - queue.insert(gen_task(7, "test1", content)); + queue.insert(gen_task(0, Some("test1"), content.clone())); + queue.insert(gen_task(1, Some("test2"), content.clone())); + queue.insert(gen_task(2, Some("test2"), TaskContent::IndexDeletion)); + queue.insert(gen_task(3, Some("test2"), content.clone())); + queue.insert(gen_task(4, Some("test1"), content.clone())); + queue.insert(gen_task(5, Some("test1"), TaskContent::IndexDeletion)); + queue.insert(gen_task(6, Some("test2"), content.clone())); + queue.insert(gen_task(7, Some("test1"), content)); + queue.insert(gen_task( + 8, + None, + TaskContent::Dump { + uid: 
"adump".to_owned(), + }, + )); let config = SchedulerConfig::default(); + + // Make sure that the dump is processed before everybody else. + let batch = make_batch(&mut queue, &config); + assert_eq!(batch, Processing::Dump(8)); + let batch = make_batch(&mut queue, &config); assert_eq!(batch, Processing::DocumentAdditions(vec![0, 4])); From 1647ca3c1f929a1ceb67e5c9ec6fb24a5c96bcb1 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 25 May 2022 15:07:52 +0200 Subject: [PATCH 043/185] fix clipy warnings --- meilisearch-lib/src/tasks/handlers/dump_handler.rs | 4 +--- meilisearch-lib/src/tasks/handlers/empty_handler.rs | 4 +--- .../src/tasks/handlers/index_resolver_handler.rs | 10 +++++----- meilisearch-lib/src/tasks/handlers/snapshot_handler.rs | 9 ++------- meilisearch-lib/src/tasks/task_store/store.rs | 2 +- 5 files changed, 10 insertions(+), 19 deletions(-) diff --git a/meilisearch-lib/src/tasks/handlers/dump_handler.rs b/meilisearch-lib/src/tasks/handlers/dump_handler.rs index e826242f4..715beafee 100644 --- a/meilisearch-lib/src/tasks/handlers/dump_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/dump_handler.rs @@ -35,9 +35,7 @@ where } } - async fn finish(&self, _: &Batch) { - () - } + async fn finish(&self, _: &Batch) {} } #[cfg(test)] diff --git a/meilisearch-lib/src/tasks/handlers/empty_handler.rs b/meilisearch-lib/src/tasks/handlers/empty_handler.rs index 5d6aa2275..d800e1965 100644 --- a/meilisearch-lib/src/tasks/handlers/empty_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/empty_handler.rs @@ -14,7 +14,5 @@ impl BatchHandler for EmptyBatchHandler { batch } - async fn finish(&self, _: &Batch) { - () - } + async fn finish(&self, _: &Batch) {} } diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index a744dcaad..a34082afe 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -13,10 
+13,10 @@ where I: IndexStore + Send + Sync + 'static, { fn accept(&self, batch: &Batch) -> bool { - match batch.content { - BatchContent::DocumentsAdditionBatch(_) | BatchContent::IndexUpdate(_) => true, - _ => false, - } + matches!( + batch.content, + BatchContent::DocumentsAdditionBatch(_) | BatchContent::IndexUpdate(_) + ) } async fn process_batch(&self, mut batch: Batch) -> Batch { @@ -26,7 +26,7 @@ where .process_document_addition_batch(std::mem::take(tasks)) .await; } - BatchContent::IndexUpdate(ref mut task) => match self.process_task(&task).await { + BatchContent::IndexUpdate(ref mut task) => match self.process_task(task).await { Ok(success) => { task.events.push(TaskEvent::Succeded { result: success, diff --git a/meilisearch-lib/src/tasks/handlers/snapshot_handler.rs b/meilisearch-lib/src/tasks/handlers/snapshot_handler.rs index 2948fb4ff..32fe6d746 100644 --- a/meilisearch-lib/src/tasks/handlers/snapshot_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/snapshot_handler.rs @@ -6,10 +6,7 @@ pub struct SnapshotHandler; #[async_trait::async_trait] impl BatchHandler for SnapshotHandler { fn accept(&self, batch: &Batch) -> bool { - match batch.content { - BatchContent::Snapshot(_) => true, - _ => false, - } + matches!(batch.content, BatchContent::Snapshot(_)) } async fn process_batch(&self, batch: Batch) -> Batch { @@ -25,7 +22,5 @@ impl BatchHandler for SnapshotHandler { Batch::empty() } - async fn finish(&self, _: &Batch) { - () - } + async fn finish(&self, _: &Batch) {} } diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index 902f80560..75ece0ae8 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -110,7 +110,7 @@ impl Store { self.tasks.put(txn, &BEU64::new(task.id), task)?; // only add the task to the indexes index if it has an index_uid if let Some(ref index_uid) = task.index_uid { - self.uids_task_ids.put(txn, &(&index_uid, 
task.id), &())?; + self.uids_task_ids.put(txn, &(index_uid, task.id), &())?; } Ok(()) From ba51ca83ec9f22a6590ac371251f08e82d00555b Mon Sep 17 00:00:00 2001 From: 0x0x1 <101086451+0x0x1@users.noreply.github.com> Date: Thu, 26 May 2022 10:29:27 +0800 Subject: [PATCH 044/185] Update docker volume path Makes docker volume same as Dockerfile --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index cd3effd94..d009be3fe 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ meilisearch #### Docker ```bash -docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch +docker run -p 7700:7700 -v "$(pwd)/data.ms:/meili_data/data.ms" getmeili/meilisearch ``` #### Announcing a cloud-hosted Meilisearch From 5a2972fc1929aaa9a2a1a11b91b4b903f0135d43 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 26 May 2022 11:50:04 +0200 Subject: [PATCH 045/185] use TaskEvent method instead of variants in BatchHandler impl --- .../src/tasks/handlers/index_resolver_handler.rs | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index a34082afe..e0471567b 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -1,5 +1,3 @@ -use time::OffsetDateTime; - use crate::index_resolver::IndexResolver; use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore}; use crate::tasks::batch::{Batch, BatchContent}; @@ -27,16 +25,8 @@ where .await; } BatchContent::IndexUpdate(ref mut task) => match self.process_task(task).await { - Ok(success) => { - task.events.push(TaskEvent::Succeded { - result: success, - timestamp: OffsetDateTime::now_utc(), - }); - } - Err(err) => task.events.push(TaskEvent::Failed { - error: err.into(), - timestamp: OffsetDateTime::now_utc(), - }), + Ok(success) => 
task.events.push(TaskEvent::succeeded(success)), + Err(err) => task.events.push(TaskEvent::failed(err.into())), }, _ => unreachable!(), } From a9ef399a6b5ac619c8ab7ef8a3475530f8b60fcd Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 26 May 2022 12:04:27 +0200 Subject: [PATCH 046/185] processing::Nothing return BatchContent::Empty instead of panic --- meilisearch-lib/src/tasks/task_store/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index a5227fe73..deb3f8191 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -158,7 +158,7 @@ impl TaskStore { debug_assert!(matches!(task.content, TaskContent::Dump { .. })); BatchContent::Dump(task) } - Processing::Nothing => unreachable!(), + Processing::Nothing => BatchContent::Empty, }; Ok((processing, content)) From 4cb2c6ef1e16894054c111b5a1aaac93cfca5d3a Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 30 May 2022 12:30:15 +0200 Subject: [PATCH 047/185] use map_or instead of map + unwrap_or --- meilisearch-lib/src/tasks/task_store/mod.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index deb3f8191..3645717e6 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -36,8 +36,7 @@ impl TaskFilter { Some(ref index_uid) => self .indexes .as_ref() - .map(|indexes| indexes.contains(index_uid.as_str())) - .unwrap_or(true), + .map_or(true, |indexes| indexes.contains(index_uid.as_str())), None => false, } } From 1e310ecc7d1600c171187c6f23e176825331185b Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 30 May 2022 14:34:49 +0200 Subject: [PATCH 048/185] fix typo in docstring Co-authored-by: Tamo --- meilisearch-lib/src/tasks/task.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index 0e0aa8af2..0499d9702 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -86,7 +86,7 @@ impl TaskEvent { #[cfg_attr(test, derive(proptest_derive::Arbitrary))] pub struct Task { pub id: TaskId, - /// The name of the index the task is targeting. If it isn't targeting any idex (i.e Dump task) + /// The name of the index the task is targeting. If it isn't targeting any index (i.e Dump task) /// then this is None // TODO: when next forward breaking dumps, it would be a good idea to move this field inside of // the TaskContent. From d47b997120da1e772009fe84095937930c1da43f Mon Sep 17 00:00:00 2001 From: Irevoire Date: Wed, 25 May 2022 14:36:00 +0200 Subject: [PATCH 049/185] chore(analytics): update the url used to send our analytics --- meilisearch-http/Cargo.toml | 1 + .../src/analytics/segment_analytics.rs | 14 +++++++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 9f06debaf..0a5cdff7f 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -57,6 +57,7 @@ platform-dirs = "0.3.0" rand = "0.8.5" rayon = "1.5.1" regex = "1.5.5" +reqwest = { version = "0.11.4", features = ["rustls-tls", "json"], default-features = false } rustls = "0.20.4" rustls-pemfile = "0.3.0" segment = { version = "0.2.0", optional = true } diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 49f5aed3d..20df96942 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -81,7 +81,19 @@ impl SegmentAnalytics { let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string()); write_user_id(&opt.db_path, &user_id); - let client = HttpClient::default(); + let client = reqwest::Client::builder() + .connect_timeout(Duration::from_secs(10)) + 
.build(); + + // if reqwest throws an error we won't be able to send analytics + if client.is_err() { + return super::MockAnalytics::new(opt); + } + + let client = HttpClient::new( + client.unwrap(), + "https://telemetry.meilisearch.com".to_string(), + ); let user = User::UserId { user_id }; let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string()); From 26e7bdf702fa8184b482495580abc4f5cf3c0176 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 30 May 2022 17:19:29 +0200 Subject: [PATCH 050/185] add boilerplate for dump v5 --- meilisearch-lib/src/dump/compat/mod.rs | 1 + meilisearch-lib/src/dump/compat/v4.rs | 1 + meilisearch-lib/src/dump/loaders/mod.rs | 1 + meilisearch-lib/src/dump/loaders/v3.rs | 1 - meilisearch-lib/src/dump/loaders/v4.rs | 37 ++++--------------- meilisearch-lib/src/dump/loaders/v5.rs | 47 +++++++++++++++++++++++++ meilisearch-lib/src/dump/mod.rs | 14 +++++--- 7 files changed, 67 insertions(+), 35 deletions(-) create mode 100644 meilisearch-lib/src/dump/compat/v4.rs create mode 100644 meilisearch-lib/src/dump/loaders/v5.rs diff --git a/meilisearch-lib/src/dump/compat/mod.rs b/meilisearch-lib/src/dump/compat/mod.rs index 93f3f9dd7..9abac24c7 100644 --- a/meilisearch-lib/src/dump/compat/mod.rs +++ b/meilisearch-lib/src/dump/compat/mod.rs @@ -1,5 +1,6 @@ pub mod v2; pub mod v3; +pub mod v4; /// Parses the v1 version of the Asc ranking rules `asc(price)`and returns the field name. 
pub fn asc_ranking_rule(text: &str) -> Option<&str> { diff --git a/meilisearch-lib/src/dump/compat/v4.rs b/meilisearch-lib/src/dump/compat/v4.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/meilisearch-lib/src/dump/compat/v4.rs @@ -0,0 +1 @@ + diff --git a/meilisearch-lib/src/dump/loaders/mod.rs b/meilisearch-lib/src/dump/loaders/mod.rs index ecc305652..199b20c02 100644 --- a/meilisearch-lib/src/dump/loaders/mod.rs +++ b/meilisearch-lib/src/dump/loaders/mod.rs @@ -1,3 +1,4 @@ pub mod v2; pub mod v3; pub mod v4; +pub mod v5; diff --git a/meilisearch-lib/src/dump/loaders/v3.rs b/meilisearch-lib/src/dump/loaders/v3.rs index 8e76b67e0..0a2ea438b 100644 --- a/meilisearch-lib/src/dump/loaders/v3.rs +++ b/meilisearch-lib/src/dump/loaders/v3.rs @@ -66,7 +66,6 @@ pub fn load_dump( index_db_size, meta_env_size, indexing_options, - "V5", ) } diff --git a/meilisearch-lib/src/dump/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs index 7f0ade714..df7b33cfa 100644 --- a/meilisearch-lib/src/dump/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -1,16 +1,9 @@ use std::path::Path; -use std::sync::Arc; use log::info; -use meilisearch_auth::AuthController; -use milli::heed::EnvOpenOptions; -use crate::analytics; use crate::dump::Metadata; -use crate::index_resolver::IndexResolver; use crate::options::IndexerOpts; -use crate::tasks::TaskStore; -use crate::update_file_store::UpdateFileStore; pub fn load_dump( meta: Metadata, @@ -19,31 +12,15 @@ pub fn load_dump( index_db_size: usize, meta_env_size: usize, indexing_options: &IndexerOpts, - version: &str, ) -> anyhow::Result<()> { - info!( - "Loading dump from {}, dump database version: {}, dump version: {}", - meta.dump_date, meta.db_version, version - ); + info!("Patching dump V4 to dump V5..."); - let mut options = EnvOpenOptions::new(); - options.map_size(meta_env_size); - options.max_dbs(100); - let env = Arc::new(options.open(&dst)?); - - IndexResolver::load_dump( - src.as_ref(), - &dst, + 
super::v5::load_dump( + meta, + src, + dst, index_db_size, - env.clone(), + meta_env_size, indexing_options, - )?; - UpdateFileStore::load_dump(src.as_ref(), &dst)?; - TaskStore::load_dump(&src, env)?; - AuthController::load_dump(&src, &dst)?; - analytics::copy_user_id(src.as_ref(), dst.as_ref()); - - info!("Loading indexes."); - - Ok(()) + ) } diff --git a/meilisearch-lib/src/dump/loaders/v5.rs b/meilisearch-lib/src/dump/loaders/v5.rs new file mode 100644 index 000000000..fcb4224bb --- /dev/null +++ b/meilisearch-lib/src/dump/loaders/v5.rs @@ -0,0 +1,47 @@ +use std::{path::Path, sync::Arc}; + +use log::info; +use meilisearch_auth::AuthController; +use milli::heed::EnvOpenOptions; + +use crate::analytics; +use crate::dump::Metadata; +use crate::index_resolver::IndexResolver; +use crate::options::IndexerOpts; +use crate::tasks::TaskStore; +use crate::update_file_store::UpdateFileStore; + +pub fn load_dump( + meta: Metadata, + src: impl AsRef, + dst: impl AsRef, + index_db_size: usize, + meta_env_size: usize, + indexing_options: &IndexerOpts, +) -> anyhow::Result<()> { + info!( + "Loading dump from {}, dump database version: {}, dump version: V5", + meta.dump_date, meta.db_version + ); + + let mut options = EnvOpenOptions::new(); + options.map_size(meta_env_size); + options.max_dbs(100); + let env = Arc::new(options.open(&dst)?); + + IndexResolver::load_dump( + src.as_ref(), + &dst, + index_db_size, + env.clone(), + indexing_options, + )?; + UpdateFileStore::load_dump(src.as_ref(), &dst)?; + TaskStore::load_dump(&src, env)?; + AuthController::load_dump(&src, &dst)?; + analytics::copy_user_id(src.as_ref(), dst.as_ref()); + + info!("Loading indexes."); + + Ok(()) +} diff --git a/meilisearch-lib/src/dump/mod.rs b/meilisearch-lib/src/dump/mod.rs index ab1c63d6d..ea7b9f3dc 100644 --- a/meilisearch-lib/src/dump/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -11,7 +11,7 @@ use tempfile::TempDir; use crate::compression::from_tar_gz; use crate::options::IndexerOpts; -use 
self::loaders::{v2, v3, v4}; +use self::loaders::{v2, v3, v4, v5}; pub use handler::{generate_uid, DumpHandler}; @@ -69,7 +69,6 @@ impl MetadataVersion { meta_env_size: usize, indexing_options: &IndexerOpts, ) -> anyhow::Result<()> { - let version = self.version(); match self { MetadataVersion::V1(_meta) => { anyhow::bail!("The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.") @@ -90,14 +89,21 @@ impl MetadataVersion { meta_env_size, indexing_options, )?, - MetadataVersion::V4(meta) | MetadataVersion::V5(meta) => v4::load_dump( + MetadataVersion::V4(meta) => v4::load_dump( + meta, + src, + dst, + index_db_size, + meta_env_size, + indexing_options, + )?, + MetadataVersion::V5(meta) => v5::load_dump( meta, src, dst, index_db_size, meta_env_size, indexing_options, - version, )?, } From deba0cc09694222538d4811e905c8500232383a5 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 10:23:46 +0200 Subject: [PATCH 051/185] Make v4::load_dump copy each part a the dump --- meilisearch-lib/src/dump/loaders/v4.rs | 32 +++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/meilisearch-lib/src/dump/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs index df7b33cfa..50fffeb9e 100644 --- a/meilisearch-lib/src/dump/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -1,6 +1,9 @@ +use std::fs; use std::path::Path; +use fs_extra::dir::{self, CopyOptions}; use log::info; +use tempfile::tempdir; use crate::dump::Metadata; use crate::options::IndexerOpts; @@ -15,9 +18,36 @@ pub fn load_dump( ) -> anyhow::Result<()> { info!("Patching dump V4 to dump V5..."); + let patched_dir = tempdir()?; + let options = CopyOptions::default(); + + // Indexes + dir::copy(src.as_ref().join("indexes"), patched_dir.path(), &options)?; + + // Index uuids + dir::copy( + src.as_ref().join("index_uuids"), + patched_dir.path(), + &options, + 
)?; + + // Metadata + fs::copy( + src.as_ref().join("metadata.json"), + patched_dir.path().join("metadata.json"), + )?; + + // Updates + dir::copy(src.as_ref().join("updates"), patched_dir.path(), &options)?; + + // Keys + if src.as_ref().join("keys").exists() { + fs::copy(src.as_ref().join("keys"), patched_dir.path().join("keys"))?; + } + super::v5::load_dump( meta, - src, + patched_dir.path(), dst, index_db_size, meta_env_size, From e5ef5a6f9ce5d54654aeff6e0c51214c3290fcaf Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 17 May 2022 11:13:29 +0200 Subject: [PATCH 052/185] Remove an unused updates.rs file --- meilisearch-http/src/routes/indexes/updates.rs | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 meilisearch-http/src/routes/indexes/updates.rs diff --git a/meilisearch-http/src/routes/indexes/updates.rs b/meilisearch-http/src/routes/indexes/updates.rs deleted file mode 100644 index e69de29bb..000000000 From d2f457a07632887c3e4bfc0c7645dd6dc2f3efa3 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 17 May 2022 11:17:32 +0200 Subject: [PATCH 053/185] Rename the uid to taskUid in asynchronous response --- meilisearch-http/src/task.rs | 4 ++-- meilisearch-http/tests/auth/authorization.rs | 12 ++++++------ meilisearch-http/tests/common/index.rs | 2 +- meilisearch-http/tests/documents/add_documents.rs | 6 +++--- meilisearch-http/tests/index/delete_index.rs | 4 ++-- meilisearch-http/tests/index/stats.rs | 2 +- meilisearch-http/tests/settings/get_settings.rs | 2 +- meilisearch-http/tests/stats/mod.rs | 2 +- meilisearch-http/tests/tasks/mod.rs | 2 +- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 397fed618..a916d5ce8 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -349,7 +349,7 @@ impl From> for TaskListView { #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct SummarizedTaskView { - uid: TaskId, + 
task_uid: TaskId, index_uid: Option, status: TaskStatus, #[serde(rename = "type")] @@ -372,7 +372,7 @@ impl From for SummarizedTaskView { }; Self { - uid: other.id, + task_uid: other.id, index_uid: other.index_uid.map(|u| u.into_inner()), status: TaskStatus::Enqueued, task_type: other.content.into(), diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 25f32eb12..7d7ec1899 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -523,7 +523,7 @@ async fn error_creating_index_without_action() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202, "{:?}", response); - let task_id = response["uid"].as_u64().unwrap(); + let task_id = response["taskUid"].as_u64().unwrap(); let response = index.wait_task(task_id).await; assert_eq!(response["status"], "failed"); @@ -534,7 +534,7 @@ async fn error_creating_index_without_action() { let (response, code) = index.update_settings(settings).await; assert_eq!(code, 202); - let task_id = response["uid"].as_u64().unwrap(); + let task_id = response["taskUid"].as_u64().unwrap(); let response = index.wait_task(task_id).await; @@ -544,7 +544,7 @@ async fn error_creating_index_without_action() { // try to create a index via add specialized settings route let (response, code) = index.update_distinct_attribute(json!("test")).await; assert_eq!(code, 202); - let task_id = response["uid"].as_u64().unwrap(); + let task_id = response["taskUid"].as_u64().unwrap(); let response = index.wait_task(task_id).await; @@ -583,7 +583,7 @@ async fn lazy_create_index() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202, "{:?}", response); - let task_id = response["uid"].as_u64().unwrap(); + let task_id = response["taskUid"].as_u64().unwrap(); index.wait_task(task_id).await; @@ -597,7 +597,7 @@ async fn lazy_create_index() { let (response, code) = 
index.update_settings(settings).await; assert_eq!(code, 202); - let task_id = response["uid"].as_u64().unwrap(); + let task_id = response["taskUid"].as_u64().unwrap(); index.wait_task(task_id).await; @@ -609,7 +609,7 @@ async fn lazy_create_index() { let index = server.index("test2"); let (response, code) = index.update_distinct_attribute(json!("test")).await; assert_eq!(code, 202); - let task_id = response["uid"].as_u64().unwrap(); + let task_id = response["taskUid"].as_u64().unwrap(); index.wait_task(task_id).await; diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 6c44ea369..b0c7a3342 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -46,7 +46,7 @@ impl Index<'_> { .post_str(url, include_str!("../assets/test_set.json")) .await; assert_eq!(code, 202); - let update_id = response["uid"].as_i64().unwrap(); + let update_id = response["taskUid"].as_i64().unwrap(); self.wait_task(update_id as u64).await; update_id as u64 } diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index 0ac0436dc..238df6332 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -35,7 +35,7 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); assert_eq!(status_code, 202); - assert_eq!(response["uid"], 0); + assert_eq!(response["taskUid"], 0); // put let req = test::TestRequest::put() @@ -48,7 +48,7 @@ async fn add_documents_test_json_content_types() { let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); assert_eq!(status_code, 202); - assert_eq!(response["uid"], 1); + assert_eq!(response["taskUid"], 1); } /// any other content-type is must be refused @@ -599,7 +599,7 @@ async fn 
add_documents_no_index_creation() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["uid"], 0); + assert_eq!(response["taskUid"], 0); /* * currently we don’t check these field to stay ISO with meilisearch * assert_eq!(response["status"], "pending"); diff --git a/meilisearch-http/tests/index/delete_index.rs b/meilisearch-http/tests/index/delete_index.rs index 0674d0afd..f3cdf6631 100644 --- a/meilisearch-http/tests/index/delete_index.rs +++ b/meilisearch-http/tests/index/delete_index.rs @@ -52,10 +52,10 @@ async fn loop_delete_add_documents() { let mut tasks = Vec::new(); for _ in 0..50 { let (response, code) = index.add_documents(documents.clone(), None).await; - tasks.push(response["uid"].as_u64().unwrap()); + tasks.push(response["taskUid"].as_u64().unwrap()); assert_eq!(code, 202, "{}", response); let (response, code) = index.delete().await; - tasks.push(response["uid"].as_u64().unwrap()); + tasks.push(response["taskUid"].as_u64().unwrap()); assert_eq!(code, 202, "{}", response); } diff --git a/meilisearch-http/tests/index/stats.rs b/meilisearch-http/tests/index/stats.rs index 555c7311a..f55998998 100644 --- a/meilisearch-http/tests/index/stats.rs +++ b/meilisearch-http/tests/index/stats.rs @@ -35,7 +35,7 @@ async fn stats() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["uid"], 1); + assert_eq!(response["taskUid"], 1); index.wait_task(1).await; diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index 98b4f9558..9b3c31b63 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -122,7 +122,7 @@ async fn reset_all_settings() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - assert_eq!(response["uid"], 0); + assert_eq!(response["taskUid"], 0); index.wait_task(0).await; index 
diff --git a/meilisearch-http/tests/stats/mod.rs b/meilisearch-http/tests/stats/mod.rs index b9d185ca3..0629c2e29 100644 --- a/meilisearch-http/tests/stats/mod.rs +++ b/meilisearch-http/tests/stats/mod.rs @@ -54,7 +54,7 @@ async fn stats() { let (response, code) = index.add_documents(documents, None).await; assert_eq!(code, 202, "{}", response); - assert_eq!(response["uid"], 1); + assert_eq!(response["taskUid"], 1); index.wait_task(1).await; diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index 167b7b05f..6f64a8970 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -94,7 +94,7 @@ async fn list_tasks() { macro_rules! assert_valid_summarized_task { ($response:expr, $task_type:literal, $index:literal) => {{ assert_eq!($response.as_object().unwrap().len(), 5); - assert!($response["uid"].as_u64().is_some()); + assert!($response["taskUid"].as_u64().is_some()); assert_eq!($response["indexUid"], $index); assert_eq!($response["status"], "enqueued"); assert_eq!($response["type"], $task_type); From 80f7d873563e7f06acde532d4345f4a866390a14 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 17 May 2022 10:51:28 +0200 Subject: [PATCH 054/185] Remove the /indexes/:indexUid/tasks/... 
routes --- meilisearch-http/src/routes/indexes/mod.rs | 2 - meilisearch-http/src/routes/indexes/tasks.rs | 80 -------------------- 2 files changed, 82 deletions(-) delete mode 100644 meilisearch-http/src/routes/indexes/tasks.rs diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index bd74fd724..956761eb3 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -15,7 +15,6 @@ use crate::task::SummarizedTaskView; pub mod documents; pub mod search; pub mod settings; -pub mod tasks; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service( @@ -34,7 +33,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats)))) .service(web::scope("/documents").configure(documents::configure)) .service(web::scope("/search").configure(search::configure)) - .service(web::scope("/tasks").configure(tasks::configure)) .service(web::scope("/settings").configure(settings::configure)), ); } diff --git a/meilisearch-http/src/routes/indexes/tasks.rs b/meilisearch-http/src/routes/indexes/tasks.rs deleted file mode 100644 index 01ed85db8..000000000 --- a/meilisearch-http/src/routes/indexes/tasks.rs +++ /dev/null @@ -1,80 +0,0 @@ -use actix_web::{web, HttpRequest, HttpResponse}; -use log::debug; -use meilisearch_error::ResponseError; -use meilisearch_lib::MeiliSearch; -use serde::{Deserialize, Serialize}; -use serde_json::json; -use time::OffsetDateTime; - -use crate::analytics::Analytics; -use crate::extractors::authentication::{policies::*, GuardedData}; -use crate::extractors::sequential_extractor::SeqHandler; -use crate::task::{TaskListView, TaskView}; - -pub fn configure(cfg: &mut web::ServiceConfig) { - cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status)))) - .service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status)))); -} - -#[derive(Debug, Serialize)] 
-#[serde(rename_all = "camelCase")] -pub struct UpdateIndexResponse { - name: String, - uid: String, - #[serde(serialize_with = "time::serde::rfc3339::serialize")] - created_at: OffsetDateTime, - #[serde(serialize_with = "time::serde::rfc3339::serialize")] - updated_at: OffsetDateTime, - #[serde(serialize_with = "time::serde::rfc3339::serialize")] - primary_key: OffsetDateTime, -} - -#[derive(Deserialize)] -pub struct UpdateParam { - index_uid: String, - task_id: u64, -} - -pub async fn get_task_status( - meilisearch: GuardedData, MeiliSearch>, - index_uid: web::Path, - req: HttpRequest, - analytics: web::Data, -) -> Result { - analytics.publish( - "Index Tasks Seen".to_string(), - json!({ "per_task_uid": true }), - Some(&req), - ); - - let UpdateParam { index_uid, task_id } = index_uid.into_inner(); - - let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into(); - - debug!("returns: {:?}", task); - Ok(HttpResponse::Ok().json(task)) -} - -pub async fn get_all_tasks_status( - meilisearch: GuardedData, MeiliSearch>, - index_uid: web::Path, - req: HttpRequest, - analytics: web::Data, -) -> Result { - analytics.publish( - "Index Tasks Seen".to_string(), - json!({ "per_task_uid": false }), - Some(&req), - ); - - let tasks: TaskListView = meilisearch - .list_index_task(index_uid.into_inner(), None, None) - .await? 
- .into_iter() - .map(TaskView::from) - .collect::>() - .into(); - - debug!("returns: {:?}", tasks); - Ok(HttpResponse::Ok().json(tasks)) -} From 3684c822f11c882fe4682051e8357c92efca3d22 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 17 May 2022 16:08:23 +0200 Subject: [PATCH 055/185] Add indexUid filtering on the /tasks route --- Cargo.lock | 10 ++++ meilisearch-http/Cargo.toml | 1 + meilisearch-http/src/lib.rs | 2 +- meilisearch-http/src/routes/tasks.rs | 49 ++++++++++++++++---- meilisearch-http/src/task.rs | 43 +++++++++++++++-- meilisearch-http/tests/auth/authorization.rs | 4 +- meilisearch-http/tests/common/index.rs | 4 +- meilisearch-http/tests/tasks/mod.rs | 32 ------------- meilisearch-lib/src/index_controller/mod.rs | 3 +- meilisearch-lib/src/index_resolver/mod.rs | 9 ++++ meilisearch-lib/src/lib.rs | 2 +- 11 files changed, 106 insertions(+), 53 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index abdac2c1c..39eb78987 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2046,6 +2046,7 @@ dependencies = [ "rustls-pemfile", "segment", "serde", + "serde-cs", "serde_json", "serde_url_params", "sha-1", @@ -3085,6 +3086,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-cs" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d5b0435c9139761fbe5abeb1283234bcfbde88fadc2ae432579648fbce72ad" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" version = "1.0.136" diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 0a5cdff7f..75d0ac06e 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -62,6 +62,7 @@ rustls = "0.20.4" rustls-pemfile = "0.3.0" segment = { version = "0.2.0", optional = true } serde = { version = "1.0.136", features = ["derive"] } +serde-cs = "0.2.2" serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" siphasher = "0.3.10" diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs 
index d1f5d9da1..201013bc6 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -2,7 +2,7 @@ #[macro_use] pub mod error; pub mod analytics; -mod task; +pub mod task; #[macro_use] pub mod extractors; pub mod helpers; diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index ae932253a..64929d5e0 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -2,21 +2,33 @@ use actix_web::{web, HttpRequest, HttpResponse}; use meilisearch_error::ResponseError; use meilisearch_lib::tasks::task::TaskId; use meilisearch_lib::tasks::TaskFilter; -use meilisearch_lib::MeiliSearch; +use meilisearch_lib::{IndexUid, MeiliSearch}; +use serde::Deserialize; +use serde_cs::vec::CS; use serde_json::json; use crate::analytics::Analytics; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; -use crate::task::{TaskListView, TaskView}; +use crate::task::{TaskListView, TaskStatus, TaskType, TaskView}; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks)))) .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task)))); } +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct TasksFilter { + #[serde(rename = "type")] + type_: Option>, + status: Option>, + index_uid: Option>, +} + async fn get_tasks( meilisearch: GuardedData, MeiliSearch>, + params: web::Query, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -26,15 +38,34 @@ async fn get_tasks( Some(&req), ); + let TasksFilter { + type_, + status, + index_uid, + } = params.into_inner(); + let search_rules = &meilisearch.filters().search_rules; - let filters = if search_rules.is_index_authorized("*") { - None - } else { - let mut filters = TaskFilter::default(); - for (index, _policy) in search_rules.clone() { - filters.filter_index(index); 
+ let filters = match index_uid { + Some(indexes) => { + let mut filters = TaskFilter::default(); + for name in indexes.into_inner() { + if search_rules.is_index_authorized(&name) { + filters.filter_index(name.to_string()); + } + } + Some(filters) + } + None => { + if search_rules.is_index_authorized("*") { + None + } else { + let mut filters = TaskFilter::default(); + for (index, _policy) in search_rules.clone() { + filters.filter_index(index); + } + Some(filters) + } } - Some(filters) }; let tasks: TaskListView = meilisearch diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index a916d5ce8..0c22b8ed6 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -1,4 +1,5 @@ use std::fmt::Write; +use std::str::FromStr; use std::write; use meilisearch_error::ResponseError; @@ -8,14 +9,14 @@ use meilisearch_lib::tasks::batch::BatchId; use meilisearch_lib::tasks::task::{ DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult, }; -use serde::{Serialize, Serializer}; +use serde::{Deserialize, Serialize, Serializer}; use time::{Duration, OffsetDateTime}; use crate::AUTOBATCHING_ENABLED; -#[derive(Debug, Serialize)] +#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -enum TaskType { +pub enum TaskType { IndexCreation, IndexUpdate, IndexDeletion, @@ -50,15 +51,47 @@ impl From for TaskType { } } -#[derive(Debug, Serialize)] +impl FromStr for TaskType { + type Err = &'static str; + + fn from_str(status: &str) -> Result { + match status { + "indexCreation" => Ok(TaskType::IndexCreation), + "indexUpdate" => Ok(TaskType::IndexUpdate), + "indexDeletion" => Ok(TaskType::IndexDeletion), + "documentAddition" => Ok(TaskType::DocumentAddition), + "documentPartial" => Ok(TaskType::DocumentPartial), + "documentDeletion" => Ok(TaskType::DocumentDeletion), + "settingsUpdate" => Ok(TaskType::SettingsUpdate), + "clearAll" => Ok(TaskType::ClearAll), + _ => Err("invalid task type value"), + } + } +} + 
+#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -enum TaskStatus { +pub enum TaskStatus { Enqueued, Processing, Succeeded, Failed, } +impl FromStr for TaskStatus { + type Err = &'static str; + + fn from_str(status: &str) -> Result { + match status { + "enqueued" => Ok(TaskStatus::Enqueued), + "processing" => Ok(TaskStatus::Processing), + "succeeded" => Ok(TaskStatus::Succeeded), + "failed" => Ok(TaskStatus::Failed), + _ => Err("invalid task status value"), + } + } +} + #[derive(Debug, Serialize)] #[serde(untagged)] #[allow(clippy::large_enum_variant)] diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 7d7ec1899..56a1a13ca 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -16,8 +16,8 @@ pub static AUTHORIZATIONS: Lazy hashset!{"documents.get", "*"}, ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"}, ("GET", "/tasks") => hashset!{"tasks.get", "*"}, - ("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"}, - ("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"}, + ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "*"}, + ("GET", "/tasks/0") => hashset!{"tasks.get", "*"}, ("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"}, ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"}, ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"}, diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index b0c7a3342..9e86ac27e 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -122,12 +122,12 @@ impl Index<'_> { } pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) { - let url = format!("/indexes/{}/tasks/{}", self.uid, update_id); + let url = format!("/tasks/{}", update_id); self.service.get(url).await } pub async fn list_tasks(&self) 
-> (Value, StatusCode) { - let url = format!("/indexes/{}/tasks", self.uid); + let url = format!("/tasks?indexUid={}", self.uid); self.service.get(url).await } diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index 6f64a8970..ce0f56eb5 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -3,22 +3,6 @@ use serde_json::json; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; -#[actix_rt::test] -async fn error_get_task_unexisting_index() { - let server = Server::new().await; - let (response, code) = server.service.get("/indexes/test/tasks").await; - - let expected_response = json!({ - "message": "Index `test` not found.", - "code": "index_not_found", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index_not_found" - }); - - assert_eq!(response, expected_response); - assert_eq!(code, 404); -} - #[actix_rt::test] async fn error_get_unexisting_task_status() { let server = Server::new().await; @@ -58,22 +42,6 @@ async fn get_task_status() { // TODO check resonse format, as per #48 } -#[actix_rt::test] -async fn error_list_tasks_unexisting_index() { - let server = Server::new().await; - let (response, code) = server.index("test").list_tasks().await; - - let expected_response = json!({ - "message": "Index `test` not found.", - "code": "index_not_found", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index_not_found" - }); - - assert_eq!(response, expected_response); - assert_eq!(code, 404); -} - #[actix_rt::test] async fn list_tasks() { let server = Server::new().await; diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 30a6b6dc8..7ec159684 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -35,7 +35,8 @@ use error::Result; use self::error::IndexControllerError; use 
crate::index_resolver::index_store::{IndexStore, MapIndexStore}; use crate::index_resolver::meta_store::{HeedMetaStore, IndexMetaStore}; -use crate::index_resolver::{create_index_resolver, IndexResolver, IndexUid}; +pub use crate::index_resolver::IndexUid; +use crate::index_resolver::{create_index_resolver, IndexResolver}; use crate::update_file_store::UpdateFileStore; pub mod error; diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index cc0308f9e..1900061c7 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -4,6 +4,7 @@ pub mod meta_store; use std::convert::{TryFrom, TryInto}; use std::path::Path; +use std::str::FromStr; use std::sync::Arc; use error::{IndexResolverError, Result}; @@ -88,6 +89,14 @@ impl TryInto for String { } } +impl FromStr for IndexUid { + type Err = IndexResolverError; + + fn from_str(s: &str) -> Result { + IndexUid::new(s.to_string()) + } +} + pub struct IndexResolver { index_uuid_store: U, index_store: I, diff --git a/meilisearch-lib/src/lib.rs b/meilisearch-lib/src/lib.rs index 3d3d5e860..52da63027 100644 --- a/meilisearch-lib/src/lib.rs +++ b/meilisearch-lib/src/lib.rs @@ -13,7 +13,7 @@ mod update_file_store; use std::path::Path; -pub use index_controller::MeiliSearch; +pub use index_controller::{IndexUid, MeiliSearch}; pub use milli; pub use milli::heed; From 8509243e682b0317630f491cdd2605f424d3eaa3 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 18 May 2022 12:07:06 +0200 Subject: [PATCH 056/185] Implement the status and type filtering on the tasks route --- meilisearch-http/src/routes/tasks.rs | 85 ++++++++++++++++++++++++-- meilisearch-http/src/task.rs | 21 +++++-- meilisearch-http/tests/common/index.rs | 11 ++++ meilisearch-http/tests/tasks/mod.rs | 79 ++++++++++++++++++++++++ 4 files changed, 185 insertions(+), 11 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 
64929d5e0..02f700ccd 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -1,6 +1,7 @@ use actix_web::{web, HttpRequest, HttpResponse}; use meilisearch_error::ResponseError; -use meilisearch_lib::tasks::task::TaskId; +use meilisearch_lib::milli::update::IndexDocumentsMethod; +use meilisearch_lib::tasks::task::{DocumentDeletion, TaskContent, TaskEvent, TaskId}; use meilisearch_lib::tasks::TaskFilter; use meilisearch_lib::{IndexUid, MeiliSearch}; use serde::Deserialize; @@ -19,16 +20,51 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct TasksFilter { +pub struct TaskFilterQuery { #[serde(rename = "type")] type_: Option>, status: Option>, index_uid: Option>, } +#[rustfmt::skip] +fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool { + matches!((type_, content), + (TaskType::IndexCreation, TaskContent::IndexCreation { .. }) + | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. }) + | (TaskType::IndexDeletion, TaskContent::IndexDeletion) + | (TaskType::DocumentAddition, TaskContent::DocumentAddition { + merge_strategy: IndexDocumentsMethod::ReplaceDocuments, + .. + }) + | (TaskType::DocumentPartial, TaskContent::DocumentAddition { + merge_strategy: IndexDocumentsMethod::UpdateDocuments, + .. + }) + | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(DocumentDeletion::Ids(_))) + | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. }) + | (TaskType::ClearAll, TaskContent::DocumentDeletion(DocumentDeletion::Clear)) + ) +} + +fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool { + events.last().map_or(false, |event| { + matches!( + (status, event), + (TaskStatus::Enqueued, TaskEvent::Created(_)) + | ( + TaskStatus::Processing, + TaskEvent::Processing(_) | TaskEvent::Batched { .. } + ) + | (TaskStatus::Succeeded, TaskEvent::Succeded { .. 
}) + | (TaskStatus::Failed, TaskEvent::Failed { .. }), + ) + }) +} + async fn get_tasks( meilisearch: GuardedData, MeiliSearch>, - params: web::Query, + params: web::Query, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -38,14 +74,17 @@ async fn get_tasks( Some(&req), ); - let TasksFilter { + let TaskFilterQuery { type_, status, index_uid, } = params.into_inner(); let search_rules = &meilisearch.filters().search_rules; - let filters = match index_uid { + + // We first filter on potential indexes and make sure + // that the search filter restrictions are also applied. + let indexes_filters = match index_uid { Some(indexes) => { let mut filters = TaskFilter::default(); for name in indexes.into_inner() { @@ -68,6 +107,42 @@ async fn get_tasks( } }; + // Then we complete the task filter with other potential status and types filters. + let filters = match (type_, status) { + (Some(CS(types)), Some(CS(statuses))) => { + let mut filters = indexes_filters.unwrap_or_default(); + filters.filter_fn(move |task| { + let matches_type = types + .iter() + .any(|t| task_type_matches_content(&t, &task.content)); + let matches_status = statuses + .iter() + .any(|s| task_status_matches_events(&s, &task.events)); + matches_type && matches_status + }); + Some(filters) + } + (Some(CS(types)), None) => { + let mut filters = indexes_filters.unwrap_or_default(); + filters.filter_fn(move |task| { + types + .iter() + .any(|t| task_type_matches_content(&t, &task.content)) + }); + Some(filters) + } + (None, Some(CS(statuses))) => { + let mut filters = indexes_filters.unwrap_or_default(); + filters.filter_fn(move |task| { + statuses + .iter() + .any(|s| task_status_matches_events(&s, &task.events)) + }); + Some(filters) + } + (None, None) => indexes_filters, + }; + let tasks: TaskListView = meilisearch .list_tasks(filters, None, None) .await? 
diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 0c22b8ed6..4ecb6cead 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -52,9 +52,9 @@ impl From for TaskType { } impl FromStr for TaskType { - type Err = &'static str; + type Err = String; - fn from_str(status: &str) -> Result { + fn from_str(status: &str) -> Result { match status { "indexCreation" => Ok(TaskType::IndexCreation), "indexUpdate" => Ok(TaskType::IndexUpdate), @@ -64,7 +64,12 @@ impl FromStr for TaskType { "documentDeletion" => Ok(TaskType::DocumentDeletion), "settingsUpdate" => Ok(TaskType::SettingsUpdate), "clearAll" => Ok(TaskType::ClearAll), - _ => Err("invalid task type value"), + unknown => Err(format!( + "invalid task type `{}` value, expecting one of: \ + indexCreation, indexUpdate, indexDeletion, documentAddition, \ + documentPartial, documentDeletion, settingsUpdate, or clearAll", + unknown + )), } } } @@ -79,15 +84,19 @@ pub enum TaskStatus { } impl FromStr for TaskStatus { - type Err = &'static str; + type Err = String; - fn from_str(status: &str) -> Result { + fn from_str(status: &str) -> Result { match status { "enqueued" => Ok(TaskStatus::Enqueued), "processing" => Ok(TaskStatus::Processing), "succeeded" => Ok(TaskStatus::Succeeded), "failed" => Ok(TaskStatus::Failed), - _ => Err("invalid task status value"), + unknown => Err(format!( + "invalid task status `{}` value, expecting one of: \ + enqueued, processing, succeeded, or failed", + unknown + )), } } } diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 9e86ac27e..bdce22db2 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -131,6 +131,17 @@ impl Index<'_> { self.service.get(url).await } + pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) { + let mut url = format!("/tasks?indexUid={}", self.uid); + if !type_.is_empty() { + url += 
&format!("&type={}", type_.join(",")); + } + if !status.is_empty() { + url += &format!("&status={}", status.join(",")); + } + self.service.get(url).await + } + pub async fn get_document( &self, id: u64, diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index ce0f56eb5..300cddde7 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -59,6 +59,85 @@ async fn list_tasks() { assert_eq!(response["results"].as_array().unwrap().len(), 2); } +#[actix_rt::test] +async fn list_tasks_status_filtered() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents( + serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), + None, + ) + .await; + + let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 1); + + let (response, code) = index.filtered_tasks(&[], &["processing"]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 1); + + index.wait_task(1).await; + + let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn list_tasks_type_filtered() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents( + serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), + None, + ) + .await; + + let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 1); + + let (response, code) = index + .filtered_tasks(&["indexCreation", "documentAddition"], &[]) + 
.await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn list_tasks_status_and_type_filtered() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents( + serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), + None, + ) + .await; + + let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 0); + + let (response, code) = index + .filtered_tasks( + &["indexCreation", "documentAddition"], + &["succeeded", "processing"], + ) + .await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + macro_rules! assert_valid_summarized_task { ($response:expr, $task_type:literal, $index:literal) => {{ assert_eq!($response.as_object().unwrap().len(), 5); From 3f80468f1827c537c0a578cc9d3f8c612203e809 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 25 May 2022 12:05:24 +0200 Subject: [PATCH 057/185] Rename the Tasks Types --- meilisearch-http/src/routes/tasks.rs | 32 ++----- meilisearch-http/src/task.rs | 94 +++++++++---------- .../tests/documents/add_documents.rs | 10 +- meilisearch-http/tests/dumps/mod.rs | 18 ++-- meilisearch-http/tests/tasks/mod.rs | 17 ++-- 5 files changed, 75 insertions(+), 96 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 02f700ccd..66f4bbbdb 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -1,7 +1,6 @@ use actix_web::{web, HttpRequest, HttpResponse}; use meilisearch_error::ResponseError; -use meilisearch_lib::milli::update::IndexDocumentsMethod; -use meilisearch_lib::tasks::task::{DocumentDeletion, TaskContent, TaskEvent, TaskId}; +use 
meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId}; use meilisearch_lib::tasks::TaskFilter; use meilisearch_lib::{IndexUid, MeiliSearch}; use serde::Deserialize; @@ -30,34 +29,23 @@ pub struct TaskFilterQuery { #[rustfmt::skip] fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool { matches!((type_, content), - (TaskType::IndexCreation, TaskContent::IndexCreation { .. }) + (TaskType::IndexCreation, TaskContent::IndexCreation { .. }) | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. }) | (TaskType::IndexDeletion, TaskContent::IndexDeletion) - | (TaskType::DocumentAddition, TaskContent::DocumentAddition { - merge_strategy: IndexDocumentsMethod::ReplaceDocuments, - .. - }) - | (TaskType::DocumentPartial, TaskContent::DocumentAddition { - merge_strategy: IndexDocumentsMethod::UpdateDocuments, - .. - }) - | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(DocumentDeletion::Ids(_))) + | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. }) + | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(_)) | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. }) - | (TaskType::ClearAll, TaskContent::DocumentDeletion(DocumentDeletion::Clear)) ) } +#[rustfmt::skip] fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool { events.last().map_or(false, |event| { - matches!( - (status, event), - (TaskStatus::Enqueued, TaskEvent::Created(_)) - | ( - TaskStatus::Processing, - TaskEvent::Processing(_) | TaskEvent::Batched { .. } - ) - | (TaskStatus::Succeeded, TaskEvent::Succeded { .. }) - | (TaskStatus::Failed, TaskEvent::Failed { .. }), + matches!((status, event), + (TaskStatus::Enqueued, TaskEvent::Created(_)) + | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. }) + | (TaskStatus::Succeeded, TaskEvent::Succeded { .. }) + | (TaskStatus::Failed, TaskEvent::Failed { .. 
}), ) }) } diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 4ecb6cead..c7aaf0030 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -4,7 +4,6 @@ use std::write; use meilisearch_error::ResponseError; use meilisearch_lib::index::{Settings, Unchecked}; -use meilisearch_lib::milli::update::IndexDocumentsMethod; use meilisearch_lib::tasks::batch::BatchId; use meilisearch_lib::tasks::task::{ DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult, @@ -20,33 +19,22 @@ pub enum TaskType { IndexCreation, IndexUpdate, IndexDeletion, - DocumentAddition, - DocumentPartial, + DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, - ClearAll, DumpCreation, } impl From for TaskType { fn from(other: TaskContent) -> Self { match other { - TaskContent::DocumentAddition { - merge_strategy: IndexDocumentsMethod::ReplaceDocuments, - .. - } => TaskType::DocumentAddition, - TaskContent::DocumentAddition { - merge_strategy: IndexDocumentsMethod::UpdateDocuments, - .. - } => TaskType::DocumentPartial, - TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll, - TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion, - TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate, - TaskContent::IndexDeletion => TaskType::IndexDeletion, TaskContent::IndexCreation { .. } => TaskType::IndexCreation, TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate, + TaskContent::IndexDeletion => TaskType::IndexDeletion, + TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate, + TaskContent::DocumentDeletion(_) => TaskType::DocumentDeletion, + TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate, TaskContent::Dump { .. 
} => TaskType::DumpCreation, - _ => unreachable!("unexpected task type"), } } } @@ -55,21 +43,27 @@ impl FromStr for TaskType { type Err = String; fn from_str(status: &str) -> Result { - match status { - "indexCreation" => Ok(TaskType::IndexCreation), - "indexUpdate" => Ok(TaskType::IndexUpdate), - "indexDeletion" => Ok(TaskType::IndexDeletion), - "documentAddition" => Ok(TaskType::DocumentAddition), - "documentPartial" => Ok(TaskType::DocumentPartial), - "documentDeletion" => Ok(TaskType::DocumentDeletion), - "settingsUpdate" => Ok(TaskType::SettingsUpdate), - "clearAll" => Ok(TaskType::ClearAll), - unknown => Err(format!( - "invalid task type `{}` value, expecting one of: \ - indexCreation, indexUpdate, indexDeletion, documentAddition, \ - documentPartial, documentDeletion, settingsUpdate, or clearAll", - unknown - )), + if status.eq_ignore_ascii_case("indexCreation") { + Ok(TaskType::IndexCreation) + } else if status.eq_ignore_ascii_case("indexUpdate") { + Ok(TaskType::IndexUpdate) + } else if status.eq_ignore_ascii_case("indexDeletion") { + Ok(TaskType::IndexDeletion) + } else if status.eq_ignore_ascii_case("documentAdditionOrUpdate") { + Ok(TaskType::DocumentAdditionOrUpdate) + } else if status.eq_ignore_ascii_case("documentDeletion") { + Ok(TaskType::DocumentDeletion) + } else if status.eq_ignore_ascii_case("settingsUpdate") { + Ok(TaskType::SettingsUpdate) + } else if status.eq_ignore_ascii_case("dumpCreation") { + Ok(TaskType::DumpCreation) + } else { + Err(format!( + "invalid task type `{}`, expecting one of: \ + indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \ + documentDeletion, settingsUpdate, dumpCreation", + status + )) } } } @@ -87,16 +81,20 @@ impl FromStr for TaskStatus { type Err = String; fn from_str(status: &str) -> Result { - match status { - "enqueued" => Ok(TaskStatus::Enqueued), - "processing" => Ok(TaskStatus::Processing), - "succeeded" => Ok(TaskStatus::Succeeded), - "failed" => Ok(TaskStatus::Failed), - unknown => 
Err(format!( - "invalid task status `{}` value, expecting one of: \ + if status.eq_ignore_ascii_case("enqueued") { + Ok(TaskStatus::Enqueued) + } else if status.eq_ignore_ascii_case("processing") { + Ok(TaskStatus::Processing) + } else if status.eq_ignore_ascii_case("succeeded") { + Ok(TaskStatus::Succeeded) + } else if status.eq_ignore_ascii_case("failed") { + Ok(TaskStatus::Failed) + } else { + Err(format!( + "invalid task status `{}`, expecting one of: \ enqueued, processing, succeeded, or failed", - unknown - )), + status, + )) } } } @@ -214,22 +212,14 @@ impl From for TaskView { let (task_type, mut details) = match content { TaskContent::DocumentAddition { - merge_strategy, - documents_count, - .. + documents_count, .. } => { let details = TaskDetails::DocumentAddition { received_documents: documents_count, indexed_documents: None, }; - let task_type = match merge_strategy { - IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial, - IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition, - _ => unreachable!("Unexpected document merge strategy."), - }; - - (task_type, Some(details)) + (TaskType::DocumentAdditionOrUpdate, Some(details)) } TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => ( TaskType::DocumentDeletion, @@ -239,7 +229,7 @@ impl From for TaskView { }), ), TaskContent::DocumentDeletion(DocumentDeletion::Clear) => ( - TaskType::ClearAll, + TaskType::DocumentDeletion, Some(TaskDetails::ClearAll { deleted_documents: None, }), diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index 238df6332..ab271ce18 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -615,7 +615,7 @@ async fn add_documents_no_index_creation() { assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentAddition"); + 
assert_eq!(response["type"], "documentAdditionOrUpdate"); assert_eq!(response["details"]["receivedDocuments"], 1); assert_eq!(response["details"]["indexedDocuments"], 1); @@ -685,7 +685,7 @@ async fn document_addition_with_primary_key() { assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentAddition"); + assert_eq!(response["type"], "documentAdditionOrUpdate"); assert_eq!(response["details"]["receivedDocuments"], 1); assert_eq!(response["details"]["indexedDocuments"], 1); @@ -714,7 +714,7 @@ async fn document_update_with_primary_key() { assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentPartial"); + assert_eq!(response["type"], "documentAdditionOrUpdate"); assert_eq!(response["details"]["indexedDocuments"], 1); assert_eq!(response["details"]["receivedDocuments"], 1); @@ -818,7 +818,7 @@ async fn add_larger_dataset() { let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); - assert_eq!(response["type"], "documentAddition"); + assert_eq!(response["type"], "documentAdditionOrUpdate"); assert_eq!(response["details"]["indexedDocuments"], 77); assert_eq!(response["details"]["receivedDocuments"], 77); let (response, code) = index @@ -840,7 +840,7 @@ async fn update_larger_dataset() { index.wait_task(0).await; let (response, code) = index.get_task(0).await; assert_eq!(code, 200); - assert_eq!(response["type"], "documentPartial"); + assert_eq!(response["type"], "documentAdditionOrUpdate"); assert_eq!(response["details"]["indexedDocuments"], 77); let (response, code) = index .get_all_documents(GetAllDocumentsOptions { diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 22625f17f..6d6e6494a 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -69,7 +69,7 
@@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) ); // finally we're just going to check that we can still get a few documents by id @@ -134,7 +134,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", 
"overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) ); // finally we're just going to check that we can still get a few documents by id @@ -199,7 +199,7 @@ async fn import_dump_v2_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( tasks["results"][0], - json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); assert_eq!( tasks["results"][92], @@ -268,7 +268,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": 
"documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) ); // finally we're just going to check that we can still get a few documents by id @@ -333,7 +333,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": 
"2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) ); // finally we're just going to check that we can still get a few documents by id @@ -398,7 +398,7 @@ async fn import_dump_v3_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( tasks["results"][0], - json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); assert_eq!( tasks["results"][92], @@ -467,7 +467,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) ); // finally we're just going to check that we can still get a few documents by id @@ -532,7 +532,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, 
"indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) ); // finally we're just going to check that we can still get a few documents by id @@ -597,7 +597,7 @@ async fn import_dump_v4_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( tasks["results"][0], - json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": 
"2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); assert_eq!( tasks["results"][92], diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index 300cddde7..80bf6cb3d 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -76,9 +76,10 @@ async fn list_tasks_status_filtered() { assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 1); - let (response, code) = index.filtered_tasks(&[], &["processing"]).await; - assert_eq!(code, 200, "{}", response); - assert_eq!(response["results"].as_array().unwrap().len(), 1); + // We can't be sure that the update isn't already processed so we can't test this + // let (response, code) = index.filtered_tasks(&[], &["processing"]).await; + // assert_eq!(code, 200, "{}", response); + // assert_eq!(response["results"].as_array().unwrap().len(), 1); index.wait_task(1).await; @@ -105,7 +106,7 @@ async fn list_tasks_type_filtered() { assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = index - .filtered_tasks(&["indexCreation", "documentAddition"], &[]) + .filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[]) .await; assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 2); @@ -130,7 +131,7 @@ async fn list_tasks_status_and_type_filtered() { let (response, code) = index .filtered_tasks( - &["indexCreation", "documentAddition"], + &["indexCreation", "documentAdditionOrUpdate"], &["succeeded", "processing"], ) .await; @@ -166,16 +167,16 @@ async 
fn test_summarized_task_view() { assert_valid_summarized_task!(response, "settingsUpdate", "test"); let (response, _) = index.update_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentPartial", "test"); + assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test"); let (response, _) = index.add_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentAddition", "test"); + assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test"); let (response, _) = index.delete_document(1).await; assert_valid_summarized_task!(response, "documentDeletion", "test"); let (response, _) = index.clear_all_documents().await; - assert_valid_summarized_task!(response, "clearAll", "test"); + assert_valid_summarized_task!(response, "documentDeletion", "test"); let (response, _) = index.delete().await; assert_valid_summarized_task!(response, "indexDeletion", "test"); From 36d94257d8b6850dbd222180df8d0d969c05d439 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 25 May 2022 18:21:50 +0200 Subject: [PATCH 058/185] Make clippy happy --- meilisearch-http/src/routes/tasks.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 66f4bbbdb..34132db0d 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -102,10 +102,10 @@ async fn get_tasks( filters.filter_fn(move |task| { let matches_type = types .iter() - .any(|t| task_type_matches_content(&t, &task.content)); + .any(|t| task_type_matches_content(t, &task.content)); let matches_status = statuses .iter() - .any(|s| task_status_matches_events(&s, &task.events)); + .any(|s| task_status_matches_events(s, &task.events)); matches_type && matches_status }); Some(filters) @@ -115,7 +115,7 @@ async fn get_tasks( filters.filter_fn(move |task| { types .iter() - .any(|t| task_type_matches_content(&t, 
&task.content)) + .any(|t| task_type_matches_content(t, &task.content)) }); Some(filters) } @@ -124,7 +124,7 @@ async fn get_tasks( filters.filter_fn(move |task| { statuses .iter() - .any(|s| task_status_matches_events(&s, &task.events)) + .any(|s| task_status_matches_events(s, &task.events)) }); Some(filters) } From b82c86c8f5fb49e3f52ec30182779ba1c6a219a3 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 30 May 2022 13:59:27 +0200 Subject: [PATCH 059/185] Allow users to filter indexUid with a * --- meilisearch-http/src/routes/tasks.rs | 43 ++++++++++++++++++++++++++-- meilisearch-http/tests/tasks/mod.rs | 25 ++++++++++++++++ 2 files changed, 65 insertions(+), 3 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 34132db0d..93af5af26 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -6,6 +6,7 @@ use meilisearch_lib::{IndexUid, MeiliSearch}; use serde::Deserialize; use serde_cs::vec::CS; use serde_json::json; +use std::str::FromStr; use crate::analytics::Analytics; use crate::extractors::authentication::{policies::*, GuardedData}; @@ -23,7 +24,26 @@ pub struct TaskFilterQuery { #[serde(rename = "type")] type_: Option>, status: Option>, - index_uid: Option>, + index_uid: Option>, +} + +/// A type that tries to match either a star (*) or an IndexUid. +#[derive(Debug)] +enum StarOrIndexUid { + Star, + IndexUid(IndexUid), +} + +impl FromStr for StarOrIndexUid { + type Err = ::Err; + + fn from_str(s: &str) -> Result { + if s.trim() == "*" { + Ok(StarOrIndexUid::Star) + } else { + IndexUid::from_str(s).map(StarOrIndexUid::IndexUid) + } + } } #[rustfmt::skip] @@ -70,12 +90,29 @@ async fn get_tasks( let search_rules = &meilisearch.filters().search_rules; - // We first filter on potential indexes and make sure + // We first tranform a potential indexUid=* into a + // "not specified indexUid filter". 
+ let index_uid = + match index_uid { + Some(indexes) => indexes + .into_inner() + .into_iter() + .fold(Some(Vec::new()), |acc, val| match (acc, val) { + (None, _) | (_, StarOrIndexUid::Star) => None, + (Some(mut acc), StarOrIndexUid::IndexUid(uid)) => { + acc.push(uid); + Some(acc) + } + }), + None => None, + }; + + // Then we filter on potential indexes and make sure // that the search filter restrictions are also applied. let indexes_filters = match index_uid { Some(indexes) => { let mut filters = TaskFilter::default(); - for name in indexes.into_inner() { + for name in indexes { if search_rules.is_index_authorized(&name) { filters.filter_index(name.to_string()); } diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index 80bf6cb3d..b14491fd2 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -59,6 +59,31 @@ async fn list_tasks() { assert_eq!(response["results"].as_array().unwrap().len(), 2); } +#[actix_rt::test] +async fn list_tasks_with_index_filter() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents( + serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), + None, + ) + .await; + let (response, code) = index.service.get("/tasks?indexUid=test").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index.service.get("/tasks?indexUid=*").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + #[actix_rt::test] async fn list_tasks_status_filtered() { let server = Server::new().await; From 082d6b89ffffe1d5e30a9ddadb7d2211dec56420 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 30 May 
2022 17:01:51 +0200 Subject: [PATCH 060/185] Make the StarOrIndexUid Generic and call it StarOr --- meilisearch-http/src/routes/tasks.rs | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 93af5af26..a5dcc6513 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -24,24 +24,25 @@ pub struct TaskFilterQuery { #[serde(rename = "type")] type_: Option>, status: Option>, - index_uid: Option>, + index_uid: Option>>, } -/// A type that tries to match either a star (*) or an IndexUid. +/// A type that tries to match either a star (*) or +/// any other thing that implements `FromStr`. #[derive(Debug)] -enum StarOrIndexUid { +enum StarOr { Star, - IndexUid(IndexUid), + Other(T), } -impl FromStr for StarOrIndexUid { - type Err = ::Err; +impl FromStr for StarOr { + type Err = T::Err; fn from_str(s: &str) -> Result { if s.trim() == "*" { - Ok(StarOrIndexUid::Star) + Ok(StarOr::Star) } else { - IndexUid::from_str(s).map(StarOrIndexUid::IndexUid) + T::from_str(s).map(StarOr::Other) } } } @@ -98,8 +99,8 @@ async fn get_tasks( .into_inner() .into_iter() .fold(Some(Vec::new()), |acc, val| match (acc, val) { - (None, _) | (_, StarOrIndexUid::Star) => None, - (Some(mut acc), StarOrIndexUid::IndexUid(uid)) => { + (None, _) | (_, StarOr::Star) => None, + (Some(mut acc), StarOr::Other(uid)) => { acc.push(uid); Some(acc) } From 8800b348f095891ef447a03df361982ea46ad199 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 30 May 2022 17:12:53 +0200 Subject: [PATCH 061/185] Implement the StarOr on all the tasks filters --- meilisearch-http/src/routes/tasks.rs | 49 +++++++++++++++------------- meilisearch-http/tests/tasks/mod.rs | 27 ++++++++++++++- 2 files changed, 52 insertions(+), 24 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index a5dcc6513..6bb9d1e91 100644 --- 
a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -22,8 +22,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct TaskFilterQuery { #[serde(rename = "type")] - type_: Option>, - status: Option>, + type_: Option>>, + status: Option>>, index_uid: Option>>, } @@ -47,6 +47,20 @@ impl FromStr for StarOr { } } +/// Extracts the raw values from the `StarOr` types and +/// return None if a `StarOr::Star` is encountered. +fn fold_star_or(content: Vec>) -> Option> { + content + .into_iter() + .fold(Some(Vec::new()), |acc, val| match (acc, val) { + (None, _) | (_, StarOr::Star) => None, + (Some(mut acc), StarOr::Other(uid)) => { + acc.push(uid); + Some(acc) + } + }) +} + #[rustfmt::skip] fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool { matches!((type_, content), @@ -91,25 +105,14 @@ async fn get_tasks( let search_rules = &meilisearch.filters().search_rules; - // We first tranform a potential indexUid=* into a - // "not specified indexUid filter". - let index_uid = - match index_uid { - Some(indexes) => indexes - .into_inner() - .into_iter() - .fold(Some(Vec::new()), |acc, val| match (acc, val) { - (None, _) | (_, StarOr::Star) => None, - (Some(mut acc), StarOr::Other(uid)) => { - acc.push(uid); - Some(acc) - } - }), - None => None, - }; + // We first tranform a potential indexUid=* into a "not specified indexUid filter" + // for every one of the filters: type, status, and indexUid. + let type_ = type_.map(CS::into_inner).and_then(fold_star_or); + let status = status.map(CS::into_inner).and_then(fold_star_or); + let index_uid = index_uid.map(CS::into_inner).and_then(fold_star_or); - // Then we filter on potential indexes and make sure - // that the search filter restrictions are also applied. + // Then we filter on potential indexes and make sure that the search filter + // restrictions are also applied. 
let indexes_filters = match index_uid { Some(indexes) => { let mut filters = TaskFilter::default(); @@ -135,7 +138,7 @@ async fn get_tasks( // Then we complete the task filter with other potential status and types filters. let filters = match (type_, status) { - (Some(CS(types)), Some(CS(statuses))) => { + (Some(types), Some(statuses)) => { let mut filters = indexes_filters.unwrap_or_default(); filters.filter_fn(move |task| { let matches_type = types @@ -148,7 +151,7 @@ async fn get_tasks( }); Some(filters) } - (Some(CS(types)), None) => { + (Some(types), None) => { let mut filters = indexes_filters.unwrap_or_default(); filters.filter_fn(move |task| { types @@ -157,7 +160,7 @@ async fn get_tasks( }); Some(filters) } - (None, Some(CS(statuses))) => { + (None, Some(statuses)) => { let mut filters = indexes_filters.unwrap_or_default(); filters.filter_fn(move |task| { statuses diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index b14491fd2..1ba7a4936 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -60,7 +60,7 @@ async fn list_tasks() { } #[actix_rt::test] -async fn list_tasks_with_index_filter() { +async fn list_tasks_with_star_filters() { let server = Server::new().await; let index = server.index("test"); index.create(None).await; @@ -82,6 +82,31 @@ async fn list_tasks_with_index_filter() { let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index.service.get("/tasks?type=*").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index + .service + .get("/tasks?type=*,documentAdditionOrUpdate&status=*") + .await; + assert_eq!(code, 200, "{:?}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index + .service + 
.get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test") + .await; + assert_eq!(code, 200, "{:?}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index + .service + .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*") + .await; + assert_eq!(code, 200, "{:?}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); } #[actix_rt::test] From 1465b5e0ffaf2d80f7ab9ada11304a1dab76d40a Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 30 May 2022 17:38:25 +0200 Subject: [PATCH 062/185] Refactorize the tasks filters by moving the match inside --- meilisearch-http/src/routes/tasks.rs | 51 +++++++++++----------------- 1 file changed, 20 insertions(+), 31 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 6bb9d1e91..1fe903abf 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -137,39 +137,28 @@ async fn get_tasks( }; // Then we complete the task filter with other potential status and types filters. 
- let filters = match (type_, status) { - (Some(types), Some(statuses)) => { - let mut filters = indexes_filters.unwrap_or_default(); - filters.filter_fn(move |task| { - let matches_type = types + let filters = if type_.is_some() || status.is_some() { + let mut filters = indexes_filters.unwrap_or_default(); + filters.filter_fn(move |task| { + let matches_type = match &type_ { + Some(types) => types .iter() - .any(|t| task_type_matches_content(t, &task.content)); - let matches_status = statuses + .any(|t| task_type_matches_content(t, &task.content)), + None => true, + }; + + let matches_status = match &status { + Some(statuses) => statuses .iter() - .any(|s| task_status_matches_events(s, &task.events)); - matches_type && matches_status - }); - Some(filters) - } - (Some(types), None) => { - let mut filters = indexes_filters.unwrap_or_default(); - filters.filter_fn(move |task| { - types - .iter() - .any(|t| task_type_matches_content(t, &task.content)) - }); - Some(filters) - } - (None, Some(statuses)) => { - let mut filters = indexes_filters.unwrap_or_default(); - filters.filter_fn(move |task| { - statuses - .iter() - .any(|s| task_status_matches_events(s, &task.events)) - }); - Some(filters) - } - (None, None) => indexes_filters, + .any(|t| task_status_matches_events(t, &task.events)), + None => true, + }; + + matches_type && matches_status + }); + Some(filters) + } else { + indexes_filters }; let tasks: TaskListView = meilisearch From ddad6cc0691de278becdc04b47033d2267dd3c44 Mon Sep 17 00:00:00 2001 From: Irevoire Date: Wed, 25 May 2022 11:51:26 +0200 Subject: [PATCH 063/185] feat(http): update the documents resource - Return Documents API resources on `/documents` in an array in the the results field. - Add limit, offset and total in the response body. - Rename `attributesToRetrieve` into `fields` (only for the `/documents` endpoints, not for the `/search` ones). 
- The `displayedAttributes` settings does not impact anymore the displayed fields returned in the `/documents` endpoints. These settings only impacts the `/search` endpoint. Fix #2372 --- .../src/routes/indexes/documents.rs | 59 +++--- meilisearch-http/src/routes/mod.rs | 34 ++++ meilisearch-http/src/routes/tasks.rs | 37 +--- meilisearch-http/tests/common/index.rs | 16 +- .../tests/documents/add_documents.rs | 4 +- .../tests/documents/delete_documents.rs | 10 +- .../tests/documents/get_documents.rs | 179 ++++++++++-------- meilisearch-http/tests/dumps/mod.rs | 36 ++-- meilisearch-lib/src/index/index.rs | 29 ++- meilisearch-lib/src/index/mod.rs | 6 +- meilisearch-lib/src/index_controller/mod.rs | 7 +- 11 files changed, 217 insertions(+), 200 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index 66551ec77..4c87044db 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -13,7 +13,8 @@ use meilisearch_lib::MeiliSearch; use mime::Mime; use once_cell::sync::Lazy; use serde::Deserialize; -use serde_json::Value; +use serde_cs::vec::CS; +use serde_json::{json, Value}; use tokio::sync::mpsc; use crate::analytics::Analytics; @@ -21,11 +22,9 @@ use crate::error::MeilisearchHttpError; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::payload::Payload; use crate::extractors::sequential_extractor::SeqHandler; +use crate::routes::{fold_star_or, StarOr}; use crate::task::SummarizedTaskView; -const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0; -const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20; - static ACCEPTED_CONTENT_TYPE: Lazy> = Lazy::new(|| { vec![ "application/json".to_string(), @@ -86,14 +85,24 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct GetDocument { + fields: Option>>, +} + pub 
async fn get_document( meilisearch: GuardedData, MeiliSearch>, path: web::Path, + params: web::Query, ) -> Result { let index = path.index_uid.clone(); let id = path.document_id.clone(); + let GetDocument { fields } = params.into_inner(); + let attributes_to_retrieve = fields.map(CS::into_inner).and_then(fold_star_or); + let document = meilisearch - .document(index, id, None as Option>) + .document(index, id, attributes_to_retrieve) .await?; debug!("returns: {:?}", document); Ok(HttpResponse::Ok().json(document)) @@ -113,12 +122,16 @@ pub async fn delete_document( Ok(HttpResponse::Accepted().json(task)) } +const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; + #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct BrowseQuery { - offset: Option, - limit: Option, - attributes_to_retrieve: Option, + #[serde(default)] + offset: usize, + #[serde(default = "PAGINATION_DEFAULT_LIMIT")] + limit: usize, + fields: Option>>, } pub async fn get_all_documents( @@ -127,27 +140,21 @@ pub async fn get_all_documents( params: web::Query, ) -> Result { debug!("called with params: {:?}", params); - let attributes_to_retrieve = params.attributes_to_retrieve.as_ref().and_then(|attrs| { - let mut names = Vec::new(); - for name in attrs.split(',').map(String::from) { - if name == "*" { - return None; - } - names.push(name); - } - Some(names) - }); + let BrowseQuery { + offset, + limit, + fields, + } = params.into_inner(); + let attributes_to_retrieve = fields.map(CS::into_inner).and_then(fold_star_or); - let documents = meilisearch - .documents( - path.into_inner(), - params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET), - params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT), - attributes_to_retrieve, - ) + let (total, documents) = meilisearch + .documents(path.into_inner(), offset, limit, attributes_to_retrieve) .await?; + debug!("returns: {:?}", documents); - Ok(HttpResponse::Ok().json(documents)) + Ok(HttpResponse::Ok().json(json!( 
+ { "limit": limit, "offset": offset, "total": total, "results": documents } + ))) } #[derive(Deserialize, Debug)] diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index 49397444f..a34b7578d 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -1,3 +1,5 @@ +use std::str::FromStr; + use actix_web::{web, HttpResponse}; use log::debug; use serde::{Deserialize, Serialize}; @@ -24,6 +26,38 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service(web::scope("/indexes").configure(indexes::configure)); } +/// A type that tries to match either a star (*) or +/// any other thing that implements `FromStr`. +#[derive(Debug)] +pub enum StarOr { + Star, + Other(T), +} + +impl FromStr for StarOr { + type Err = T::Err; + + fn from_str(s: &str) -> Result { + if s.trim() == "*" { + Ok(StarOr::Star) + } else { + T::from_str(s).map(StarOr::Other) + } + } +} + +/// Extracts the raw values from the `StarOr` types and +/// return None if a `StarOr::Star` is encountered. 
+pub fn fold_star_or(content: impl IntoIterator>) -> Option> { + content + .into_iter() + .map(|value| match value { + StarOr::Star => None, + StarOr::Other(val) => Some(val), + }) + .collect() +} + #[derive(Debug, Clone, Serialize, Deserialize)] #[allow(clippy::large_enum_variant)] #[serde(tag = "name")] diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 1fe903abf..096e44f8d 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -6,13 +6,14 @@ use meilisearch_lib::{IndexUid, MeiliSearch}; use serde::Deserialize; use serde_cs::vec::CS; use serde_json::json; -use std::str::FromStr; use crate::analytics::Analytics; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; use crate::task::{TaskListView, TaskStatus, TaskType, TaskView}; +use super::{fold_star_or, StarOr}; + pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks)))) .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task)))); @@ -27,40 +28,6 @@ pub struct TaskFilterQuery { index_uid: Option>>, } -/// A type that tries to match either a star (*) or -/// any other thing that implements `FromStr`. -#[derive(Debug)] -enum StarOr { - Star, - Other(T), -} - -impl FromStr for StarOr { - type Err = T::Err; - - fn from_str(s: &str) -> Result { - if s.trim() == "*" { - Ok(StarOr::Star) - } else { - T::from_str(s).map(StarOr::Other) - } - } -} - -/// Extracts the raw values from the `StarOr` types and -/// return None if a `StarOr::Star` is encountered. 
-fn fold_star_or(content: Vec>) -> Option> { - content - .into_iter() - .fold(Some(Vec::new()), |acc, val| match (acc, val) { - (None, _) | (_, StarOr::Star) => None, - (Some(mut acc), StarOr::Other(uid)) => { - acc.push(uid); - Some(acc) - } - }) -} - #[rustfmt::skip] fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool { matches!((type_, content), diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index bdce22db2..e21dbcb67 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -145,9 +145,12 @@ impl Index<'_> { pub async fn get_document( &self, id: u64, - _options: Option, + options: Option, ) -> (Value, StatusCode) { - let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id); + let mut url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id); + if let Some(fields) = options.and_then(|o| o.fields) { + url.push_str(&format!("?fields={}", fields.join(","))); + } self.service.get(url).await } @@ -162,10 +165,7 @@ impl Index<'_> { } if let Some(attributes_to_retrieve) = options.attributes_to_retrieve { - url.push_str(&format!( - "attributesToRetrieve={}&", - attributes_to_retrieve.join(",") - )); + url.push_str(&format!("fields={}&", attributes_to_retrieve.join(","))); } self.service.get(url).await @@ -245,7 +245,9 @@ impl Index<'_> { make_settings_test_routes!(distinct_attribute); } -pub struct GetDocumentOptions; +pub struct GetDocumentOptions { + pub fields: Option>, +} #[derive(Debug, Default)] pub struct GetAllDocumentsOptions { diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index ab271ce18..8ef8c54fd 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -828,7 +828,7 @@ async fn add_larger_dataset() { }) .await; assert_eq!(code, 200); - 
assert_eq!(response.as_array().unwrap().len(), 77); + assert_eq!(response["results"].as_array().unwrap().len(), 77); } #[actix_rt::test] @@ -849,7 +849,7 @@ async fn update_larger_dataset() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 77); + assert_eq!(response["results"].as_array().unwrap().len(), 77); } #[actix_rt::test] diff --git a/meilisearch-http/tests/documents/delete_documents.rs b/meilisearch-http/tests/documents/delete_documents.rs index 5198b2bfb..8c7ddaa7b 100644 --- a/meilisearch-http/tests/documents/delete_documents.rs +++ b/meilisearch-http/tests/documents/delete_documents.rs @@ -72,7 +72,7 @@ async fn clear_all_documents() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - assert!(response.as_array().unwrap().is_empty()); + assert!(response["results"].as_array().unwrap().is_empty()); } #[actix_rt::test] @@ -89,7 +89,7 @@ async fn clear_all_documents_empty_index() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - assert!(response.as_array().unwrap().is_empty()); + assert!(response["results"].as_array().unwrap().is_empty()); } #[actix_rt::test] @@ -125,8 +125,8 @@ async fn delete_batch() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 1); - assert_eq!(response.as_array().unwrap()[0]["id"], 3); + assert_eq!(response["results"].as_array().unwrap().len(), 1); + assert_eq!(response["results"][0]["id"], json!(3)); } #[actix_rt::test] @@ -143,5 +143,5 @@ async fn delete_no_document_batch() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 3); + assert_eq!(response["results"].as_array().unwrap().len(), 3); } diff --git a/meilisearch-http/tests/documents/get_documents.rs b/meilisearch-http/tests/documents/get_documents.rs index 6c93b9c13..cad656088 100644 --- 
a/meilisearch-http/tests/documents/get_documents.rs +++ b/meilisearch-http/tests/documents/get_documents.rs @@ -1,5 +1,5 @@ -use crate::common::GetAllDocumentsOptions; use crate::common::Server; +use crate::common::{GetAllDocumentsOptions, GetDocumentOptions}; use serde_json::json; @@ -39,7 +39,7 @@ async fn get_document() { let documents = serde_json::json!([ { "id": 0, - "content": "foobar", + "nested": { "content": "foobar" }, } ]); let (_, code) = index.add_documents(documents, None).await; @@ -49,11 +49,45 @@ async fn get_document() { assert_eq!(code, 200); assert_eq!( response, - serde_json::json!( { + serde_json::json!({ "id": 0, - "content": "foobar", + "nested": { "content": "foobar" }, }) ); + + let (response, code) = index + .get_document( + 0, + Some(GetDocumentOptions { + fields: Some(vec!["id"]), + }), + ) + .await; + assert_eq!(code, 200); + assert_eq!( + response, + serde_json::json!({ + "id": 0, + }) + ); + + /* This currently doesn't work but should be fixed by #2433 + let (response, code) = index + .get_document( + 0, + Some(GetDocumentOptions { + fields: Some(vec!["nested.content"]), + }), + ) + .await; + assert_eq!(code, 200); + assert_eq!( + response, + serde_json::json!({ + "nested": { "content": "foobar" }, + }) + ); + */ } #[actix_rt::test] @@ -88,7 +122,7 @@ async fn get_no_document() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - assert!(response.as_array().unwrap().is_empty()); + assert!(response["results"].as_array().unwrap().is_empty()); } #[actix_rt::test] @@ -101,7 +135,7 @@ async fn get_all_documents_no_options() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - let arr = response.as_array().unwrap(); + let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 20); let first = serde_json::json!({ "id":0, @@ -137,8 +171,11 @@ async fn test_get_all_documents_limit() { }) .await; assert_eq!(code, 200); - 
assert_eq!(response.as_array().unwrap().len(), 5); - assert_eq!(response.as_array().unwrap()[0]["id"], 0); + assert_eq!(response["results"].as_array().unwrap().len(), 5); + assert_eq!(response["results"][0]["id"], json!(0)); + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["limit"], json!(5)); + assert_eq!(response["total"], json!(77)); } #[actix_rt::test] @@ -154,8 +191,11 @@ async fn test_get_all_documents_offset() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!(response.as_array().unwrap()[0]["id"], 5); + assert_eq!(response["results"].as_array().unwrap().len(), 20); + assert_eq!(response["results"][0]["id"], json!(5)); + assert_eq!(response["offset"], json!(5)); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["total"], json!(77)); } #[actix_rt::test] @@ -171,20 +211,14 @@ async fn test_get_all_documents_attributes_to_retrieve() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 1 - ); - assert!(response.as_array().unwrap()[0] - .as_object() - .unwrap() - .get("name") - .is_some()); + assert_eq!(response["results"].as_array().unwrap().len(), 20); + for results in response["results"].as_array().unwrap() { + assert_eq!(results.as_object().unwrap().keys().count(), 1); + assert!(results["name"] != json!(null)); + } + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["total"], json!(77)); let (response, code) = index .get_all_documents(GetAllDocumentsOptions { @@ -193,15 +227,13 @@ async fn test_get_all_documents_attributes_to_retrieve() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 0 - ); + 
assert_eq!(response["results"].as_array().unwrap().len(), 20); + for results in response["results"].as_array().unwrap() { + assert_eq!(results.as_object().unwrap().keys().count(), 0); + } + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["total"], json!(77)); let (response, code) = index .get_all_documents(GetAllDocumentsOptions { @@ -210,15 +242,13 @@ async fn test_get_all_documents_attributes_to_retrieve() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 0 - ); + assert_eq!(response["results"].as_array().unwrap().len(), 20); + for results in response["results"].as_array().unwrap() { + assert_eq!(results.as_object().unwrap().keys().count(), 0); + } + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["total"], json!(77)); let (response, code) = index .get_all_documents(GetAllDocumentsOptions { @@ -227,15 +257,12 @@ async fn test_get_all_documents_attributes_to_retrieve() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 2 - ); + assert_eq!(response["results"].as_array().unwrap().len(), 20); + for results in response["results"].as_array().unwrap() { + assert_eq!(results.as_object().unwrap().keys().count(), 2); + assert!(results["name"] != json!(null)); + assert!(results["tags"] != json!(null)); + } let (response, code) = index .get_all_documents(GetAllDocumentsOptions { @@ -244,15 +271,10 @@ async fn test_get_all_documents_attributes_to_retrieve() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 16 - ); + 
assert_eq!(response["results"].as_array().unwrap().len(), 20); + for results in response["results"].as_array().unwrap() { + assert_eq!(results.as_object().unwrap().keys().count(), 16); + } let (response, code) = index .get_all_documents(GetAllDocumentsOptions { @@ -261,19 +283,14 @@ async fn test_get_all_documents_attributes_to_retrieve() { }) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); - assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 16 - ); + assert_eq!(response["results"].as_array().unwrap().len(), 20); + for results in response["results"].as_array().unwrap() { + assert_eq!(results.as_object().unwrap().keys().count(), 16); + } } #[actix_rt::test] -async fn get_documents_displayed_attributes() { +async fn get_documents_displayed_attributes_is_ignored() { let server = Server::new().await; let index = server.index("test"); index @@ -285,23 +302,19 @@ async fn get_documents_displayed_attributes() { .get_all_documents(GetAllDocumentsOptions::default()) .await; assert_eq!(code, 200); - assert_eq!(response.as_array().unwrap().len(), 20); + assert_eq!(response["results"].as_array().unwrap().len(), 20); assert_eq!( - response.as_array().unwrap()[0] - .as_object() - .unwrap() - .keys() - .count(), - 1 + response["results"][0].as_object().unwrap().keys().count(), + 16 ); - assert!(response.as_array().unwrap()[0] - .as_object() - .unwrap() - .get("gender") - .is_some()); + assert!(response["results"][0]["gender"] != json!(null)); + + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["total"], json!(77)); let (response, code) = index.get_document(0, None).await; assert_eq!(code, 200); - assert_eq!(response.as_object().unwrap().keys().count(), 1); + assert_eq!(response.as_object().unwrap().keys().count(), 16); assert!(response.as_object().unwrap().get("gender").is_some()); } diff --git a/meilisearch-http/tests/dumps/mod.rs 
b/meilisearch-http/tests/dumps/mod.rs index 6d6e6494a..c24fbe19d 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -142,21 +142,21 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( document, - json!({ "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) + json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) ); let (document, code) = index.get_document(500, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. 
White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) ); let (document, code) = index.get_document(10006, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) ); } @@ -211,21 +211,21 @@ async fn import_dump_v2_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( document, - json!({"name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "version": "0.15.2", "total_downloads": "7465"}) + json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. 
See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"}) ); let (document, code) = index.get_document(191940, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "version": "1.1.0", "total_downloads": "9394"}) + json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"}) ); let (document, code) = index.get_document(159227, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "version": "0.1.0", "total_downloads": "1007"}) + json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"}) ); } @@ -341,21 +341,21 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( document, - json!({ "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. 
To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) + json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) ); let (document, code) = index.get_document(500, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. 
Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) ); let (document, code) = index.get_document(10006, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) ); } @@ -410,21 +410,21 @@ async fn import_dump_v3_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( document, - json!({"name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "version": "0.15.2", "total_downloads": "7465"}) + json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. 
See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"}) ); let (document, code) = index.get_document(191940, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "version": "1.1.0", "total_downloads": "9394"}) + json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"}) ); let (document, code) = index.get_document(159227, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({"name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "version": "0.1.0", "total_downloads": "1007"}) + json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"}) ); } @@ -540,21 +540,21 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( document, - json!({ "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. 
To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) + json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) ); let (document, code) = index.get_document(500, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({ "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. 
Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) ); let (document, code) = index.get_document(10006, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({ "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) ); } @@ -609,20 +609,20 @@ async fn import_dump_v4_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( document, - json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "version": "0.15.2", "total_downloads": "7465"}) + json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. 
See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"}) ); let (document, code) = index.get_document(191940, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "version": "1.1.0", "total_downloads": "9394"}) + json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"}) ); let (document, code) = index.get_document(159227, None).await; assert_eq!(code, 200); assert_eq!( document, - json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "version": "0.1.0", "total_downloads": "1007"}) + json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. 
It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"}) ); } diff --git a/meilisearch-lib/src/index/index.rs b/meilisearch-lib/src/index/index.rs index f5122c8c1..bcf94bb0c 100644 --- a/meilisearch-lib/src/index/index.rs +++ b/meilisearch-lib/src/index/index.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeSet, HashSet}; +use std::collections::BTreeSet; use std::fs::create_dir_all; use std::marker::PhantomData; use std::ops::Deref; @@ -218,17 +218,17 @@ impl Index { }) } + /// Return the total number of documents contained in the index + the selected documents. pub fn retrieve_documents>( &self, offset: usize, limit: usize, attributes_to_retrieve: Option>, - ) -> Result>> { + ) -> Result<(u64, Vec)> { let txn = self.read_txn()?; let fields_ids_map = self.fields_ids_map(&txn)?; - let fields_to_display = - self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?; + let fields_to_display = self.fields_to_display(&attributes_to_retrieve, &fields_ids_map)?; let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit); @@ -240,20 +240,20 @@ impl Index { documents.push(object); } - Ok(documents) + let number_of_documents = self.number_of_documents(&txn)?; + + Ok((number_of_documents, documents)) } pub fn retrieve_document>( &self, doc_id: String, attributes_to_retrieve: Option>, - ) -> Result> { + ) -> Result { let txn = self.read_txn()?; let fields_ids_map = self.fields_ids_map(&txn)?; - - let fields_to_display = - self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?; + let fields_to_display = self.fields_to_display(&attributes_to_retrieve, &fields_ids_map)?; let internal_id = self .external_documents_ids(&txn)? @@ -278,25 +278,18 @@ impl Index { fn fields_to_display>( &self, - txn: &milli::heed::RoTxn, attributes_to_retrieve: &Option>, fields_ids_map: &milli::FieldsIdsMap, ) -> Result> { - let mut displayed_fields_ids = match self.displayed_fields_ids(txn)? 
{ - Some(ids) => ids.into_iter().collect::>(), - None => fields_ids_map.iter().map(|(id, _)| id).collect(), - }; - let attributes_to_retrieve_ids = match attributes_to_retrieve { Some(attrs) => attrs .iter() .filter_map(|f| fields_ids_map.id(f.as_ref())) - .collect::>(), + .collect(), None => fields_ids_map.iter().map(|(id, _)| id).collect(), }; - displayed_fields_ids.retain(|fid| attributes_to_retrieve_ids.contains(fid)); - Ok(displayed_fields_ids) + Ok(attributes_to_retrieve_ids) } pub fn snapshot(&self, path: impl AsRef) -> Result<()> { diff --git a/meilisearch-lib/src/index/mod.rs b/meilisearch-lib/src/index/mod.rs index 3a42b2617..b46d97849 100644 --- a/meilisearch-lib/src/index/mod.rs +++ b/meilisearch-lib/src/index/mod.rs @@ -32,11 +32,11 @@ pub mod test { use milli::update::IndexerConfig; use milli::update::{DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsMethod}; use nelson::Mocker; - use serde_json::{Map, Value}; use uuid::Uuid; use super::error::Result; use super::index::Index; + use super::Document; use super::{Checked, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings}; use crate::update_file_store::UpdateFileStore; @@ -102,7 +102,7 @@ pub mod test { offset: usize, limit: usize, attributes_to_retrieve: Option>, - ) -> Result>> { + ) -> Result<(u64, Vec)> { match self { MockIndex::Real(index) => { index.retrieve_documents(offset, limit, attributes_to_retrieve) @@ -115,7 +115,7 @@ pub mod test { &self, doc_id: String, attributes_to_retrieve: Option>, - ) -> Result> { + ) -> Result { match self { MockIndex::Real(index) => index.retrieve_document(doc_id, attributes_to_retrieve), MockIndex::Mock(_) => todo!(), diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 7ec159684..1381a251f 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -524,18 +524,19 @@ where Ok(settings) } + /// Return the total number of documents 
contained in the index + the selected documents. pub async fn documents( &self, uid: String, offset: usize, limit: usize, attributes_to_retrieve: Option>, - ) -> Result> { + ) -> Result<(u64, Vec)> { let index = self.index_resolver.get_index(uid).await?; - let documents = + let result = spawn_blocking(move || index.retrieve_documents(offset, limit, attributes_to_retrieve)) .await??; - Ok(documents) + Ok(result) } pub async fn document( From 446f1f31e01056667a1135e7937f6f3e13cf6f88 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 30 May 2022 16:42:51 +0200 Subject: [PATCH 064/185] rename Succeded to Succeeded --- meilisearch-http/src/routes/tasks.rs | 2 +- meilisearch-http/src/task.rs | 2 +- meilisearch-http/tests/search/mod.rs | 1 - meilisearch-lib/src/dump/compat/v3.rs | 5 +- meilisearch-lib/src/dump/compat/v4.rs | 66 +++++++++++++++++++ meilisearch-lib/src/dump/loaders/v3.rs | 6 +- meilisearch-lib/src/dump/loaders/v4.rs | 31 ++++++++- meilisearch-lib/src/index_resolver/mod.rs | 4 +- .../src/tasks/handlers/dump_handler.rs | 2 +- meilisearch-lib/src/tasks/task.rs | 9 ++- 10 files changed, 111 insertions(+), 17 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 1fe903abf..5c6ef711e 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -79,7 +79,7 @@ fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool matches!((status, event), (TaskStatus::Enqueued, TaskEvent::Created(_)) | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. }) - | (TaskStatus::Succeeded, TaskEvent::Succeded { .. }) + | (TaskStatus::Succeeded, TaskEvent::Succeeded { .. }) | (TaskStatus::Failed, TaskEvent::Failed { .. 
}), ) }) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index c7aaf0030..56eeabfc8 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -263,7 +263,7 @@ impl From for TaskView { TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None), TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None), TaskEvent::Processing(_) => (TaskStatus::Processing, None, None), - TaskEvent::Succeded { timestamp, result } => { + TaskEvent::Succeeded { timestamp, result } => { match (result, &mut details) { ( TaskResult::DocumentAddition { diff --git a/meilisearch-http/tests/search/mod.rs b/meilisearch-http/tests/search/mod.rs index c570e8b06..98893dac5 100644 --- a/meilisearch-http/tests/search/mod.rs +++ b/meilisearch-http/tests/search/mod.rs @@ -496,7 +496,6 @@ async fn search_facet_distribution() { |response, code| { assert_eq!(code, 200, "{}", response); let dist = response["facetDistribution"].as_object().unwrap(); - dbg!(&dist); assert_eq!(dist.len(), 3); assert_eq!( dist["doggos.name"], diff --git a/meilisearch-lib/src/dump/compat/v3.rs b/meilisearch-lib/src/dump/compat/v3.rs index befd70963..164b7153d 100644 --- a/meilisearch-lib/src/dump/compat/v3.rs +++ b/meilisearch-lib/src/dump/compat/v3.rs @@ -4,9 +4,10 @@ use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use uuid::Uuid; +use super::v4::{Task, TaskEvent}; use crate::index::{Settings, Unchecked}; use crate::index_resolver::IndexUid; -use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult}; +use crate::tasks::task::{DocumentDeletion, TaskContent, TaskId, TaskResult}; use super::v2; @@ -187,7 +188,7 @@ impl From<(UpdateStatus, String, TaskId)> for Task { // Dummy task let mut task = Task { id: task_id, - index_uid: Some(IndexUid::new(uid).unwrap()), + index_uid: IndexUid::new(uid).unwrap(), content: TaskContent::IndexDeletion, events: Vec::new(), }; diff --git a/meilisearch-lib/src/dump/compat/v4.rs 
b/meilisearch-lib/src/dump/compat/v4.rs index 8b1378917..6fa0e582a 100644 --- a/meilisearch-lib/src/dump/compat/v4.rs +++ b/meilisearch-lib/src/dump/compat/v4.rs @@ -1 +1,67 @@ +use meilisearch_error::ResponseError; +use serde::{Deserialize, Serialize}; +use time::OffsetDateTime; +use crate::tasks::batch::BatchId; +use crate::tasks::task::{TaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult}; +use crate::IndexUid; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Task { + pub id: TaskId, + pub index_uid: IndexUid, + pub content: TaskContent, + pub events: Vec, +} + +impl From for crate::tasks::task::Task { + fn from(other: Task) -> Self { + Self { + id: other.id, + index_uid: Some(other.index_uid), + content: other.content, + events: other.events.into_iter().map(Into::into).collect(), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum TaskEvent { + Created(#[serde(with = "time::serde::rfc3339")] OffsetDateTime), + Batched { + #[serde(with = "time::serde::rfc3339")] + timestamp: OffsetDateTime, + batch_id: BatchId, + }, + Processing(#[serde(with = "time::serde::rfc3339")] OffsetDateTime), + Succeded { + result: TaskResult, + #[serde(with = "time::serde::rfc3339")] + timestamp: OffsetDateTime, + }, + Failed { + error: ResponseError, + #[serde(with = "time::serde::rfc3339")] + timestamp: OffsetDateTime, + }, +} + +impl From for NewTaskEvent { + fn from(other: TaskEvent) -> Self { + match other { + TaskEvent::Created(x) => NewTaskEvent::Created(x), + TaskEvent::Batched { + timestamp, + batch_id, + } => NewTaskEvent::Batched { + timestamp, + batch_id, + }, + TaskEvent::Processing(x) => NewTaskEvent::Processing(x), + TaskEvent::Succeded { result, timestamp } => { + NewTaskEvent::Succeeded { result, timestamp } + } + TaskEvent::Failed { error, timestamp } => NewTaskEvent::Failed { error, timestamp }, + } + } +} diff --git a/meilisearch-lib/src/dump/loaders/v3.rs b/meilisearch-lib/src/dump/loaders/v3.rs index 0a2ea438b..44984c946 100644 --- 
a/meilisearch-lib/src/dump/loaders/v3.rs +++ b/meilisearch-lib/src/dump/loaders/v3.rs @@ -9,11 +9,11 @@ use log::info; use tempfile::tempdir; use uuid::Uuid; -use crate::dump::compat::v3; +use crate::dump::compat::{self, v3}; use crate::dump::Metadata; use crate::index_resolver::meta_store::{DumpEntry, IndexMeta}; use crate::options::IndexerOpts; -use crate::tasks::task::{Task, TaskId}; +use crate::tasks::task::TaskId; /// dump structure for V3: /// . @@ -124,7 +124,7 @@ fn patch_updates( .clone(); serde_json::to_writer( &mut dst_file, - &Task::from((entry.update, name, task_id as TaskId)), + &compat::v4::Task::from((entry.update, name, task_id as TaskId)), )?; dst_file.write_all(b"\n")?; Ok(()) diff --git a/meilisearch-lib/src/dump/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs index 50fffeb9e..126300af8 100644 --- a/meilisearch-lib/src/dump/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -1,12 +1,14 @@ -use std::fs; +use std::fs::{self, create_dir_all, File}; +use std::io::Write; use std::path::Path; use fs_extra::dir::{self, CopyOptions}; use log::info; use tempfile::tempdir; -use crate::dump::Metadata; +use crate::dump::{compat, Metadata}; use crate::options::IndexerOpts; +use crate::tasks::task::Task; pub fn load_dump( meta: Metadata, @@ -38,7 +40,7 @@ pub fn load_dump( )?; // Updates - dir::copy(src.as_ref().join("updates"), patched_dir.path(), &options)?; + patch_updates(&src, &patched_dir)?; // Keys if src.as_ref().join("keys").exists() { @@ -54,3 +56,26 @@ pub fn load_dump( indexing_options, ) } + +fn patch_updates(src: impl AsRef, dst: impl AsRef) -> anyhow::Result<()> { + let updates_path = src.as_ref().join("updates/data.jsonl"); + let output_updates_path = dst.as_ref().join("updates/data.jsonl"); + create_dir_all(output_updates_path.parent().unwrap())?; + let udpates_file = File::open(updates_path)?; + let mut output_update_file = File::create(output_updates_path)?; + + serde_json::Deserializer::from_reader(udpates_file) + 
.into_iter::() + .try_for_each(|task| -> anyhow::Result<()> { + let task: Task = task?.into(); + + serde_json::to_writer(&mut output_update_file, &task)?; + output_update_file.write_all(b"\n")?; + + Ok(()) + })?; + + output_update_file.flush()?; + + Ok(()) +} diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 1900061c7..33b480f61 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -200,7 +200,7 @@ where .await; let event = match result { - Ok(Ok(result)) => TaskEvent::Succeded { + Ok(Ok(result)) => TaskEvent::Succeeded { timestamp: OffsetDateTime::now_utc(), result: TaskResult::DocumentAddition { indexed_documents: result.indexed_documents, @@ -594,7 +594,7 @@ mod test { { assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Failed { .. }), "{:?}", result); } else { - assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeded { .. }), "{:?}", result); + assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeeded { .. }), "{:?}", result); } } }); diff --git a/meilisearch-lib/src/tasks/handlers/dump_handler.rs b/meilisearch-lib/src/tasks/handlers/dump_handler.rs index 715beafee..c708dadcc 100644 --- a/meilisearch-lib/src/tasks/handlers/dump_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/dump_handler.rs @@ -91,7 +91,7 @@ mod test { if accept { let batch = dump_handler.process_batch(batch).await; let last_event = batch.content.first().unwrap().events.last().unwrap(); - assert!(matches!(last_event, TaskEvent::Succeded { .. })); + assert!(matches!(last_event, TaskEvent::Succeeded { .. 
})); } }); diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index 0499d9702..97eb11467 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -48,7 +48,7 @@ pub enum TaskEvent { #[serde(with = "time::serde::rfc3339")] OffsetDateTime, ), - Succeded { + Succeeded { result: TaskResult, #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] #[serde(with = "time::serde::rfc3339")] @@ -64,7 +64,7 @@ pub enum TaskEvent { impl TaskEvent { pub fn succeeded(result: TaskResult) -> Self { - Self::Succeded { + Self::Succeeded { result, timestamp: OffsetDateTime::now_utc(), } @@ -106,7 +106,10 @@ impl Task { /// A task is finished when its last state is either `Succeeded` or `Failed`. pub fn is_finished(&self) -> bool { self.events.last().map_or(false, |event| { - matches!(event, TaskEvent::Succeded { .. } | TaskEvent::Failed { .. }) + matches!( + event, + TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. } + ) }) } From 627f13df856cfadfb44cbcb990f5bccfae19cd91 Mon Sep 17 00:00:00 2001 From: Irevoire Date: Tue, 24 May 2022 11:29:03 +0200 Subject: [PATCH 065/185] feat(http): paginate the index resource Fix #2373 --- meilisearch-http/src/routes/indexes/mod.rs | 27 +++- meilisearch-http/tests/auth/authorization.rs | 8 +- meilisearch-http/tests/common/server.rs | 23 +++- meilisearch-http/tests/dumps/mod.rs | 83 ++++++------ meilisearch-http/tests/index/get_index.rs | 128 +++++++++++++++++-- meilisearch-http/tests/index/update_index.rs | 3 +- meilisearch-http/tests/snapshot/mod.rs | 2 +- meilisearch-lib/src/index_controller/mod.rs | 9 +- 8 files changed, 208 insertions(+), 75 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index 956761eb3..37f4ee7b8 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -37,19 +37,38 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } +const 
PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct Paginate { + #[serde(default)] + offset: usize, + #[serde(default = "PAGINATION_DEFAULT_LIMIT")] + limit: usize, +} + pub async fn list_indexes( data: GuardedData, MeiliSearch>, + paginate: web::Query, ) -> Result { let search_rules = &data.filters().search_rules; - let indexes: Vec<_> = data - .list_indexes() - .await? + let indexes: Vec<_> = data.list_indexes().await?; + let nb_indexes = indexes.len(); + let indexes: Vec<_> = indexes .into_iter() .filter(|i| search_rules.is_index_authorized(&i.uid)) + .skip(paginate.offset) + .take(paginate.limit) .collect(); debug!("returns: {:?}", indexes); - Ok(HttpResponse::Ok().json(indexes)) + Ok(HttpResponse::Ok().json(json!({ + "results": indexes, + "offset": paginate.offset, + "limit": paginate.limit, + "total": nb_indexes, + }))) } #[derive(Debug, Deserialize)] diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 56a1a13ca..fc18758ef 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -353,10 +353,10 @@ async fn list_authorized_indexes_restricted_index() { let key = response["key"].as_str().unwrap(); server.use_api_key(&key); - let (response, code) = server.list_indexes().await; + let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - let response = response.as_array().unwrap(); + let response = response["results"].as_array().unwrap(); // key should have access on `products` index. 
assert!(response.iter().any(|index| index["uid"] == "products")); @@ -394,10 +394,10 @@ async fn list_authorized_indexes_no_index_restriction() { let key = response["key"].as_str().unwrap(); server.use_api_key(&key); - let (response, code) = server.list_indexes().await; + let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - let response = response.as_array().unwrap(); + let response = response["results"].as_array().unwrap(); // key should have access on `products` index. assert!(response.iter().any(|index| index["uid"] == "products")); diff --git a/meilisearch-http/tests/common/server.rs b/meilisearch-http/tests/common/server.rs index 884036228..2dd235e8f 100644 --- a/meilisearch-http/tests/common/server.rs +++ b/meilisearch-http/tests/common/server.rs @@ -103,8 +103,27 @@ impl Server { } } - pub async fn list_indexes(&self) -> (Value, StatusCode) { - self.service.get("/indexes").await + pub async fn list_indexes( + &self, + offset: Option, + limit: Option, + ) -> (Value, StatusCode) { + let (offset, limit) = ( + offset.map(|offset| format!("offset={offset}")), + limit.map(|limit| format!("limit={limit}")), + ); + let query_parameter = offset + .as_ref() + .zip(limit.as_ref()) + .map(|(offset, limit)| format!("{offset}&{limit}")) + .or_else(|| offset.xor(limit)); + if let Some(query_parameter) = query_parameter { + self.service + .get(format!("/indexes?{query_parameter}")) + .await + } else { + self.service.get("/indexes").await + } } pub async fn version(&self) -> (Value, StatusCode) { diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 6d6e6494a..661032f98 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -41,13 +41,12 @@ async fn import_dump_v2_movie_raw() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; 
assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("indexUID")); - assert_eq!(indexes[0]["name"], json!("indexUID")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("indexUID")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("indexUID"); @@ -106,13 +105,12 @@ async fn import_dump_v2_movie_with_settings() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("indexUID")); - assert_eq!(indexes[0]["name"], json!("indexUID")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("indexUID")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("indexUID"); @@ -171,13 +169,12 @@ async fn import_dump_v2_rubygems_with_settings() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("rubygems")); - assert_eq!(indexes[0]["name"], json!("rubygems")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("rubygems")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("rubygems"); @@ -240,13 +237,12 @@ async fn import_dump_v3_movie_raw() { }; let server = 
Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("indexUID")); - assert_eq!(indexes[0]["name"], json!("indexUID")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("indexUID")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("indexUID"); @@ -305,13 +301,12 @@ async fn import_dump_v3_movie_with_settings() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("indexUID")); - assert_eq!(indexes[0]["name"], json!("indexUID")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("indexUID")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("indexUID"); @@ -336,7 +331,7 @@ async fn import_dump_v3_movie_with_settings() { json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { 
"receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) ); - // finally we're just going to check that we can still get a few documents by id + // finally we're just going to check that we can["results"] still get a few documents by id let (document, code) = index.get_document(100, None).await; assert_eq!(code, 200); assert_eq!( @@ -370,13 +365,12 @@ async fn import_dump_v3_rubygems_with_settings() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("rubygems")); - assert_eq!(indexes[0]["name"], json!("rubygems")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("rubygems")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("rubygems"); @@ -439,13 +433,12 @@ async fn import_dump_v4_movie_raw() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("indexUID")); - assert_eq!(indexes[0]["name"], json!("indexUID")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("indexUID")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("indexUID"); @@ -504,13 +497,12 @@ async fn import_dump_v4_movie_with_settings() { }; let server = 
Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("indexUID")); - assert_eq!(indexes[0]["name"], json!("indexUID")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("indexUID")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("indexUID"); @@ -569,13 +561,12 @@ async fn import_dump_v4_rubygems_with_settings() { }; let server = Server::new_with_options(options).await.unwrap(); - let (indexes, code) = server.list_indexes().await; + let (indexes, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert_eq!(indexes.as_array().unwrap().len(), 1); - assert_eq!(indexes[0]["uid"], json!("rubygems")); - assert_eq!(indexes[0]["name"], json!("rubygems")); - assert_eq!(indexes[0]["primaryKey"], json!("id")); + assert_eq!(indexes["results"].as_array().unwrap().len(), 1); + assert_eq!(indexes["results"][0]["uid"], json!("rubygems")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); let index = server.index("rubygems"); diff --git a/meilisearch-http/tests/index/get_index.rs b/meilisearch-http/tests/index/get_index.rs index 924f603df..a8c036483 100644 --- a/meilisearch-http/tests/index/get_index.rs +++ b/meilisearch-http/tests/index/get_index.rs @@ -16,12 +16,11 @@ async fn create_and_get_index() { assert_eq!(code, 200); assert_eq!(response["uid"], "test"); - assert_eq!(response["name"], "test"); assert!(response.get("createdAt").is_some()); assert!(response.get("updatedAt").is_some()); assert_eq!(response["createdAt"], response["updatedAt"]); assert_eq!(response["primaryKey"], Value::Null); - assert_eq!(response.as_object().unwrap().len(), 5); + 
assert_eq!(response.as_object().unwrap().len(), 4); } #[actix_rt::test] @@ -45,10 +44,10 @@ async fn error_get_unexisting_index() { #[actix_rt::test] async fn no_index_return_empty_list() { let server = Server::new().await; - let (response, code) = server.list_indexes().await; + let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert!(response.is_array()); - assert!(response.as_array().unwrap().is_empty()); + assert!(response["results"].is_array()); + assert!(response["results"].as_array().unwrap().is_empty()); } #[actix_rt::test] @@ -59,10 +58,10 @@ async fn list_multiple_indexes() { server.index("test").wait_task(1).await; - let (response, code) = server.list_indexes().await; + let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); - assert!(response.is_array()); - let arr = response.as_array().unwrap(); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); assert_eq!(arr.len(), 2); assert!(arr .iter() @@ -72,6 +71,119 @@ async fn list_multiple_indexes() { .any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key")); } +#[actix_rt::test] +async fn get_and_paginate_indexes() { + let server = Server::new().await; + const NB_INDEXES: usize = 50; + for i in 0..NB_INDEXES { + server.index(&format!("test_{i:02}")).create(None).await; + } + + server + .index(&format!("test_{NB_INDEXES}")) + .wait_task(NB_INDEXES as u64 - 1) + .await; + + // basic + let (response, code) = server.list_indexes(None, None).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 20); + // ensuring we get all the indexes in the alphabetical order + assert!((0..20) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| 
entry["uid"] == expected)); + + // with an offset + let (response, code) = server.list_indexes(Some(15), None).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["offset"], json!(15)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 20); + assert!((15..35) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| entry["uid"] == expected)); + + // with an offset and not enough elements + let (response, code) = server.list_indexes(Some(45), None).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(20)); + assert_eq!(response["offset"], json!(45)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 5); + assert!((45..50) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| entry["uid"] == expected)); + + // with a limit lower than the default + let (response, code) = server.list_indexes(None, Some(5)).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(5)); + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 5); + assert!((0..5) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| entry["uid"] == expected)); + + // with a limit higher than the default + let (response, code) = server.list_indexes(None, Some(40)).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(40)); + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 40); + 
assert!((0..40) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| entry["uid"] == expected)); + + // with a limit higher than the default + let (response, code) = server.list_indexes(None, Some(80)).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(80)); + assert_eq!(response["offset"], json!(0)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 50); + assert!((0..50) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| entry["uid"] == expected)); + + // with a limit and an offset + let (response, code) = server.list_indexes(Some(20), Some(10)).await; + assert_eq!(code, 200); + assert_eq!(response["limit"], json!(10)); + assert_eq!(response["offset"], json!(20)); + assert_eq!(response["total"], json!(NB_INDEXES)); + assert!(response["results"].is_array()); + let arr = response["results"].as_array().unwrap(); + assert_eq!(arr.len(), 10); + assert!((20..30) + .map(|idx| format!("test_{idx:02}")) + .zip(arr) + .all(|(expected, entry)| entry["uid"] == expected)); +} + #[actix_rt::test] async fn get_invalid_index_uid() { let server = Server::new().await; diff --git a/meilisearch-http/tests/index/update_index.rs b/meilisearch-http/tests/index/update_index.rs index 1896f731f..48fde5608 100644 --- a/meilisearch-http/tests/index/update_index.rs +++ b/meilisearch-http/tests/index/update_index.rs @@ -21,7 +21,6 @@ async fn update_primary_key() { assert_eq!(code, 200); assert_eq!(response["uid"], "test"); - assert_eq!(response["name"], "test"); assert!(response.get("createdAt").is_some()); assert!(response.get("updatedAt").is_some()); @@ -32,7 +31,7 @@ async fn update_primary_key() { assert!(created_at < updated_at); assert_eq!(response["primaryKey"], "primary"); - assert_eq!(response.as_object().unwrap().len(), 5); + assert_eq!(response.as_object().unwrap().len(), 4); } 
#[actix_rt::test] diff --git a/meilisearch-http/tests/snapshot/mod.rs b/meilisearch-http/tests/snapshot/mod.rs index a0645733e..27ff838e1 100644 --- a/meilisearch-http/tests/snapshot/mod.rs +++ b/meilisearch-http/tests/snapshot/mod.rs @@ -70,7 +70,7 @@ async fn perform_snapshot() { let snapshot_server = Server::new_with_options(options).await.unwrap(); verify_snapshot!(server, snapshot_server, |server| => - server.list_indexes(), + server.list_indexes(None, None), // for some reason the db sizes differ. this may be due to the compaction options we have // set when performing the snapshot //server.stats(), diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 7ec159684..05801ad29 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -62,7 +62,6 @@ pub struct IndexMetadata { #[serde(skip)] pub uuid: Uuid, pub uid: String, - name: String, #[serde(flatten)] pub meta: IndexMeta, } @@ -508,7 +507,6 @@ where let meta = index.meta()?; let meta = IndexMetadata { uuid: index.uuid(), - name: uid.clone(), uid, meta, }; @@ -561,12 +559,7 @@ where let index = self.index_resolver.get_index(uid.clone()).await?; let uuid = index.uuid(); let meta = spawn_blocking(move || index.meta()).await??; - let meta = IndexMetadata { - uuid, - name: uid.clone(), - uid, - meta, - }; + let meta = IndexMetadata { uuid, uid, meta }; Ok(meta) } From 39db6ea42bd130b012a63334dd3fff01ae1c611a Mon Sep 17 00:00:00 2001 From: Ryan Russell Date: Tue, 31 May 2022 14:21:34 -0500 Subject: [PATCH 066/185] docs(security): Fix `Supported` Signed-off-by: Ryan Russell --- SECURITY.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SECURITY.md b/SECURITY.md index 63bc15a40..5de4af60d 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -4,7 +4,7 @@ Meilisearch takes the security of our software products and services seriously. 
If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below. -## Suported versions +## Supported versions As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates. From 9d5cc88cd5d99ec937478e7b0182bee7a8f398ef Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 31 May 2022 11:56:51 +0200 Subject: [PATCH 067/185] Implement the seek-based tasks list pagination --- meilisearch-http/src/routes/tasks.rs | 33 +++++++++++++++++++++++----- meilisearch-http/src/task.rs | 12 ++++------ 2 files changed, 32 insertions(+), 13 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index ca4824517..821142399 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -26,6 +26,8 @@ pub struct TaskFilterQuery { type_: Option>>, status: Option>>, index_uid: Option>>, + limit: Option, // TODO must not return an error when deser fail + after: Option, // TODO must not return an error when deser fail } #[rustfmt::skip] @@ -68,11 +70,13 @@ async fn get_tasks( type_, status, index_uid, + limit, + after, } = params.into_inner(); let search_rules = &meilisearch.filters().search_rules; - // We first tranform a potential indexUid=* into a "not specified indexUid filter" + // We first transform a potential indexUid=* into a "not specified indexUid filter" // for every one of the filters: type, status, and indexUid. let type_ = type_.map(CS::into_inner).and_then(fold_star_or); let status = status.map(CS::into_inner).and_then(fold_star_or); @@ -128,13 +132,32 @@ async fn get_tasks( indexes_filters }; - let tasks: TaskListView = meilisearch - .list_tasks(filters, None, None) + // We +1 just to know if there is more after this "page" or not. + let limit = limit.unwrap_or(DEFAULT_LIMIT).saturating_add(1); + // We -1 here because we need an offset and we must exclude the `after` one. 
+ let offset = after.map(|n| n.saturating_sub(1)); + + let mut tasks_results = meilisearch + .list_tasks(filters, Some(limit), offset) .await? .into_iter() .map(TaskView::from) - .collect::>() - .into(); + .collect::>(); + + // If we were able to fetch the number +1 tasks we asked + // it means that there is more to come. + let after = if tasks_results.len() == limit { + tasks_results.pop(); + tasks_results.last().map(|t| t.uid) + } else { + None + }; + + let tasks = TaskListView { + results: tasks_results, + limit: limit.saturating_sub(1), + after, + }; Ok(HttpResponse::Ok().json(tasks)) } diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 56eeabfc8..8eec71a4e 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -180,7 +180,7 @@ fn serialize_duration( #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct TaskView { - uid: TaskId, + pub uid: TaskId, index_uid: Option, status: TaskStatus, #[serde(rename = "type")] @@ -369,13 +369,9 @@ impl From for TaskView { #[derive(Debug, Serialize)] pub struct TaskListView { - results: Vec, -} - -impl From> for TaskListView { - fn from(results: Vec) -> Self { - Self { results } - } + pub results: Vec, + pub limit: usize, + pub after: Option, } #[derive(Debug, Serialize)] From 004c8b6be36780954ad2e2cc541e852e4a8c9e13 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 31 May 2022 16:58:08 +0200 Subject: [PATCH 068/185] Add the new limit and after fields in the dump tests --- meilisearch-http/tests/dumps/mod.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 426e4f941..2f8938d28 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -68,7 +68,7 @@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", 
"type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "after": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -132,7 +132,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", 
"enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "after": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -264,7 +264,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "after": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -328,7 +328,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": 
"2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "after": null }) ); // finally we're just going to check that we can["results"] still get a few documents by id @@ -460,7 +460,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", 
"enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "after": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -524,7 +524,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "after": null 
}) ); // finally we're just going to check that we can still get a few documents by id From 461b91fd13a5575d574fb3612a74ba948d993cb3 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 1 Jun 2022 10:20:33 +0200 Subject: [PATCH 069/185] Introduce the fetch_unfinished_tasks function to fetch tasks --- meilisearch-lib/src/tasks/scheduler.rs | 8 +--- meilisearch-lib/src/tasks/task_store/mod.rs | 18 ++++++++ meilisearch-lib/src/tasks/task_store/store.rs | 43 ++++++++++++++++--- 3 files changed, 56 insertions(+), 13 deletions(-) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 19265a911..dddb6dff9 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -342,14 +342,8 @@ impl Scheduler { } async fn fetch_pending_tasks(&mut self) -> Result<()> { - // We must NEVER re-enqueue an already processed task! It's content uuid would point to an unexisting file. - // - // TODO(marin): This may create some latency when the first batch lazy loads the pending updates. - let mut filter = TaskFilter::default(); - filter.filter_fn(|task| !task.is_finished()); - self.store - .list_tasks(Some(self.next_fetched_task_id), Some(filter), None) + .fetch_unfinished_tasks(Some(self.next_fetched_task_id)) .await? .into_iter() // The tasks arrive in reverse order, and we need to insert them in order. diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index 3645717e6..835f378e5 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -186,6 +186,17 @@ impl TaskStore { Ok(tasks) } + pub async fn fetch_unfinished_tasks(&self, offset: Option) -> Result> { + let store = self.store.clone(); + + tokio::task::spawn_blocking(move || { + let txn = store.rtxn()?; + let tasks = store.fetch_unfinished_tasks(&txn, offset)?; + Ok(tasks) + }) + .await? 
+ } + pub async fn list_tasks( &self, offset: Option, @@ -325,6 +336,13 @@ pub mod test { } } + pub async fn fetch_unfinished_tasks(&self, from: Option) -> Result> { + match self { + Self::Real(s) => s.fetch_unfinished_tasks(from).await, + Self::Mock(m) => unsafe { m.get("fetch_unfinished_tasks").call(from) }, + } + } + pub async fn list_tasks( &self, from: Option, diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index 75ece0ae8..a109935ab 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -121,9 +121,27 @@ impl Store { Ok(task) } - pub fn list_tasks<'a>( + /// Returns the unfinished tasks starting from the given taskId in ascending order. + pub fn fetch_unfinished_tasks(&self, txn: &RoTxn, from: Option) -> Result> { + // We must NEVER re-enqueue an already processed task! It's content uuid would point to an unexisting file. + // + // TODO(marin): This may create some latency when the first batch lazy loads the pending updates. + let from = from.unwrap_or_default(); + + let result: StdResult, milli::heed::Error> = self + .tasks + .range(txn, &(BEU64::new(from)..))? + .map(|r| r.map(|(_, t)| t)) + .filter(|result| result.as_ref().map_or(true, |t| !t.is_finished())) + .collect(); + + result.map_err(Into::into) + } + + /// Returns all the tasks starting from the given taskId and going in descending order. 
+ pub fn list_tasks( &self, - txn: &'a RoTxn, + txn: &RoTxn, from: Option, filter: Option, limit: Option, @@ -132,6 +150,7 @@ impl Store { let range = from..limit .map(|limit| (limit as u64).saturating_add(from)) .unwrap_or(u64::MAX); + let iter: Box>> = match filter { Some( ref filter @ TaskFilter { @@ -152,7 +171,7 @@ impl Store { ), }; - let apply_fitler = |task: &StdResult<_, milli::heed::Error>| match task { + let apply_filter = |task: &StdResult<_, milli::heed::Error>| match task { Ok(ref t) => filter .as_ref() .and_then(|filter| filter.filter_fn.as_ref()) @@ -160,9 +179,10 @@ impl Store { .unwrap_or(true), Err(_) => true, }; + // Collect 'limit' task if it exists or all of them. let tasks = iter - .filter(apply_fitler) + .filter(apply_filter) .take(limit.unwrap_or(usize::MAX)) .try_fold::<_, _, StdResult<_, milli::heed::Error>>(Vec::new(), |mut v, task| { v.push(task?); @@ -305,9 +325,20 @@ pub mod test { } } - pub fn list_tasks<'a>( + pub fn fetch_unfinished_tasks( &self, - txn: &'a RoTxn, + txn: &RoTxn, + from: Option, + ) -> Result> { + match self { + MockStore::Real(index) => index.fetch_unfinished_tasks(txn, from), + MockStore::Fake(_) => todo!(), + } + } + + pub fn list_tasks( + &self, + txn: &RoTxn, from: Option, filter: Option, limit: Option, From 497052554177f7eac71e2e6d75f5d3d2c4e55919 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Wed, 1 Jun 2022 12:29:36 +0200 Subject: [PATCH 070/185] Update README.md Co-authored-by: Tamo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d009be3fe..d882ba0de 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ meilisearch #### Docker ```bash -docker run -p 7700:7700 -v "$(pwd)/data.ms:/meili_data/data.ms" getmeili/meilisearch +docker run -p 7700:7700 -v "$(pwd)/meili_data:/meili_data" getmeili/meilisearch ``` #### Announcing a cloud-hosted Meilisearch From c11d21879ac843c3ed705e778ac8bfa52cc61100 Mon Sep 17 
00:00:00 2001 From: Kerollmops Date: Wed, 1 Jun 2022 12:04:01 +0200 Subject: [PATCH 071/185] Introduce tasks limit and after to the tasks route --- Cargo.lock | 1 + meilisearch-http/src/routes/tasks.rs | 25 +- meilisearch-lib/Cargo.toml | 1 + meilisearch-lib/src/tasks/batch.rs | 2 +- meilisearch-lib/src/tasks/task.rs | 2 +- meilisearch-lib/src/tasks/task_store/mod.rs | 6 +- meilisearch-lib/src/tasks/task_store/store.rs | 225 +++++------------- 7 files changed, 95 insertions(+), 167 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39eb78987..bbf557c2e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2114,6 +2114,7 @@ dependencies = [ "rayon", "regex", "reqwest", + "roaring", "rustls", "serde", "serde_json", diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 821142399..a82ca835f 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -14,6 +14,8 @@ use crate::task::{TaskListView, TaskStatus, TaskType, TaskView}; use super::{fold_star_or, StarOr}; +const DEFAULT_LIMIT: fn() -> usize = || 20; + pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks)))) .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task)))); @@ -26,8 +28,9 @@ pub struct TaskFilterQuery { type_: Option>>, status: Option>>, index_uid: Option>>, - limit: Option, // TODO must not return an error when deser fail - after: Option, // TODO must not return an error when deser fail + #[serde(default = "DEFAULT_LIMIT")] + limit: usize, + after: Option, } #[rustfmt::skip] @@ -132,10 +135,22 @@ async fn get_tasks( indexes_filters }; + let offset = match after { + Some(0) => { + let tasks = TaskListView { + results: vec![], + limit, + after: None, + }; + return Ok(HttpResponse::Ok().json(tasks)); + } + // We -1 here because we need an offset and we must exclude the `after` one. 
+ Some(n) => Some(n - 1), + None => None, + }; + // We +1 just to know if there is more after this "page" or not. - let limit = limit.unwrap_or(DEFAULT_LIMIT).saturating_add(1); - // We -1 here because we need an offset and we must exclude the `after` one. - let offset = after.map(|n| n.saturating_sub(1)); + let limit = limit.saturating_add(1); let mut tasks_results = meilisearch .list_tasks(filters, Some(limit), offset) diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 85ae49f64..020862cea 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -41,6 +41,7 @@ rand = "0.8.5" rayon = "1.5.1" regex = "1.5.5" reqwest = { version = "0.11.9", features = ["json", "rustls-tls"], default-features = false, optional = true } +roaring = "0.9.0" rustls = "0.20.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } diff --git a/meilisearch-lib/src/tasks/batch.rs b/meilisearch-lib/src/tasks/batch.rs index d5116f750..7173ecd33 100644 --- a/meilisearch-lib/src/tasks/batch.rs +++ b/meilisearch-lib/src/tasks/batch.rs @@ -4,7 +4,7 @@ use crate::snapshot::SnapshotJob; use super::task::{Task, TaskEvent}; -pub type BatchId = u64; +pub type BatchId = u32; #[derive(Debug)] pub enum BatchContent { diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index 97eb11467..3b94cd991 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -10,7 +10,7 @@ use crate::{ index_resolver::IndexUid, }; -pub type TaskId = u64; +pub type TaskId = u32; #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[cfg_attr(test, derive(proptest_derive::Arbitrary))] diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index 835f378e5..6c7584683 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -41,6 +41,10 @@ impl 
TaskFilter { } } + fn filtered_indexes(&self) -> Option<&HashSet> { + self.indexes.as_ref() + } + /// Adds an index to the filter, so the filter must match this index. pub fn filter_index(&mut self, index: String) { self.indexes @@ -396,7 +400,7 @@ pub mod test { let mut runner = TestRunner::new(Config::default()); runner - .run(&(0..100u64).prop_map(gen_task), |task| { + .run(&(0..100u32).prop_map(gen_task), |task| { let mut txn = store.wtxn().unwrap(); let previous_id = store.next_task_id(&mut txn).unwrap(); diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index a109935ab..5b17da716 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -1,62 +1,30 @@ #[allow(clippy::upper_case_acronyms)] -type BEU64 = milli::heed::zerocopy::U64; -const UID_TASK_IDS: &str = "uid_task_id"; +type BEU32 = milli::heed::zerocopy::U32; + +const INDEX_UIDS_TASK_IDS: &str = "index-uids-task-ids"; const TASKS: &str = "tasks"; -use std::borrow::Cow; -use std::collections::BinaryHeap; -use std::convert::TryInto; -use std::mem::size_of; -use std::ops::Range; +use std::collections::HashSet; +use std::ops::Bound::{Excluded, Unbounded}; use std::result::Result as StdResult; use std::sync::Arc; -use milli::heed::types::{ByteSlice, OwnedType, SerdeJson, Unit}; -use milli::heed::{BytesDecode, BytesEncode, Database, Env, RoTxn, RwTxn}; +use milli::heed::types::{OwnedType, SerdeJson, Str}; +use milli::heed::{Database, Env, RoTxn, RwTxn}; +use milli::heed_codec::RoaringBitmapCodec; +use roaring::RoaringBitmap; use crate::tasks::task::{Task, TaskId}; use super::super::Result; - use super::TaskFilter; -enum IndexUidTaskIdCodec {} - -impl<'a> BytesEncode<'a> for IndexUidTaskIdCodec { - type EItem = (&'a str, TaskId); - - fn bytes_encode((s, id): &'a Self::EItem) -> Option> { - let size = s.len() + std::mem::size_of::() + 1; - if size > 512 { - return None; - } - let mut b = 
Vec::with_capacity(size); - b.extend_from_slice(s.as_bytes()); - // null terminate the string - b.push(0); - b.extend_from_slice(&id.to_be_bytes()); - Some(Cow::Owned(b)) - } -} - -impl<'a> BytesDecode<'a> for IndexUidTaskIdCodec { - type DItem = (&'a str, TaskId); - - fn bytes_decode(bytes: &'a [u8]) -> Option { - let len = bytes.len(); - let s_end = len.checked_sub(size_of::())?.checked_sub(1)?; - let str_bytes = &bytes[..s_end]; - let str = std::str::from_utf8(str_bytes).ok()?; - let id = TaskId::from_be_bytes(bytes[(len - size_of::())..].try_into().ok()?); - Some((str, id)) - } -} - pub struct Store { env: Arc, - uids_task_ids: Database, - tasks: Database, SerdeJson>, + /// Maps an index uid to the set of tasks ids associated to it. + index_uid_task_ids: Database, + tasks: Database, SerdeJson>, } impl Drop for Store { @@ -74,12 +42,12 @@ impl Store { /// You want to patch all un-finished tasks and put them in your pending /// queue with the `reset_and_return_unfinished_update` method. pub fn new(env: Arc) -> Result { - let uids_task_ids = env.create_database(Some(UID_TASK_IDS))?; + let index_uid_task_ids = env.create_database(Some(INDEX_UIDS_TASK_IDS))?; let tasks = env.create_database(Some(TASKS))?; Ok(Self { env, - uids_task_ids, + index_uid_task_ids, tasks, }) } @@ -107,17 +75,24 @@ impl Store { } pub fn put(&self, txn: &mut RwTxn, task: &Task) -> Result<()> { - self.tasks.put(txn, &BEU64::new(task.id), task)?; + self.tasks.put(txn, &BEU32::new(task.id), task)?; // only add the task to the indexes index if it has an index_uid - if let Some(ref index_uid) = task.index_uid { - self.uids_task_ids.put(txn, &(index_uid, task.id), &())?; + if let Some(index_uid) = &task.index_uid { + let mut tasks_set = self + .index_uid_task_ids + .get(txn, index_uid)? 
+ .unwrap_or_default(); + + tasks_set.insert(task.id); + + self.index_uid_task_ids.put(txn, index_uid, &tasks_set)?; } Ok(()) } pub fn get(&self, txn: &RoTxn, id: TaskId) -> Result> { - let task = self.tasks.get(txn, &BEU64::new(id))?; + let task = self.tasks.get(txn, &BEU32::new(id))?; Ok(task) } @@ -130,7 +105,7 @@ impl Store { let result: StdResult, milli::heed::Error> = self .tasks - .range(txn, &(BEU64::new(from)..))? + .range(txn, &(BEU32::new(from)..))? .map(|r| r.map(|(_, t)| t)) .filter(|result| result.as_ref().map_or(true, |t| !t.is_finished())) .collect(); @@ -146,102 +121,58 @@ impl Store { filter: Option, limit: Option, ) -> Result> { - let from = from.unwrap_or_default(); - let range = from..limit - .map(|limit| (limit as u64).saturating_add(from)) - .unwrap_or(u64::MAX); - - let iter: Box>> = match filter { - Some( - ref filter @ TaskFilter { - indexes: Some(_), .. - }, - ) => { - let iter = self - .compute_candidates(txn, filter, range)? - .into_iter() - .filter_map(|id| self.tasks.get(txn, &BEU64::new(id)).transpose()); - - Box::new(iter) - } - _ => Box::new( - self.tasks - .rev_range(txn, &(BEU64::new(range.start)..BEU64::new(range.end)))? - .map(|r| r.map(|(_, t)| t)), - ), + let from = match from { + Some(from) => from, + None => self.tasks.last(txn)?.map_or(0, |(id, _)| id.get()), }; - let apply_filter = |task: &StdResult<_, milli::heed::Error>| match task { - Ok(ref t) => filter + let filter_fn = |task: &Task| { + filter .as_ref() - .and_then(|filter| filter.filter_fn.as_ref()) - .map(|f| f(t)) - .unwrap_or(true), - Err(_) => true, + .and_then(|f| f.filter_fn.as_ref()) + .map_or(true, |f| f(task)) }; - // Collect 'limit' task if it exists or all of them. 
- let tasks = iter - .filter(apply_filter) - .take(limit.unwrap_or(usize::MAX)) - .try_fold::<_, _, StdResult<_, milli::heed::Error>>(Vec::new(), |mut v, task| { - v.push(task?); - Ok(v) - })?; + let result: Result> = match filter.as_ref().and_then(|f| f.filtered_indexes()) { + Some(indexes) => self + .compute_candidates(txn, indexes, from)? + .filter(|result| result.as_ref().map_or(true, filter_fn)) + .take(limit.unwrap_or(usize::MAX)) + .collect(), + None => self + .tasks + .rev_range(txn, &(..=BEU32::new(from)))? + .map(|r| r.map(|(_, t)| t).map_err(Into::into)) + .filter(|result| result.as_ref().map_or(true, filter_fn)) + .take(limit.unwrap_or(usize::MAX)) + .collect(), + }; - Ok(tasks) + result.map_err(Into::into) } - fn compute_candidates( - &self, - txn: &milli::heed::RoTxn, - filter: &TaskFilter, - range: Range, - ) -> Result> { - let mut candidates = BinaryHeap::new(); - if let Some(ref indexes) = filter.indexes { - for index in indexes { - // We need to prefix search the null terminated string to make sure that we only - // get exact matches for the index, and not other uids that would share the same - // prefix, i.e test and test1. - let mut index_uid = index.as_bytes().to_vec(); - index_uid.push(0); + fn compute_candidates<'a>( + &'a self, + txn: &'a RoTxn, + indexes: &HashSet, + from: TaskId, + ) -> Result> + 'a> { + let mut candidates = RoaringBitmap::new(); - self.uids_task_ids - .remap_key_type::() - .rev_prefix_iter(txn, &index_uid)? 
- .map(|entry| -> StdResult<_, milli::heed::Error> { - let (key, _) = entry?; - let (_, id) = IndexUidTaskIdCodec::bytes_decode(key) - .ok_or(milli::heed::Error::Decoding)?; - Ok(id) - }) - .skip_while(|entry| { - entry - .as_ref() - .ok() - // we skip all elements till we enter in the range - .map(|key| !range.contains(key)) - // if we encounter an error we returns true to collect it later - .unwrap_or(true) - }) - .take_while(|entry| { - entry - .as_ref() - .ok() - // as soon as we are out of the range we exit - .map(|key| range.contains(key)) - // if we encounter an error we returns true to collect it later - .unwrap_or(true) - }) - .try_for_each::<_, StdResult<(), milli::heed::Error>>(|id| { - candidates.push(id?); - Ok(()) - })?; + for index_uid in indexes { + if let Some(tasks_set) = self.index_uid_task_ids.get(txn, index_uid)? { + candidates |= tasks_set; } } - Ok(candidates) + candidates.remove_range((Excluded(from), Unbounded)); + + let iter = candidates + .into_iter() + .rev() + .filter_map(|id| self.get(txn, id).transpose()); + + Ok(iter) } } @@ -250,8 +181,6 @@ pub mod test { use itertools::Itertools; use milli::heed::EnvOpenOptions; use nelson::Mocker; - use proptest::collection::vec; - use proptest::prelude::*; use tempfile::TempDir; use crate::index_resolver::IndexUid; @@ -460,26 +389,4 @@ pub mod test { "test" ); } - - proptest! 
{ - #[test] - fn encode_decode_roundtrip(index_uid in any::(), task_id in 0..TaskId::MAX) { - let value = (index_uid.as_ref(), task_id); - let bytes = IndexUidTaskIdCodec::bytes_encode(&value).unwrap(); - let (index, id) = IndexUidTaskIdCodec::bytes_decode(bytes.as_ref()).unwrap(); - assert_eq!(&*index_uid, index); - assert_eq!(task_id, id); - } - - #[test] - fn encode_doesnt_crash(index_uid in "\\PC*", task_id in 0..TaskId::MAX) { - let value = (index_uid.as_ref(), task_id); - IndexUidTaskIdCodec::bytes_encode(&value); - } - - #[test] - fn decode_doesnt_crash(bytes in vec(any::(), 0..1000)) { - IndexUidTaskIdCodec::bytes_decode(&bytes); - } - } } From d80e8b64afcca8a7cba6f7ce86c6758064fabdd4 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 1 Jun 2022 15:30:39 +0200 Subject: [PATCH 072/185] Align the tasks route API to the new spec --- meilisearch-http/src/routes/tasks.rs | 30 +++++++++------------------- meilisearch-http/src/task.rs | 3 ++- 2 files changed, 11 insertions(+), 22 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index a82ca835f..49554858d 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -30,7 +30,7 @@ pub struct TaskFilterQuery { index_uid: Option>>, #[serde(default = "DEFAULT_LIMIT")] limit: usize, - after: Option, + from: Option, } #[rustfmt::skip] @@ -74,7 +74,7 @@ async fn get_tasks( status, index_uid, limit, - after, + from, } = params.into_inner(); let search_rules = &meilisearch.filters().search_rules; @@ -135,25 +135,11 @@ async fn get_tasks( indexes_filters }; - let offset = match after { - Some(0) => { - let tasks = TaskListView { - results: vec![], - limit, - after: None, - }; - return Ok(HttpResponse::Ok().json(tasks)); - } - // We -1 here because we need an offset and we must exclude the `after` one. - Some(n) => Some(n - 1), - None => None, - }; - // We +1 just to know if there is more after this "page" or not. 
let limit = limit.saturating_add(1); let mut tasks_results = meilisearch - .list_tasks(filters, Some(limit), offset) + .list_tasks(filters, Some(limit), from) .await? .into_iter() .map(TaskView::from) @@ -161,17 +147,19 @@ async fn get_tasks( // If we were able to fetch the number +1 tasks we asked // it means that there is more to come. - let after = if tasks_results.len() == limit { - tasks_results.pop(); - tasks_results.last().map(|t| t.uid) + let next = if tasks_results.len() == limit { + tasks_results.pop().map(|t| t.uid) } else { None }; + let from = tasks_results.first().map(|t| t.uid); + let tasks = TaskListView { results: tasks_results, limit: limit.saturating_sub(1), - after, + from, + next, }; Ok(HttpResponse::Ok().json(tasks)) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 8eec71a4e..f8ba941d8 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -371,7 +371,8 @@ impl From for TaskView { pub struct TaskListView { pub results: Vec, pub limit: usize, - pub after: Option, + pub from: Option, + pub next: Option, } #[derive(Debug, Serialize)] From e2c204cf86e6b71d9ccb9c512558707acfb94bc4 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 23 May 2022 17:03:28 +0200 Subject: [PATCH 073/185] Update tests to fit to the new requirements --- meilisearch-http/tests/auth/api_keys.rs | 513 +++++++++++++------ meilisearch-http/tests/auth/authorization.rs | 272 ++++++---- meilisearch-http/tests/auth/mod.rs | 9 + meilisearch-http/tests/auth/tenant_token.rs | 31 +- meilisearch-http/tests/common/index.rs | 2 +- 5 files changed, 556 insertions(+), 271 deletions(-) diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index e9fb3d127..7919c8ee9 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -9,7 +9,9 @@ async fn add_valid_api_key() { server.use_api_key("MASTER_KEY"); let content = json!({ + "name": "indexing-key", 
"description": "Indexing API key", + "uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8", "indexes": ["products"], "actions": [ "search", @@ -31,13 +33,16 @@ async fn add_valid_api_key() { }); let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); let expected_response = json!({ + "name": "indexing-key", "description": "Indexing API key", + "uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8", "indexes": ["products"], "actions": [ "search", @@ -59,7 +64,6 @@ async fn add_valid_api_key() { }); assert_json_include!(actual: response, expected: expected_response); - assert_eq!(code, 201); } #[actix_rt::test] @@ -90,7 +94,8 @@ async fn add_valid_api_key_expired_at() { }); let (response, code) = server.add_api_key(content).await; - assert!(response["key"].is_string(), "{:?}", response); + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); @@ -118,7 +123,6 @@ async fn add_valid_api_key_expired_at() { }); assert_json_include!(actual: response, expected: expected_response); - assert_eq!(code, 201); } #[actix_rt::test] @@ -128,23 +132,19 @@ async fn add_valid_api_key_no_description() { let content = json!({ "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00" }); let (response, code) = server.add_api_key(content).await; - + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); let expected_response = json!({ - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], 
"indexes": [ "products" ], @@ -152,7 +152,6 @@ async fn add_valid_api_key_no_description() { }); assert_json_include!(actual: response, expected: expected_response); - assert_eq!(code, 201); } #[actix_rt::test] @@ -163,23 +162,19 @@ async fn add_valid_api_key_null_description() { let content = json!({ "description": Value::Null, "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00" }); let (response, code) = server.add_api_key(content).await; - + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); let expected_response = json!({ - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "indexes": [ "products" ], @@ -187,7 +182,6 @@ async fn add_valid_api_key_null_description() { }); assert_json_include!(actual: response, expected: expected_response); - assert_eq!(code, 201); } #[actix_rt::test] @@ -196,12 +190,11 @@ async fn error_add_api_key_no_header() { let content = json!({ "description": "Indexing API key", "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(401, code, "{:?}", &response); let expected_response = json!({ "message": "The Authorization header is missing. 
It must use the bearer authorization method.", @@ -211,7 +204,6 @@ async fn error_add_api_key_no_header() { }); assert_eq!(response, expected_response); - assert_eq!(code, 401); } #[actix_rt::test] @@ -222,12 +214,11 @@ async fn error_add_api_key_bad_key() { let content = json!({ "description": "Indexing API key", "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(403, code, "{:?}", &response); let expected_response = json!({ "message": "The provided API key is invalid.", @@ -237,7 +228,6 @@ async fn error_add_api_key_bad_key() { }); assert_eq!(response, expected_response); - assert_eq!(code, 403); } #[actix_rt::test] @@ -252,6 +242,7 @@ async fn error_add_api_key_missing_parameter() { "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": "`indexes` field is mandatory.", @@ -261,7 +252,6 @@ async fn error_add_api_key_missing_parameter() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); // missing actions let content = json!({ @@ -270,6 +260,7 @@ async fn error_add_api_key_missing_parameter() { "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": "`actions` field is mandatory.", @@ -279,7 +270,6 @@ async fn error_add_api_key_missing_parameter() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); // missing expiration date let content = json!({ @@ -288,6 +278,7 @@ async fn error_add_api_key_missing_parameter() { "actions": ["documents.add"], }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": "`expiresAt` field is 
mandatory.", @@ -297,7 +288,6 @@ async fn error_add_api_key_missing_parameter() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); } #[actix_rt::test] @@ -308,12 +298,11 @@ async fn error_add_api_key_invalid_parameters_description() { let content = json!({ "description": {"name":"products"}, "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": r#"`description` field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#, @@ -323,7 +312,30 @@ async fn error_add_api_key_invalid_parameters_description() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); +} + +#[actix_rt::test] +async fn error_add_api_key_invalid_parameters_name() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "name": {"name":"products"}, + "indexes": ["products"], + "actions": ["documents.add"], + "expiresAt": "2050-11-13T00:00:00Z" + }); + let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); + + let expected_response = json!({ + "message": r#"`name` field value `{"name":"products"}` is invalid. 
It should be a string or specified as a null value."#, + "code": "invalid_api_key_name", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_name" + }); + + assert_eq!(response, expected_response); } #[actix_rt::test] @@ -334,12 +346,11 @@ async fn error_add_api_key_invalid_parameters_indexes() { let content = json!({ "description": "Indexing API key", "indexes": {"name":"products"}, - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": r#"`indexes` field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#, @@ -349,7 +360,31 @@ async fn error_add_api_key_invalid_parameters_indexes() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); +} + +#[ignore] +#[actix_rt::test] +async fn error_add_api_key_invalid_index_uid_format() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "description": "Indexing API key", + "indexes": ["inv@lid uid"], + "actions": ["documents.add"], + "expiresAt": "2050-11-13T00:00:00Z" + }); + let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); + + let expected_response = json!({ + "message": "`inv@lid uid` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_api_key_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" + }); + + assert_eq!(response, expected_response); } #[actix_rt::test] @@ -364,6 +399,7 @@ async fn error_add_api_key_invalid_parameters_actions() { "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": r#"`actions` field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#, @@ -373,7 +409,6 @@ async fn error_add_api_key_invalid_parameters_actions() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); let content = json!({ "description": "Indexing API key", @@ -384,6 +419,7 @@ async fn error_add_api_key_invalid_parameters_actions() { "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": r#"`actions` field value `["doc.add"]` is invalid. It should be an array of string representing action names."#, @@ -393,7 +429,6 @@ async fn error_add_api_key_invalid_parameters_actions() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); } #[actix_rt::test] @@ -404,12 +439,11 @@ async fn error_add_api_key_invalid_parameters_expires_at() { let content = json!({ "description": "Indexing API key", "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": {"name":"products"} }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": r#"`expiresAt` field value `{"name":"products"}` is invalid. 
It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#, @@ -419,7 +453,6 @@ async fn error_add_api_key_invalid_parameters_expires_at() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); } #[actix_rt::test] @@ -430,12 +463,11 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() { let content = json!({ "description": "Indexing API key", "indexes": ["products"], - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2010-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": r#"`expiresAt` field value `"2010-11-13T00:00:00Z"` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#, @@ -445,7 +477,60 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); +} + +#[actix_rt::test] +async fn error_add_api_key_invalid_parameters_uid() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "description": "Indexing API key", + "uid": "aaaaabbbbbccc", + "indexes": ["products"], + "actions": ["documents.add"], + "expiresAt": "2050-11-13T00:00:00Z" + }); + let (response, code) = server.add_api_key(content).await; + assert_eq!(400, code, "{:?}", &response); + + let expected_response = json!({ + "message": r#"`uid` field value `"aaaaabbbbbccc"` is invalid. 
It should be a valid uuidv4 string or ommited."#, + "code": "invalid_api_key_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid" + }); + + assert_eq!(response, expected_response); +} + +#[actix_rt::test] +async fn error_add_api_key_parameters_uid_already_exist() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + let content = json!({ + "uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8", + "indexes": ["products"], + "actions": ["search"], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + // first creation is valid. + let (response, code) = server.add_api_key(content.clone()).await; + assert_eq!(201, code, "{:?}", &response); + + // uid already exist. + let (response, code) = server.add_api_key(content).await; + assert_eq!(409, code, "{:?}", &response); + + let expected_response = json!({ + "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` already exists for an API key.", + "code": "api_key_already_exists", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#api_key_already_exists" + }); + + assert_eq!(response, expected_response); } #[actix_rt::test] @@ -453,9 +538,11 @@ async fn get_api_key() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); + let uid = "4bc0887a-0e41-4f3b-935d-0c451dcee9c8"; let content = json!({ "description": "Indexing API key", "indexes": ["products"], + "uid": uid.to_string(), "actions": [ "search", "documents.add", @@ -477,20 +564,15 @@ async fn get_api_key() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. 
- assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); - let (response, code) = server.get_api_key(&key).await; - assert!(response["key"].is_string()); - assert!(response["expiresAt"].is_string()); - assert!(response["createdAt"].is_string()); - assert!(response["updatedAt"].is_string()); - let expected_response = json!({ "description": "Indexing API key", "indexes": ["products"], + "uid": uid.to_string(), "actions": [ "search", "documents.add", @@ -510,8 +592,23 @@ async fn get_api_key() { "expiresAt": "2050-11-13T00:00:00Z" }); - assert_json_include!(actual: response, expected: expected_response); - assert_eq!(code, 200); + // get with uid + let (response, code) = server.get_api_key(&uid).await; + assert_eq!(200, code, "{:?}", &response); + assert!(response["key"].is_string()); + assert!(response["expiresAt"].is_string()); + assert!(response["createdAt"].is_string()); + assert!(response["updatedAt"].is_string()); + assert_json_include!(actual: response, expected: &expected_response); + + // get with key + let (response, code) = server.get_api_key(&key).await; + assert_eq!(200, code, "{:?}", &response); + assert!(response["key"].is_string()); + assert!(response["expiresAt"].is_string()); + assert!(response["createdAt"].is_string()); + assert!(response["updatedAt"].is_string()); + assert_json_include!(actual: response, expected: &expected_response); } #[actix_rt::test] @@ -521,6 +618,7 @@ async fn error_get_api_key_no_header() { let (response, code) = server .get_api_key("d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4") .await; + assert_eq!(401, code, "{:?}", &response); let expected_response = json!({ "message": "The Authorization header is missing. 
It must use the bearer authorization method.", @@ -530,7 +628,6 @@ async fn error_get_api_key_no_header() { }); assert_eq!(response, expected_response); - assert_eq!(code, 401); } #[actix_rt::test] @@ -541,6 +638,7 @@ async fn error_get_api_key_bad_key() { let (response, code) = server .get_api_key("d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4") .await; + assert_eq!(403, code, "{:?}", &response); let expected_response = json!({ "message": "The provided API key is invalid.", @@ -550,7 +648,6 @@ async fn error_get_api_key_bad_key() { }); assert_eq!(response, expected_response); - assert_eq!(code, 403); } #[actix_rt::test] @@ -561,6 +658,7 @@ async fn error_get_api_key_not_found() { let (response, code) = server .get_api_key("d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4") .await; + assert_eq!(404, code, "{:?}", &response); let expected_response = json!({ "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.", @@ -570,7 +668,6 @@ async fn error_get_api_key_not_found() { }); assert_eq!(response, expected_response); - assert_eq!(code, 404); } #[actix_rt::test] @@ -600,11 +697,12 @@ async fn list_api_keys() { "expiresAt": "2050-11-13T00:00:00Z" }); - let (_response, code) = server.add_api_key(content).await; + let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. 
- assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); let (response, code) = server.list_api_keys().await; + assert_eq!(200, code, "{:?}", &response); let expected_response = json!({ "results": [ @@ -644,7 +742,6 @@ async fn list_api_keys() { ]}); assert_json_include!(actual: response, expected: expected_response); - assert_eq!(code, 200); } #[actix_rt::test] @@ -652,6 +749,7 @@ async fn error_list_api_keys_no_header() { let server = Server::new_auth().await; let (response, code) = server.list_api_keys().await; + assert_eq!(401, code, "{:?}", &response); let expected_response = json!({ "message": "The Authorization header is missing. It must use the bearer authorization method.", @@ -661,7 +759,6 @@ async fn error_list_api_keys_no_header() { }); assert_eq!(response, expected_response); - assert_eq!(code, 401); } #[actix_rt::test] @@ -670,6 +767,7 @@ async fn error_list_api_keys_bad_key() { server.use_api_key("d4000bd7225f77d1eb22cc706ed36772bbc36767c016a27f76def7537b68600d"); let (response, code) = server.list_api_keys().await; + assert_eq!(403, code, "{:?}", &response); let expected_response = json!({ "message": "The provided API key is invalid.", @@ -679,7 +777,6 @@ async fn error_list_api_keys_bad_key() { }); assert_eq!(response, expected_response); - assert_eq!(code, 403); } #[actix_rt::test] @@ -711,17 +808,17 @@ async fn delete_api_key() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); - let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); - let (_response, code) = server.delete_api_key(&key).await; - assert_eq!(code, 204); + let (response, code) = server.delete_api_key(&uid).await; + assert_eq!(204, code, "{:?}", &response); // check if API key no longer exist. 
- let (_response, code) = server.get_api_key(&key).await; - assert_eq!(code, 404); + let (_response, code) = server.get_api_key(&uid).await; + assert_eq!(404, code, "{:?}", &response); } #[actix_rt::test] @@ -731,6 +828,7 @@ async fn error_delete_api_key_no_header() { let (response, code) = server .delete_api_key("d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4") .await; + assert_eq!(401, code, "{:?}", &response); let expected_response = json!({ "message": "The Authorization header is missing. It must use the bearer authorization method.", @@ -740,7 +838,6 @@ async fn error_delete_api_key_no_header() { }); assert_eq!(response, expected_response); - assert_eq!(code, 401); } #[actix_rt::test] @@ -751,6 +848,7 @@ async fn error_delete_api_key_bad_key() { let (response, code) = server .delete_api_key("d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4") .await; + assert_eq!(403, code, "{:?}", &response); let expected_response = json!({ "message": "The provided API key is invalid.", @@ -760,7 +858,6 @@ async fn error_delete_api_key_bad_key() { }); assert_eq!(response, expected_response); - assert_eq!(code, 403); } #[actix_rt::test] @@ -771,6 +868,7 @@ async fn error_delete_api_key_not_found() { let (response, code) = server .delete_api_key("d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4") .await; + assert_eq!(404, code, "{:?}", &response); let expected_response = json!({ "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.", @@ -780,7 +878,6 @@ async fn error_delete_api_key_not_found() { }); assert_eq!(response, expected_response); - assert_eq!(code, 404); } #[actix_rt::test] @@ -808,12 +905,12 @@ async fn patch_api_key_description() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. 
- assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); - let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); let created_at = response["createdAt"].as_str().unwrap(); let updated_at = response["updatedAt"].as_str().unwrap(); @@ -821,7 +918,8 @@ async fn patch_api_key_description() { let content = json!({ "description": "Indexing API key" }); thread::sleep(time::Duration::new(1, 0)); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); @@ -848,18 +946,18 @@ async fn patch_api_key_description() { }); assert_json_include!(actual: response, expected: expected); - assert_eq!(code, 200); // Change the description - let content = json!({ "description": "Porduct API key" }); + let content = json!({ "description": "Product API key" }); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); let expected = json!({ - "description": "Porduct API key", + "description": "Product API key", "indexes": ["products"], "actions": [ "search", @@ -878,12 +976,12 @@ async fn patch_api_key_description() { }); assert_json_include!(actual: response, expected: expected); - assert_eq!(code, 200); // Remove the description let content = json!({ "description": serde_json::Value::Null }); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + 
assert_eq!(200, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); @@ -907,11 +1005,137 @@ async fn patch_api_key_description() { }); assert_json_include!(actual: response, expected: expected); - assert_eq!(code, 200); } #[actix_rt::test] -async fn patch_api_key_indexes() { +async fn patch_api_key_name() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + let (response, code) = server.add_api_key(content).await; + // must pass if add_valid_api_key test passes. + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + assert!(response["createdAt"].is_string()); + assert!(response["updatedAt"].is_string()); + + let uid = response["uid"].as_str().unwrap(); + let created_at = response["createdAt"].as_str().unwrap(); + let updated_at = response["updatedAt"].as_str().unwrap(); + + // Add a name + let content = json!({ "name": "Indexing API key" }); + + thread::sleep(time::Duration::new(1, 0)); + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); + assert!(response["key"].is_string()); + assert!(response["expiresAt"].is_string()); + assert!(response["createdAt"].is_string()); + assert_ne!(response["updatedAt"].as_str().unwrap(), updated_at); + assert_eq!(response["createdAt"].as_str().unwrap(), created_at); + + let expected = json!({ + "name": "Indexing API key", + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + 
"indexes.delete", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + assert_json_include!(actual: response, expected: expected); + + // Change the name + let content = json!({ "name": "Product API key" }); + + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); + assert!(response["key"].is_string()); + assert!(response["expiresAt"].is_string()); + assert!(response["createdAt"].is_string()); + + let expected = json!({ + "name": "Product API key", + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + assert_json_include!(actual: response, expected: expected); + + // Remove the name + let content = json!({ "name": serde_json::Value::Null }); + + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); + assert!(response["key"].is_string()); + assert!(response["expiresAt"].is_string()); + assert!(response["createdAt"].is_string()); + + let expected = json!({ + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + assert_json_include!(actual: response, expected: expected); +} + +#[actix_rt::test] +async fn patch_api_key_indexes_unchanged() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); @@ -936,19 +1160,20 @@ async fn patch_api_key_indexes() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. 
- assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); - let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); let created_at = response["createdAt"].as_str().unwrap(); let updated_at = response["updatedAt"].as_str().unwrap(); let content = json!({ "indexes": ["products", "prices"] }); thread::sleep(time::Duration::new(1, 0)); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); @@ -957,7 +1182,7 @@ async fn patch_api_key_indexes() { let expected = json!({ "description": "Indexing API key", - "indexes": ["products", "prices"], + "indexes": ["products"], "actions": [ "search", "documents.add", @@ -975,11 +1200,10 @@ async fn patch_api_key_indexes() { }); assert_json_include!(actual: response, expected: expected); - assert_eq!(code, 200); } #[actix_rt::test] -async fn patch_api_key_actions() { +async fn patch_api_key_actions_unchanged() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); @@ -1004,12 +1228,13 @@ async fn patch_api_key_actions() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. 
- assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); - let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + let created_at = response["createdAt"].as_str().unwrap(); let updated_at = response["updatedAt"].as_str().unwrap(); @@ -1024,7 +1249,8 @@ async fn patch_api_key_actions() { }); thread::sleep(time::Duration::new(1, 0)); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); @@ -1036,20 +1262,25 @@ async fn patch_api_key_actions() { "indexes": ["products"], "actions": [ "search", + "documents.add", "documents.get", + "documents.delete", + "indexes.create", "indexes.get", - "tasks.get", - "settings.get", + "indexes.update", + "indexes.delete", + "stats.get", + "dumps.create", + "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); assert_json_include!(actual: response, expected: expected); - assert_eq!(code, 200); } #[actix_rt::test] -async fn patch_api_key_expiration_date() { +async fn patch_api_key_expiration_date_unchanged() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); @@ -1074,19 +1305,20 @@ async fn patch_api_key_expiration_date() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. 
- assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["createdAt"].is_string()); assert!(response["updatedAt"].is_string()); - let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); let created_at = response["createdAt"].as_str().unwrap(); let updated_at = response["updatedAt"].as_str().unwrap(); let content = json!({ "expiresAt": "2055-11-13T00:00:00Z" }); thread::sleep(time::Duration::new(1, 0)); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(200, code, "{:?}", &response); assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); assert!(response["createdAt"].is_string()); @@ -1109,11 +1341,10 @@ async fn patch_api_key_expiration_date() { "dumps.create", "dumps.get" ], - "expiresAt": "2055-11-13T00:00:00Z" + "expiresAt": "2050-11-13T00:00:00Z" }); assert_json_include!(actual: response, expected: expected); - assert_eq!(code, 200); } #[actix_rt::test] @@ -1126,6 +1357,7 @@ async fn error_patch_api_key_no_header() { json!({}), ) .await; + assert_eq!(401, code, "{:?}", &response); let expected_response = json!({ "message": "The Authorization header is missing. 
It must use the bearer authorization method.", @@ -1135,7 +1367,6 @@ async fn error_patch_api_key_no_header() { }); assert_eq!(response, expected_response); - assert_eq!(code, 401); } #[actix_rt::test] @@ -1149,6 +1380,7 @@ async fn error_patch_api_key_bad_key() { json!({}), ) .await; + assert_eq!(403, code, "{:?}", &response); let expected_response = json!({ "message": "The provided API key is invalid.", @@ -1158,7 +1390,6 @@ async fn error_patch_api_key_bad_key() { }); assert_eq!(response, expected_response); - assert_eq!(code, 403); } #[actix_rt::test] @@ -1172,6 +1403,7 @@ async fn error_patch_api_key_not_found() { json!({}), ) .await; + assert_eq!(404, code, "{:?}", &response); let expected_response = json!({ "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.", @@ -1181,7 +1413,6 @@ async fn error_patch_api_key_not_found() { }); assert_eq!(response, expected_response); - assert_eq!(code, 404); } #[actix_rt::test] @@ -1200,17 +1431,18 @@ async fn error_patch_api_key_indexes_invalid_parameters() { let (response, code) = server.add_api_key(content).await; // must pass if add_valid_api_key test passes. - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); - let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); // invalid description let content = json!({ "description": 13 }); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ "message": "`description` field value `13` is invalid. 
It should be a string or specified as a null value.", @@ -1220,56 +1452,23 @@ async fn error_patch_api_key_indexes_invalid_parameters() { }); assert_eq!(response, expected_response); - assert_eq!(code, 400); - // invalid indexes + // invalid name let content = json!({ - "indexes": 13 + "name": 13 }); - let (response, code) = server.patch_api_key(&key, content).await; + let (response, code) = server.patch_api_key(&uid, content).await; + assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ - "message": "`indexes` field value `13` is invalid. It should be an array of string representing index names.", - "code": "invalid_api_key_indexes", + "message": "`name` field value `13` is invalid. It should be a string or specified as a null value.", + "code": "invalid_api_key_name", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_name" }); assert_eq!(response, expected_response); - assert_eq!(code, 400); - - // invalid actions - let content = json!({ - "actions": 13 - }); - let (response, code) = server.patch_api_key(&key, content).await; - - let expected_response = json!({ - "message": "`actions` field value `13` is invalid. It should be an array of string representing action names.", - "code": "invalid_api_key_actions", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" - }); - - assert_eq!(response, expected_response); - assert_eq!(code, 400); - - // invalid expiresAt - let content = json!({ - "expiresAt": 13 - }); - let (response, code) = server.patch_api_key(&key, content).await; - - let expected_response = json!({ - "message": "`expiresAt` field value `13` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 
'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", - "code": "invalid_api_key_expires_at", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" - }); - - assert_eq!(response, expected_response); - assert_eq!(code, 400); } #[actix_rt::test] @@ -1286,23 +1485,23 @@ async fn error_access_api_key_routes_no_master_key_set() { let (response, code) = server.add_api_key(json!({})).await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); let (response, code) = server.patch_api_key("content", json!({})).await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); let (response, code) = server.get_api_key("content").await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); let (response, code) = server.list_api_keys().await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); server.use_api_key("MASTER_KEY"); @@ -1315,21 +1514,21 @@ async fn error_access_api_key_routes_no_master_key_set() { let (response, code) = server.add_api_key(json!({})).await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); let (response, code) = server.patch_api_key("content", json!({})).await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); let (response, code) = server.get_api_key("content").await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, expected_code); let (response, code) = server.list_api_keys().await; + assert_eq!(expected_code, code, "{:?}", &response); assert_eq!(response, expected_response); - assert_eq!(code, 
expected_code); } diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index fc18758ef..81c626215 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -46,6 +46,11 @@ pub static AUTHORIZATIONS: Lazy hashset!{"stats.get", "*"}, ("POST", "/dumps") => hashset!{"dumps.create", "*"}, ("GET", "/version") => hashset!{"version", "*"}, + ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"}, + ("GET", "/keys/mykey/") => hashset!{"keys.get", "*"}, + ("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"}, + ("POST", "/keys") => hashset!{"keys.create", "*"}, + ("GET", "/keys") => hashset!{"keys.get", "*"}, } }); @@ -80,7 +85,7 @@ async fn error_access_expired_key() { }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); @@ -92,8 +97,14 @@ async fn error_access_expired_key() { for (method, route) in AUTHORIZATIONS.keys() { let (response, code) = server.dummy_request(method, route).await; - assert_eq!(response, INVALID_RESPONSE.clone()); - assert_eq!(code, 403); + assert_eq!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?}", + method, + route + ); + assert_eq!(403, code, "{:?}", &response); } } @@ -110,7 +121,7 @@ async fn error_access_unauthorized_index() { }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); @@ -123,8 +134,14 @@ async fn error_access_unauthorized_index() { { let (response, code) = server.dummy_request(method, route).await; - assert_eq!(response, INVALID_RESPONSE.clone()); - assert_eq!(code, 403); + assert_eq!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?}", + method, + route + ); + 
assert_eq!(403, code, "{:?}", &response); } } @@ -141,7 +158,7 @@ async fn error_access_unauthorized_action() { }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); @@ -154,14 +171,68 @@ async fn error_access_unauthorized_action() { let content = json!({ "actions": ALL_ACTIONS.difference(action).collect::>(), }); - let (_, code) = server.patch_api_key(&key, content).await; - assert_eq!(code, 200); + let (response, code) = server.patch_api_key(&key, content).await; + assert_eq!(200, code, "{:?}", &response); server.use_api_key(&key); let (response, code) = server.dummy_request(method, route).await; - assert_eq!(response, INVALID_RESPONSE.clone()); - assert_eq!(code, 403); + assert_eq!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?}", + method, + route + ); + assert_eq!(403, code, "{:?}", &response); + } +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn error_access_master_key() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + // master key must only have access to /keys + for ((method, route), _) in AUTHORIZATIONS + .iter() + .filter(|(_, action)| action.iter().all(|a| !a.starts_with("keys."))) + { + let (response, code) = server.dummy_request(method, route).await; + + assert_eq!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?}", + method, + route + ); + assert_eq!(403, code, "{:?}", &response); + } +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn access_authorized_master_key() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + // master key must only have access to /keys + for ((method, route), _) in AUTHORIZATIONS + .iter() + .filter(|(_, action)| action.iter().any(|a| a.starts_with("keys."))) + { + let (response, code) = 
server.dummy_request(method, route).await; + + assert_ne!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?}", + method, + route + ); + assert_ne!(code, 403); } } @@ -169,36 +240,34 @@ async fn error_access_unauthorized_action() { #[cfg_attr(target_os = "windows", ignore)] async fn access_authorized_restricted_index() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); - - let content = json!({ - "indexes": ["products"], - "actions": [], - "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), - }); - - let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); - assert!(response["key"].is_string()); - - let key = response["key"].as_str().unwrap(); - server.use_api_key(&key); - for ((method, route), actions) in AUTHORIZATIONS.iter() { for action in actions { - // Patch API key letting only the needed action. + // create a new API key letting only the needed action. + server.use_api_key("MASTER_KEY"); + let content = json!({ + "indexes": ["products"], "actions": [action], + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); - server.use_api_key("MASTER_KEY"); - let (_, code) = server.patch_api_key(&key, content).await; - assert_eq!(code, 200); + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + let key = response["key"].as_str().unwrap(); server.use_api_key(&key); + let (response, code) = server.dummy_request(method, route).await; - assert_ne!(response, INVALID_RESPONSE.clone()); + assert_ne!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?} with action: {:?}", + method, + route, + action + ); assert_ne!(code, 403); } } @@ -208,36 +277,35 @@ async fn access_authorized_restricted_index() { #[cfg_attr(target_os = "windows", ignore)] async fn access_authorized_no_index_restriction() { let mut server = 
Server::new_auth().await; - server.use_api_key("MASTER_KEY"); - - let content = json!({ - "indexes": ["*"], - "actions": [], - "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), - }); - - let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); - assert!(response["key"].is_string()); - - let key = response["key"].as_str().unwrap(); - server.use_api_key(&key); for ((method, route), actions) in AUTHORIZATIONS.iter() { for action in actions { + // create a new API key letting only the needed action. server.use_api_key("MASTER_KEY"); - // Patch API key letting only the needed action. let content = json!({ + "indexes": ["products"], "actions": [action], + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); - let (_, code) = server.patch_api_key(&key, content).await; - assert_eq!(code, 200); + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + + let key = response["key"].as_str().unwrap(); server.use_api_key(&key); + let (response, code) = server.dummy_request(method, route).await; - assert_ne!(response, INVALID_RESPONSE.clone()); + assert_ne!( + response, + INVALID_RESPONSE.clone(), + "on route: {:?} - {:?} with action: {:?}", + method, + route, + action + ); assert_ne!(code, 403); } } @@ -247,16 +315,16 @@ async fn access_authorized_no_index_restriction() { #[cfg_attr(target_os = "windows", ignore)] async fn access_authorized_stats_restricted_index() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; // create index `test` let index = server.index("test"); - let (_, code) = index.create(Some("id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); // create index `products` let index = server.index("products"); - let (_, 
code) = index.create(Some("product_id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("product_id")).await; + assert_eq!(202, code, "{:?}", &response); index.wait_task(0).await; // create key with access on `products` index only. @@ -266,7 +334,7 @@ async fn access_authorized_stats_restricted_index() { "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. @@ -274,7 +342,7 @@ async fn access_authorized_stats_restricted_index() { server.use_api_key(&key); let (response, code) = server.stats().await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); // key should have access on `products` index. assert!(response["indexes"].get("products").is_some()); @@ -287,16 +355,16 @@ async fn access_authorized_stats_restricted_index() { #[cfg_attr(target_os = "windows", ignore)] async fn access_authorized_stats_no_index_restriction() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; // create index `test` let index = server.index("test"); - let (_, code) = index.create(Some("id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); // create index `products` let index = server.index("products"); - let (_, code) = index.create(Some("product_id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("product_id")).await; + assert_eq!(202, code, "{:?}", &response); index.wait_task(0).await; // create key with access on all indexes. 
@@ -306,7 +374,7 @@ async fn access_authorized_stats_no_index_restriction() { "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. @@ -314,7 +382,7 @@ async fn access_authorized_stats_no_index_restriction() { server.use_api_key(&key); let (response, code) = server.stats().await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); // key should have access on `products` index. assert!(response["indexes"].get("products").is_some()); @@ -327,16 +395,16 @@ async fn access_authorized_stats_no_index_restriction() { #[cfg_attr(target_os = "windows", ignore)] async fn list_authorized_indexes_restricted_index() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; // create index `test` let index = server.index("test"); - let (_, code) = index.create(Some("id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); // create index `products` let index = server.index("products"); - let (_, code) = index.create(Some("product_id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("product_id")).await; + assert_eq!(202, code, "{:?}", &response); index.wait_task(0).await; // create key with access on `products` index only. @@ -346,7 +414,7 @@ async fn list_authorized_indexes_restricted_index() { "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. 
@@ -354,7 +422,7 @@ async fn list_authorized_indexes_restricted_index() { server.use_api_key(&key); let (response, code) = server.list_indexes(None, None).await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); let response = response["results"].as_array().unwrap(); // key should have access on `products` index. @@ -368,16 +436,16 @@ async fn list_authorized_indexes_restricted_index() { #[cfg_attr(target_os = "windows", ignore)] async fn list_authorized_indexes_no_index_restriction() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; // create index `test` let index = server.index("test"); - let (_, code) = index.create(Some("id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); // create index `products` let index = server.index("products"); - let (_, code) = index.create(Some("product_id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("product_id")).await; + assert_eq!(202, code, "{:?}", &response); index.wait_task(0).await; // create key with access on all indexes. @@ -387,7 +455,7 @@ async fn list_authorized_indexes_no_index_restriction() { "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. @@ -395,7 +463,7 @@ async fn list_authorized_indexes_no_index_restriction() { server.use_api_key(&key); let (response, code) = server.list_indexes(None, None).await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); let response = response["results"].as_array().unwrap(); // key should have access on `products` index. 
@@ -408,16 +476,16 @@ async fn list_authorized_indexes_no_index_restriction() { #[actix_rt::test] async fn list_authorized_tasks_restricted_index() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; // create index `test` let index = server.index("test"); - let (_, code) = index.create(Some("id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); // create index `products` let index = server.index("products"); - let (_, code) = index.create(Some("product_id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("product_id")).await; + assert_eq!(202, code, "{:?}", &response); index.wait_task(0).await; // create key with access on `products` index only. @@ -427,7 +495,7 @@ async fn list_authorized_tasks_restricted_index() { "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. @@ -435,7 +503,7 @@ async fn list_authorized_tasks_restricted_index() { server.use_api_key(&key); let (response, code) = server.service.get("/tasks").await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); println!("{}", response); let response = response["results"].as_array().unwrap(); // key should have access on `products` index. 
@@ -448,16 +516,16 @@ async fn list_authorized_tasks_restricted_index() { #[actix_rt::test] async fn list_authorized_tasks_no_index_restriction() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; // create index `test` let index = server.index("test"); - let (_, code) = index.create(Some("id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); // create index `products` let index = server.index("products"); - let (_, code) = index.create(Some("product_id")).await; - assert_eq!(code, 202); + let (response, code) = index.create(Some("product_id")).await; + assert_eq!(202, code, "{:?}", &response); index.wait_task(0).await; // create key with access on all indexes. @@ -467,7 +535,7 @@ async fn list_authorized_tasks_no_index_restriction() { "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. @@ -475,7 +543,7 @@ async fn list_authorized_tasks_no_index_restriction() { server.use_api_key(&key); let (response, code) = server.service.get("/tasks").await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); let response = response["results"].as_array().unwrap(); // key should have access on `products` index. @@ -498,7 +566,7 @@ async fn error_creating_index_without_action() { "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. 
@@ -522,7 +590,7 @@ async fn error_creating_index_without_action() { ]); let (response, code) = index.add_documents(documents, None).await; - assert_eq!(code, 202, "{:?}", response); + assert_eq!(202, code, "{:?}", &response); let task_id = response["taskUid"].as_u64().unwrap(); let response = index.wait_task(task_id).await; @@ -533,7 +601,7 @@ async fn error_creating_index_without_action() { let settings = json!({ "distinctAttribute": "test"}); let (response, code) = index.update_settings(settings).await; - assert_eq!(code, 202); + assert_eq!(202, code, "{:?}", &response); let task_id = response["taskUid"].as_u64().unwrap(); let response = index.wait_task(task_id).await; @@ -543,7 +611,7 @@ async fn error_creating_index_without_action() { // try to create a index via add specialized settings route let (response, code) = index.update_distinct_attribute(json!("test")).await; - assert_eq!(code, 202); + assert_eq!(202, code, "{:?}", &response); let task_id = response["taskUid"].as_u64().unwrap(); let response = index.wait_task(task_id).await; @@ -565,7 +633,7 @@ async fn lazy_create_index() { }); let (response, code) = server.add_api_key(content).await; - assert_eq!(code, 201); + assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); // use created key. 
@@ -582,13 +650,13 @@ async fn lazy_create_index() { ]); let (response, code) = index.add_documents(documents, None).await; - assert_eq!(code, 202, "{:?}", response); + assert_eq!(202, code, "{:?}", &response); let task_id = response["taskUid"].as_u64().unwrap(); index.wait_task(task_id).await; let (response, code) = index.get_task(task_id).await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); assert_eq!(response["status"], "succeeded"); // try to create a index via add settings route @@ -596,24 +664,24 @@ async fn lazy_create_index() { let settings = json!({ "distinctAttribute": "test"}); let (response, code) = index.update_settings(settings).await; - assert_eq!(code, 202); + assert_eq!(202, code, "{:?}", &response); let task_id = response["taskUid"].as_u64().unwrap(); index.wait_task(task_id).await; let (response, code) = index.get_task(task_id).await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); assert_eq!(response["status"], "succeeded"); // try to create a index via add specialized settings route let index = server.index("test2"); let (response, code) = index.update_distinct_attribute(json!("test")).await; - assert_eq!(code, 202); + assert_eq!(202, code, "{:?}", &response); let task_id = response["taskUid"].as_u64().unwrap(); index.wait_task(task_id).await; let (response, code) = index.get_task(task_id).await; - assert_eq!(code, 200); + assert_eq!(200, code, "{:?}", &response); assert_eq!(response["status"], "succeeded"); } diff --git a/meilisearch-http/tests/auth/mod.rs b/meilisearch-http/tests/auth/mod.rs index ef47f4a6a..03c24dd6d 100644 --- a/meilisearch-http/tests/auth/mod.rs +++ b/meilisearch-http/tests/auth/mod.rs @@ -13,6 +13,15 @@ impl Server { self.service.api_key = Some(api_key.as_ref().to_string()); } + /// Fetch and use the default admin key for nexts http requests. 
+ pub async fn use_admin_key(&mut self, master_key: impl AsRef) { + self.use_api_key(master_key); + let (response, code) = self.list_api_keys().await; + assert_eq!(200, code, "{:?}", response); + let admin_key = &response["results"][1]["key"]; + self.use_api_key(admin_key.as_str().unwrap()); + } + pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) { let url = "/keys"; self.service.post(url, content).await diff --git a/meilisearch-http/tests/auth/tenant_token.rs b/meilisearch-http/tests/auth/tenant_token.rs index bb9224590..d82e170aa 100644 --- a/meilisearch-http/tests/auth/tenant_token.rs +++ b/meilisearch-http/tests/auth/tenant_token.rs @@ -8,11 +8,15 @@ use time::{Duration, OffsetDateTime}; use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS}; -fn generate_tenant_token(parent_key: impl AsRef, mut body: HashMap<&str, Value>) -> String { +fn generate_tenant_token( + parent_uid: impl AsRef, + parent_key: impl AsRef, + mut body: HashMap<&str, Value>, +) -> String { use jsonwebtoken::{encode, EncodingKey, Header}; - let key_id = &parent_key.as_ref()[..8]; - body.insert("apiKeyPrefix", json!(key_id)); + let parent_uid = parent_uid.as_ref(); + body.insert("apiKeyUid", json!(parent_uid)); encode( &Header::default(), &body, @@ -114,7 +118,7 @@ static REFUSED_KEYS: Lazy> = Lazy::new(|| { macro_rules! compute_autorized_search { ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; let index = server.index("sales"); let documents = DOCUMENTS.clone(); index.add_documents(documents, None).await; @@ -130,9 +134,10 @@ macro_rules! 
compute_autorized_search { let (response, code) = server.add_api_key(key_content.clone()).await; assert_eq!(code, 201); let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); for tenant_token in $tenant_tokens.iter() { - let web_token = generate_tenant_token(&key, tenant_token.clone()); + let web_token = generate_tenant_token(&uid, &key, tenant_token.clone()); server.use_api_key(&web_token); let index = server.index("sales"); index @@ -160,7 +165,7 @@ macro_rules! compute_autorized_search { macro_rules! compute_forbidden_search { ($tenant_tokens:expr, $parent_keys:expr) => { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_admin_key("MASTER_KEY").await; let index = server.index("sales"); let documents = DOCUMENTS.clone(); index.add_documents(documents, None).await; @@ -172,9 +177,10 @@ macro_rules! compute_forbidden_search { let (response, code) = server.add_api_key(key_content.clone()).await; assert_eq!(code, 201, "{:?}", response); let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); for tenant_token in $tenant_tokens.iter() { - let web_token = generate_tenant_token(&key, tenant_token.clone()); + let web_token = generate_tenant_token(&uid, &key, tenant_token.clone()); server.use_api_key(&web_token); let index = server.index("sales"); index @@ -461,12 +467,13 @@ async fn error_access_forbidden_routes() { assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); let tenant_token = hashmap! 
{ "searchRules" => json!(["*"]), "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }; - let web_token = generate_tenant_token(&key, tenant_token); + let web_token = generate_tenant_token(&uid, &key, tenant_token); server.use_api_key(&web_token); for ((method, route), actions) in AUTHORIZATIONS.iter() { @@ -496,12 +503,13 @@ async fn error_access_expired_parent_key() { assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); let tenant_token = hashmap! { "searchRules" => json!(["*"]), "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }; - let web_token = generate_tenant_token(&key, tenant_token); + let web_token = generate_tenant_token(&uid, &key, tenant_token); server.use_api_key(&web_token); // test search request while parent_key is not expired @@ -538,12 +546,13 @@ async fn error_access_modified_token() { assert!(response["key"].is_string()); let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); let tenant_token = hashmap! 
{ "searchRules" => json!(["products"]), "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }; - let web_token = generate_tenant_token(&key, tenant_token); + let web_token = generate_tenant_token(&uid, &key, tenant_token); server.use_api_key(&web_token); // test search request while web_token is valid @@ -558,7 +567,7 @@ async fn error_access_modified_token() { "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }; - let alt = generate_tenant_token(&key, tenant_token); + let alt = generate_tenant_token(&uid, &key, tenant_token); let altered_token = [ web_token.split('.').next().unwrap(), alt.split('.').nth(1).unwrap(), diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index e21dbcb67..4be8ad873 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -110,7 +110,7 @@ impl Index<'_> { let url = format!("/tasks/{}", update_id); for _ in 0..10 { let (response, status_code) = self.service.get(&url).await; - assert_eq!(status_code, 200, "response: {}", response); + assert_eq!(200, status_code, "response: {}", response); if response["status"] == "succeeded" || response["status"] == "failed" { return response; From 96a5791e395829c8770f3519fee3ba0e3b988d20 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 25 May 2022 10:32:47 +0200 Subject: [PATCH 074/185] Add uid and name fields in keys --- Cargo.lock | 1 + meilisearch-auth/Cargo.toml | 1 + meilisearch-auth/src/error.rs | 11 ++ meilisearch-auth/src/key.rs | 50 +++--- meilisearch-auth/src/lib.rs | 143 ++++++++---------- meilisearch-auth/src/store.rs | 106 +++++++------ meilisearch-error/src/lib.rs | 6 + .../src/extractors/authentication/mod.rs | 28 ++-- meilisearch-http/src/routes/api_key.rs | 36 +++-- 9 files changed, 205 insertions(+), 177 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39eb78987..f48e6c59d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1982,6 +1982,7 
@@ dependencies = [ "sha2", "thiserror", "time 0.3.9", + "uuid", ] [[package]] diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index dd12b5b63..9a7ce0d3e 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -13,3 +13,4 @@ serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } +uuid = { version = "0.8.2", features = ["serde"] } diff --git a/meilisearch-auth/src/error.rs b/meilisearch-auth/src/error.rs index 8a87eda27..dc6301348 100644 --- a/meilisearch-auth/src/error.rs +++ b/meilisearch-auth/src/error.rs @@ -18,8 +18,16 @@ pub enum AuthControllerError { InvalidApiKeyExpiresAt(Value), #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")] InvalidApiKeyDescription(Value), + #[error( + "`name` field value `{0}` is invalid. It should be a string or specified as a null value." + )] + InvalidApiKeyName(Value), + #[error("`uid` field value `{0}` is invalid. 
It should be a valid uuidv4 string or ommited.")] + InvalidApiKeyUid(Value), #[error("API key `{0}` not found.")] ApiKeyNotFound(String), + #[error("`uid` field value `{0}` already exists for an API key.")] + ApiKeyAlreadyExists(String), #[error("Internal error: {0}")] Internal(Box), } @@ -39,7 +47,10 @@ impl ErrorCode for AuthControllerError { Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes, Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt, Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription, + Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName, Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound, + Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid, + Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists, Self::Internal(_) => Code::Internal, } } diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index 1b06f34be..d69c0aed4 100644 --- a/meilisearch-auth/src/key.rs +++ b/meilisearch-auth/src/key.rs @@ -1,18 +1,21 @@ use crate::action::Action; use crate::error::{AuthControllerError, Result}; -use crate::store::{KeyId, KEY_ID_LENGTH}; -use rand::Rng; +use crate::store::KeyId; + use serde::{Deserialize, Serialize}; use serde_json::{from_value, Value}; use time::format_description::well_known::Rfc3339; use time::macros::{format_description, time}; use time::{Date, OffsetDateTime, PrimitiveDateTime}; +use uuid::Uuid; #[derive(Debug, Deserialize, Serialize)] pub struct Key { #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, - pub id: KeyId, + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + pub uid: KeyId, pub actions: Vec, pub indexes: Vec, #[serde(with = "time::serde::rfc3339::option")] @@ -25,6 +28,15 @@ pub struct Key { impl Key { pub fn create_from_value(value: Value) -> Result { + let name = match value.get("name") { + Some(Value::Null) => None, + Some(des) => Some( + from_value(des.clone()) + .map_err(|_| 
AuthControllerError::InvalidApiKeyName(des.clone()))?, + ), + None => None, + }; + let description = match value.get("description") { Some(Value::Null) => None, Some(des) => Some( @@ -34,7 +46,13 @@ impl Key { None => None, }; - let id = generate_id(); + let uid = value.get("uid").map_or_else( + || Ok(Uuid::new_v4()), + |uid| { + from_value(uid.clone()) + .map_err(|_| AuthControllerError::InvalidApiKeyUid(uid.clone())) + }, + )?; let actions = value .get("actions") @@ -61,8 +79,9 @@ impl Key { let updated_at = created_at; Ok(Self { + name, description, - id, + uid, actions, indexes, expires_at, @@ -101,9 +120,11 @@ impl Key { pub(crate) fn default_admin() -> Self { let now = OffsetDateTime::now_utc(); + let uid = Uuid::new_v4(); Self { + name: Some("admin".to_string()), description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()), - id: generate_id(), + uid, actions: vec![Action::All], indexes: vec!["*".to_string()], expires_at: None, @@ -114,11 +135,13 @@ impl Key { pub(crate) fn default_search() -> Self { let now = OffsetDateTime::now_utc(); + let uid = Uuid::new_v4(); Self { + name: Some("search".to_string()), description: Some( "Default Search API Key (Use it to search from the frontend)".to_string(), ), - id: generate_id(), + uid, actions: vec![Action::Search], indexes: vec!["*".to_string()], expires_at: None, @@ -128,19 +151,6 @@ impl Key { } } -/// Generate a printable key of 64 characters using thread_rng. 
-fn generate_id() -> [u8; KEY_ID_LENGTH] { - const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"; - - let mut rng = rand::thread_rng(); - let mut bytes = [0; KEY_ID_LENGTH]; - for byte in bytes.iter_mut() { - *byte = CHARSET[rng.gen_range(0..CHARSET.len())]; - } - - bytes -} - fn parse_expiration_date(value: &Value) -> Result> { match value { Value::String(string) => OffsetDateTime::parse(string, &Rfc3339) diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 22263735e..9f9c59c35 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -4,14 +4,15 @@ pub mod error; mod key; mod store; +use crate::store::generate_key; use std::collections::{HashMap, HashSet}; use std::path::Path; -use std::str::from_utf8; + use std::sync::Arc; +use uuid::Uuid; use serde::{Deserialize, Serialize}; use serde_json::Value; -use sha2::{Digest, Sha256}; use time::OffsetDateTime; pub use action::{actions, Action}; @@ -42,62 +43,73 @@ impl AuthController { pub fn create_key(&self, value: Value) -> Result { let key = Key::create_from_value(value)?; - self.store.put_api_key(key) + match self.store.get_api_key(key.uid)? { + Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists( + key.uid.to_string(), + )), + None => self.store.put_api_key(key), + } } - pub fn update_key(&self, key: impl AsRef, value: Value) -> Result { - let mut key = self.get_key(key)?; + pub fn update_key(&self, uid: Uuid, value: Value) -> Result { + let mut key = self.get_key(uid)?; key.update_from_value(value)?; self.store.put_api_key(key) } - pub fn get_key(&self, key: impl AsRef) -> Result { + pub fn get_key(&self, uid: Uuid) -> Result { self.store - .get_api_key(&key)? - .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string())) + .get_api_key(uid)? 
+ .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string())) + } + + pub fn get_uid_from_sha(&self, key: &[u8]) -> Result> { + match &self.master_key { + Some(master_key) => self.store.get_uid_from_sha(key, master_key.as_bytes()), + None => Ok(None), + } + } + + pub fn try_get_uid_from_sha(&self, key: &str) -> Result { + self.get_uid_from_sha(key.as_bytes())? + .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.to_string())) } pub fn get_key_filters( &self, - key: impl AsRef, + uid: Uuid, search_rules: Option, ) -> Result { let mut filters = AuthFilter::default(); - if self - .master_key - .as_ref() - .map_or(false, |master_key| master_key != key.as_ref()) - { - let key = self - .store - .get_api_key(&key)? - .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?; + let key = self + .store + .get_api_key(uid)? + .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?; - if !key.indexes.iter().any(|i| i.as_str() == "*") { - filters.search_rules = match search_rules { - // Intersect search_rules with parent key authorized indexes. - Some(search_rules) => SearchRules::Map( - key.indexes - .into_iter() - .filter_map(|index| { - search_rules - .get_index_search_rules(&index) - .map(|index_search_rules| (index, Some(index_search_rules))) - }) - .collect(), - ), - None => SearchRules::Set(key.indexes.into_iter().collect()), - }; - } else if let Some(search_rules) = search_rules { - filters.search_rules = search_rules; - } - - filters.allow_index_creation = key - .actions - .iter() - .any(|&action| action == Action::IndexesAdd || action == Action::All); + if !key.indexes.iter().any(|i| i.as_str() == "*") { + filters.search_rules = match search_rules { + // Intersect search_rules with parent key authorized indexes. 
+ Some(search_rules) => SearchRules::Map( + key.indexes + .into_iter() + .filter_map(|index| { + search_rules + .get_index_search_rules(&index) + .map(|index_search_rules| (index, Some(index_search_rules))) + }) + .collect(), + ), + None => SearchRules::Set(key.indexes.into_iter().collect()), + }; + } else if let Some(search_rules) = search_rules { + filters.search_rules = search_rules; } + filters.allow_index_creation = key + .actions + .iter() + .any(|&action| action == Action::IndexesAdd || action == Action::All); + Ok(filters) } @@ -105,13 +117,11 @@ impl AuthController { self.store.list_api_keys() } - pub fn delete_key(&self, key: impl AsRef) -> Result<()> { - if self.store.delete_api_key(&key)? { + pub fn delete_key(&self, uid: Uuid) -> Result<()> { + if self.store.delete_api_key(uid)? { Ok(()) } else { - Err(AuthControllerError::ApiKeyNotFound( - key.as_ref().to_string(), - )) + Err(AuthControllerError::ApiKeyNotFound(uid.to_string())) } } @@ -121,32 +131,32 @@ impl AuthController { /// Generate a valid key from a key id using the current master key. /// Returns None if no master key has been set. - pub fn generate_key(&self, id: &str) -> Option { + pub fn generate_key(&self, uid: Uuid) -> Option { self.master_key .as_ref() - .map(|master_key| generate_key(master_key.as_bytes(), id)) + .map(|master_key| generate_key(uid.as_bytes(), master_key.as_bytes())) } /// Check if the provided key is authorized to make a specific action /// without checking if the key is valid. pub fn is_key_authorized( &self, - key: &[u8], + uid: Uuid, action: Action, index: Option<&str>, ) -> Result { match self .store // check if the key has access to all indexes. - .get_expiration_date(key, action, None)? + .get_expiration_date(uid, action, None)? .or(match index { // else check if the key has access to the requested index. Some(index) => { self.store - .get_expiration_date(key, action, Some(index.as_bytes()))? + .get_expiration_date(uid, action, Some(index.as_bytes()))? 
} // or to any index if no index has been requested. - None => self.store.prefix_first_expiration_date(key, action)?, + None => self.store.prefix_first_expiration_date(uid, action)?, }) { // check expiration date. Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp), @@ -156,29 +166,6 @@ impl AuthController { None => Ok(false), } } - - /// Check if the provided key is valid - /// without checking if the key is authorized to make a specific action. - pub fn is_key_valid(&self, key: &[u8]) -> Result { - if let Some(id) = self.store.get_key_id(key) { - let id = from_utf8(&id)?; - if let Some(generated) = self.generate_key(id) { - return Ok(generated.as_bytes() == key); - } - } - - Ok(false) - } - - /// Check if the provided key is valid - /// and is authorized to make a specific action. - pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result { - if self.is_key_authorized(key, action, index)? { - self.is_key_valid(key) - } else { - Ok(false) - } - } } pub struct AuthFilter { @@ -258,12 +245,6 @@ pub struct IndexSearchRules { pub filter: Option, } -fn generate_key(master_key: &[u8], keyid: &str) -> String { - let key = [keyid.as_bytes(), master_key].concat(); - let sha = Sha256::digest(&key); - format!("{}{:x}", keyid, sha) -} - fn generate_default_keys(store: &HeedAuthStore) -> Result<()> { store.put_api_key(Key::default_admin())?; store.put_api_key(Key::default_search())?; diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 4bd3cdded..762e707bc 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -1,4 +1,3 @@ -use enum_iterator::IntoEnumIterator; use std::borrow::Cow; use std::cmp::Reverse; use std::convert::TryFrom; @@ -8,20 +7,22 @@ use std::path::Path; use std::str; use std::sync::Arc; +use enum_iterator::IntoEnumIterator; use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; use milli::heed::{Database, Env, EnvOpenOptions, RwTxn}; +use sha2::{Digest, Sha256}; use 
time::OffsetDateTime; +use uuid::Uuid; use super::error::Result; use super::{Action, Key}; const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB -pub const KEY_ID_LENGTH: usize = 8; const AUTH_DB_PATH: &str = "auth"; const KEY_DB_NAME: &str = "api-keys"; const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration"; -pub type KeyId = [u8; KEY_ID_LENGTH]; +pub type KeyId = Uuid; #[derive(Clone)] pub struct HeedAuthStore { @@ -73,12 +74,13 @@ impl HeedAuthStore { } pub fn put_api_key(&self, key: Key) -> Result { + let uid = key.uid; let mut wtxn = self.env.write_txn()?; - self.keys.put(&mut wtxn, &key.id, &key)?; - let id = key.id; + self.keys.put(&mut wtxn, uid.as_bytes(), &key)?; + // delete key from inverted database before refilling it. - self.delete_key_from_inverted_db(&mut wtxn, &id)?; + self.delete_key_from_inverted_db(&mut wtxn, &uid)?; // create inverted database. let db = self.action_keyid_index_expiration; @@ -93,13 +95,13 @@ impl HeedAuthStore { for action in actions { if no_index_restriction { // If there is no index restriction we put None. - db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?; + db.put(&mut wtxn, &(&uid, &action, None), &key.expires_at)?; } else { // else we create a key for each index. 
for index in key.indexes.iter() { db.put( &mut wtxn, - &(&id, &action, Some(index.as_bytes())), + &(&uid, &action, Some(index.as_bytes())), &key.expires_at, )?; } @@ -111,24 +113,33 @@ impl HeedAuthStore { Ok(key) } - pub fn get_api_key(&self, key: impl AsRef) -> Result> { + pub fn get_api_key(&self, uid: Uuid) -> Result> { let rtxn = self.env.read_txn()?; - match self.get_key_id(key.as_ref().as_bytes()) { - Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()), - None => Ok(None), - } + self.keys.get(&rtxn, uid.as_bytes()).map_err(|e| e.into()) } - pub fn delete_api_key(&self, key: impl AsRef) -> Result { + pub fn get_uid_from_sha(&self, key_sha: &[u8], master_key: &[u8]) -> Result> { + let rtxn = self.env.read_txn()?; + let uid = self + .keys + .remap_data_type::() + .iter(&rtxn)? + .filter_map(|res| match res { + Ok((uid, _)) if generate_key(uid, master_key).as_bytes() == key_sha => { + let (uid, _) = try_split_array_at(uid)?; + Some(Uuid::from_bytes(*uid)) + } + _ => None, + }) + .next(); + + Ok(uid) + } + + pub fn delete_api_key(&self, uid: Uuid) -> Result { let mut wtxn = self.env.write_txn()?; - let existing = match self.get_key_id(key.as_ref().as_bytes()) { - Some(id) => { - let existing = self.keys.delete(&mut wtxn, &id)?; - self.delete_key_from_inverted_db(&mut wtxn, &id)?; - existing - } - None => false, - }; + let existing = self.keys.delete(&mut wtxn, uid.as_bytes())?; + self.delete_key_from_inverted_db(&mut wtxn, &uid)?; wtxn.commit()?; Ok(existing) @@ -147,49 +158,37 @@ impl HeedAuthStore { pub fn get_expiration_date( &self, - key: &[u8], + uid: Uuid, action: Action, index: Option<&[u8]>, ) -> Result>> { let rtxn = self.env.read_txn()?; - match self.get_key_id(key) { - Some(id) => { - let tuple = (&id, &action, index); - Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?) - } - None => Ok(None), - } + let tuple = (&uid, &action, index); + Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?) 
} pub fn prefix_first_expiration_date( &self, - key: &[u8], + uid: Uuid, action: Action, ) -> Result>> { let rtxn = self.env.read_txn()?; - match self.get_key_id(key) { - Some(id) => { - let tuple = (&id, &action, None); - Ok(self - .action_keyid_index_expiration - .prefix_iter(&rtxn, &tuple)? - .next() - .transpose()? - .map(|(_, expiration)| expiration)) - } - None => Ok(None), - } - } + let tuple = (&uid, &action, None); + let exp = self + .action_keyid_index_expiration + .prefix_iter(&rtxn, &tuple)? + .next() + .transpose()? + .map(|(_, expiration)| expiration); - pub fn get_key_id(&self, key: &[u8]) -> Option { - try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id) + Ok(exp) } fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> { let mut iter = self .action_keyid_index_expiration .remap_types::() - .prefix_iter_mut(wtxn, key)?; + .prefix_iter_mut(wtxn, key.as_bytes())?; while iter.next().transpose()?.is_some() { // safety: we don't keep references from inside the LMDB database. unsafe { iter.del_current()? }; @@ -207,14 +206,15 @@ impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec { type DItem = (KeyId, Action, Option<&'a [u8]>); fn bytes_decode(bytes: &'a [u8]) -> Option { - let (key_id, action_bytes) = try_split_array_at(bytes)?; + let (key_id_bytes, action_bytes) = try_split_array_at(bytes)?; let (action_bytes, index) = match try_split_array_at(action_bytes)? 
{ (action, []) => (action, None), (action, index) => (action, Some(index)), }; + let key_id = Uuid::from_bytes(*key_id_bytes); let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?; - Some((*key_id, action, index)) + Some((key_id, action, index)) } } @@ -224,7 +224,7 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { fn bytes_encode((key_id, action, index): &Self::EItem) -> Option> { let mut bytes = Vec::new(); - bytes.extend_from_slice(*key_id); + bytes.extend_from_slice(key_id.as_bytes()); let action_bytes = u8::to_be_bytes(action.repr()); bytes.extend_from_slice(&action_bytes); if let Some(index) = index { @@ -235,6 +235,12 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { } } +pub fn generate_key(uid: &[u8], master_key: &[u8]) -> String { + let key = [uid, master_key].concat(); + let sha = Sha256::digest(&key); + format!("{:x}", sha) +} + /// Divides one slice into two at an index, returns `None` if mid is out of bounds. pub fn try_split_at(slice: &[T], mid: usize) -> Option<(&[T], &[T])> { if mid <= slice.len() { diff --git a/meilisearch-error/src/lib.rs b/meilisearch-error/src/lib.rs index 11613497c..57882f8e0 100644 --- a/meilisearch-error/src/lib.rs +++ b/meilisearch-error/src/lib.rs @@ -166,6 +166,9 @@ pub enum Code { InvalidApiKeyIndexes, InvalidApiKeyExpiresAt, InvalidApiKeyDescription, + InvalidApiKeyName, + InvalidApiKeyUid, + ApiKeyAlreadyExists, } impl Code { @@ -272,6 +275,9 @@ impl Code { InvalidApiKeyDescription => { ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST) } + InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST), + InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST), + ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT), InvalidMinWordLengthForTypo => { ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST) } diff --git 
a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index c4cd9ef14..cf93d363a 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -132,6 +132,7 @@ pub mod policies { use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; + use uuid::Uuid; use crate::extractors::authentication::Policy; use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules}; @@ -146,16 +147,16 @@ pub mod policies { validation } - /// Extracts the key prefix used to sign the payload from the payload, without performing any validation. - fn extract_key_prefix(token: &str) -> Option { + /// Extracts the key id used to sign the payload from the payload, without performing any validation. + fn extract_key_id(token: &str) -> Option { let mut validation = tenant_token_validation(); validation.insecure_disable_signature_validation(); let dummy_key = DecodingKey::from_secret(b"secret"); let token_data = decode::(token, &dummy_key, &validation).ok()?; // get token fields without validating it. - let Claims { api_key_prefix, .. } = token_data.claims; - Some(api_key_prefix) + let Claims { uid, .. } = token_data.claims; + Some(uid) } pub struct MasterPolicy; @@ -195,8 +196,10 @@ pub mod policies { return Some(filters); } else if let Some(action) = Action::from_repr(A) { // API key - if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) { - return auth.get_key_filters(token, None).ok(); + if let Ok(Some(uid)) = auth.get_uid_from_sha(token.as_bytes()) { + if let Ok(true) = auth.is_key_authorized(uid, action, index) { + return auth.get_key_filters(uid, None).ok(); + } } } @@ -215,14 +218,11 @@ pub mod policies { return None; } - let api_key_prefix = extract_key_prefix(token)?; + let uid = extract_key_id(token)?; // check if parent key is authorized to do the action. 
- if auth - .is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index) - .ok()? - { + if auth.is_key_authorized(uid, Action::Search, index).ok()? { // Check if tenant token is valid. - let key = auth.generate_key(&api_key_prefix)?; + let key = auth.generate_key(uid)?; let data = decode::( token, &DecodingKey::from_secret(key.as_bytes()), @@ -245,7 +245,7 @@ pub mod policies { } return auth - .get_key_filters(api_key_prefix, Some(data.claims.search_rules)) + .get_key_filters(uid, Some(data.claims.search_rules)) .ok(); } @@ -258,6 +258,6 @@ pub mod policies { struct Claims { search_rules: SearchRules, exp: Option, - api_key_prefix: String, + uid: Uuid, } } diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index 310b09c4d..ba964e5d1 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -1,4 +1,5 @@ use std::str; +use uuid::Uuid; use actix_web::{web, HttpRequest, HttpResponse}; @@ -20,7 +21,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .route(web::get().to(SeqHandler(list_api_keys))), ) .service( - web::resource("/{api_key}") + web::resource("/{key}") .route(web::get().to(SeqHandler(get_api_key))) .route(web::patch().to(SeqHandler(patch_api_key))) .route(web::delete().to(SeqHandler(delete_api_key))), @@ -65,9 +66,12 @@ pub async fn get_api_key( auth_controller: GuardedData, path: web::Path, ) -> Result { - let api_key = path.into_inner().api_key; + let key = path.into_inner().key; + let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { - let key = auth_controller.get_key(&api_key)?; + let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.try_get_uid_from_sha(&key))?; + let key = auth_controller.get_key(uid)?; + Ok(KeyView::from_key(key, &auth_controller)) }) .await @@ -81,10 +85,12 @@ pub async fn patch_api_key( body: web::Json, path: web::Path, ) -> Result { - let api_key = path.into_inner().api_key; + let key = 
path.into_inner().key; let body = body.into_inner(); let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { - let key = auth_controller.update_key(&api_key, body)?; + let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.try_get_uid_from_sha(&key))?; + let key = auth_controller.update_key(uid, body)?; + Ok(KeyView::from_key(key, &auth_controller)) }) .await @@ -97,24 +103,29 @@ pub async fn delete_api_key( auth_controller: GuardedData, path: web::Path, ) -> Result { - let api_key = path.into_inner().api_key; - tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key)) - .await - .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??; + let key = path.into_inner().key; + tokio::task::spawn_blocking(move || { + let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.try_get_uid_from_sha(&key))?; + auth_controller.delete_key(uid) + }) + .await + .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??; Ok(HttpResponse::NoContent().finish()) } #[derive(Deserialize)] pub struct AuthParam { - api_key: String, + key: String, } #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct KeyView { + name: Option, description: Option, key: String, + uid: Uuid, actions: Vec, indexes: Vec, #[serde(serialize_with = "time::serde::rfc3339::option::serialize")] @@ -127,12 +138,13 @@ struct KeyView { impl KeyView { fn from_key(key: Key, auth: &AuthController) -> Self { - let key_id = str::from_utf8(&key.id).unwrap(); - let generated_key = auth.generate_key(key_id).unwrap_or_default(); + let generated_key = auth.generate_key(key.uid).unwrap_or_default(); KeyView { + name: key.name, description: key.description, key: generated_key, + uid: key.uid, actions: key.actions, indexes: key.indexes, expires_at: key.expires_at, From d54643455c81044f9964aea0cff9c2bb327f6262 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 25 May 2022 10:41:06 +0200 Subject: [PATCH 075/185] Make PATCH only modify 
name, description, and updated_at fields --- meilisearch-auth/src/key.rs | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index d69c0aed4..bdabb2b21 100644 --- a/meilisearch-auth/src/key.rs +++ b/meilisearch-auth/src/key.rs @@ -97,20 +97,10 @@ impl Key { self.description = des?; } - if let Some(act) = value.get("actions") { - let act = from_value(act.clone()) - .map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone())); - self.actions = act?; - } - - if let Some(ind) = value.get("indexes") { - let ind = from_value(ind.clone()) - .map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone())); - self.indexes = ind?; - } - - if let Some(exp) = value.get("expiresAt") { - self.expires_at = parse_expiration_date(exp)?; + if let Some(des) = value.get("name") { + let des = from_value(des.clone()) + .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone())); + self.name = des?; } self.updated_at = OffsetDateTime::now_utc(); From 34c8888f5683374d9e021402bbb4d3f3e8f52cc5 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 25 May 2022 15:25:57 +0200 Subject: [PATCH 076/185] Add keys actions --- meilisearch-auth/src/action.rs | 27 +++++++++++++++++++++++--- meilisearch-http/src/routes/api_key.rs | 10 +++++----- 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/meilisearch-auth/src/action.rs b/meilisearch-auth/src/action.rs index 7ffe9b908..088ad6ba7 100644 --- a/meilisearch-auth/src/action.rs +++ b/meilisearch-auth/src/action.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; #[repr(u8)] pub enum Action { #[serde(rename = "*")] - All = 0, + All = actions::ALL, #[serde(rename = "search")] Search = actions::SEARCH, #[serde(rename = "documents.add")] @@ -36,13 +36,21 @@ pub enum Action { DumpsGet = actions::DUMPS_GET, #[serde(rename = "version")] Version = actions::VERSION, + #[serde(rename = "keys.create")] + KeysAdd = actions::KEYS_CREATE, + 
#[serde(rename = "keys.get")] + KeysGet = actions::KEYS_GET, + #[serde(rename = "keys.update")] + KeysUpdate = actions::KEYS_UPDATE, + #[serde(rename = "keys.delete")] + KeysDelete = actions::KEYS_DELETE, } impl Action { pub fn from_repr(repr: u8) -> Option { use actions::*; match repr { - 0 => Some(Self::All), + ALL => Some(Self::All), SEARCH => Some(Self::Search), DOCUMENTS_ADD => Some(Self::DocumentsAdd), DOCUMENTS_GET => Some(Self::DocumentsGet), @@ -58,6 +66,10 @@ impl Action { DUMPS_CREATE => Some(Self::DumpsCreate), DUMPS_GET => Some(Self::DumpsGet), VERSION => Some(Self::Version), + KEYS_CREATE => Some(Self::KeysAdd), + KEYS_GET => Some(Self::KeysGet), + KEYS_UPDATE => Some(Self::KeysUpdate), + KEYS_DELETE => Some(Self::KeysDelete), _otherwise => None, } } @@ -65,7 +77,7 @@ impl Action { pub fn repr(&self) -> u8 { use actions::*; match self { - Self::All => 0, + Self::All => ALL, Self::Search => SEARCH, Self::DocumentsAdd => DOCUMENTS_ADD, Self::DocumentsGet => DOCUMENTS_GET, @@ -81,11 +93,16 @@ impl Action { Self::DumpsCreate => DUMPS_CREATE, Self::DumpsGet => DUMPS_GET, Self::Version => VERSION, + Self::KeysAdd => KEYS_CREATE, + Self::KeysGet => KEYS_GET, + Self::KeysUpdate => KEYS_UPDATE, + Self::KeysDelete => KEYS_DELETE, } } } pub mod actions { + pub(crate) const ALL: u8 = 0; pub const SEARCH: u8 = 1; pub const DOCUMENTS_ADD: u8 = 2; pub const DOCUMENTS_GET: u8 = 3; @@ -101,4 +118,8 @@ pub mod actions { pub const DUMPS_CREATE: u8 = 13; pub const DUMPS_GET: u8 = 14; pub const VERSION: u8 = 15; + pub const KEYS_CREATE: u8 = 16; + pub const KEYS_GET: u8 = 17; + pub const KEYS_UPDATE: u8 = 18; + pub const KEYS_DELETE: u8 = 19; } diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index ba964e5d1..37ff80ec6 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -29,7 +29,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { } pub async fn create_api_key( - auth_controller: 
GuardedData, + auth_controller: GuardedData, AuthController>, body: web::Json, _req: HttpRequest, ) -> Result { @@ -45,7 +45,7 @@ pub async fn create_api_key( } pub async fn list_api_keys( - auth_controller: GuardedData, + auth_controller: GuardedData, AuthController>, _req: HttpRequest, ) -> Result { let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { @@ -63,7 +63,7 @@ pub async fn list_api_keys( } pub async fn get_api_key( - auth_controller: GuardedData, + auth_controller: GuardedData, AuthController>, path: web::Path, ) -> Result { let key = path.into_inner().key; @@ -81,7 +81,7 @@ pub async fn get_api_key( } pub async fn patch_api_key( - auth_controller: GuardedData, + auth_controller: GuardedData, AuthController>, body: web::Json, path: web::Path, ) -> Result { @@ -100,7 +100,7 @@ pub async fn patch_api_key( } pub async fn delete_api_key( - auth_controller: GuardedData, + auth_controller: GuardedData, AuthController>, path: web::Path, ) -> Result { let key = path.into_inner().key; From a57b2d95386be28012b026c59647693df8db431f Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 25 May 2022 16:35:00 +0200 Subject: [PATCH 077/185] Restrict master key access to /keys routes --- .../src/extractors/authentication/mod.rs | 26 +++++++------------ 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index cf93d363a..a0e914ec9 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -159,22 +159,9 @@ pub mod policies { Some(uid) } - pub struct MasterPolicy; - - impl Policy for MasterPolicy { - fn authenticate( - auth: AuthController, - token: &str, - _index: Option<&str>, - ) -> Option { - if let Some(master_key) = auth.get_master_key() { - if master_key == token { - return Some(AuthFilter::default()); - } - } - - None - } + fn is_keys_action(action: 
u8) -> bool { + use actions::*; + matches!(action, KEYS_GET | KEYS_CREATE | KEYS_UPDATE | KEYS_DELETE) } pub struct ActionPolicy; @@ -186,7 +173,12 @@ pub mod policies { index: Option<&str>, ) -> Option { // authenticate if token is the master key. - if auth.get_master_key().map_or(true, |mk| mk == token) { + // master key can only have access to keys routes. + // if master key is None only keys routes are inaccessible. + if auth + .get_master_key() + .map_or_else(|| !is_keys_action(A), |mk| mk == token && is_keys_action(A)) + { return Some(AuthFilter::default()); } From b9a79eb8583807769f8b9a9e27b0001f5f6d43c0 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 30 May 2022 10:59:48 +0200 Subject: [PATCH 078/185] Change apiKeyPrefix to apiKeyUid --- meilisearch-http/src/extractors/authentication/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index a0e914ec9..fdce0f5f0 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -155,8 +155,8 @@ pub mod policies { let token_data = decode::(token, &dummy_key, &validation).ok()?; // get token fields without validating it. - let Claims { uid, .. } = token_data.claims; - Some(uid) + let Claims { api_key_uid, .. 
} = token_data.claims; + Some(api_key_uid) } fn is_keys_action(action: u8) -> bool { @@ -250,6 +250,6 @@ pub mod policies { struct Claims { search_rules: SearchRules, exp: Option, - uid: Uuid, + api_key_uid: Uuid, } } From 70916d65968ff7697aabbfc0edf00e26ef38f308 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 30 May 2022 14:57:10 +0200 Subject: [PATCH 079/185] Patch dump v4 --- meilisearch-auth/src/dump.rs | 26 ++++++++++++++++++++++++++ meilisearch-lib/src/dump/loaders/v4.rs | 6 +++--- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/meilisearch-auth/src/dump.rs b/meilisearch-auth/src/dump.rs index 77a4aa5ca..1d3bc2139 100644 --- a/meilisearch-auth/src/dump.rs +++ b/meilisearch-auth/src/dump.rs @@ -1,8 +1,11 @@ +use serde_json::{Map, Value}; +use std::fs; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::io::Write; use std::path::Path; +use uuid::Uuid; use crate::{AuthController, HeedAuthStore, Result}; @@ -44,4 +47,27 @@ impl AuthController { Ok(()) } + + pub fn patch_dump_v4(src: impl AsRef, dst: impl AsRef) -> Result<()> { + let keys_file_src = src.as_ref().join(KEYS_PATH); + + if !keys_file_src.exists() { + return Ok(()); + } + + fs::create_dir_all(&dst)?; + let keys_file_dst = dst.as_ref().join(KEYS_PATH); + let mut writer = File::create(&keys_file_dst)?; + + let mut reader = BufReader::new(File::open(&keys_file_src)?).lines(); + while let Some(key) = reader.next().transpose()? 
{ + let mut key: Map = serde_json::from_str(&key)?; + let uid = Uuid::new_v4().to_string(); + key.insert("uid".to_string(), Value::String(uid)); + serde_json::to_writer(&mut writer, &key)?; + writer.write_all(b"\n")?; + } + + Ok(()) + } } diff --git a/meilisearch-lib/src/dump/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs index 126300af8..3caa7a9e9 100644 --- a/meilisearch-lib/src/dump/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -6,6 +6,8 @@ use fs_extra::dir::{self, CopyOptions}; use log::info; use tempfile::tempdir; +use meilisearch_auth::AuthController; + use crate::dump::{compat, Metadata}; use crate::options::IndexerOpts; use crate::tasks::task::Task; @@ -43,9 +45,7 @@ pub fn load_dump( patch_updates(&src, &patched_dir)?; // Keys - if src.as_ref().join("keys").exists() { - fs::copy(src.as_ref().join("keys"), patched_dir.path().join("keys"))?; - } + AuthController::patch_dump_v4(&src, patched_dir.path())?; super::v5::load_dump( meta, From 84f52ac17533fa1eb651d19380abb16ae1fac284 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 13:56:42 +0200 Subject: [PATCH 080/185] Add v4 feature to uuid --- meilisearch-auth/Cargo.toml | 2 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 9a7ce0d3e..7ffa072e8 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -13,4 +13,4 @@ serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } -uuid = { version = "0.8.2", features = ["serde"] } +uuid = { version = "0.8.2", features = ["serde", "v4"] } diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 75d0ac06e..ba11b20e0 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -75,7 +75,7 
@@ thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } tokio = { version = "1.17.0", features = ["full"] } tokio-stream = "0.1.8" -uuid = { version = "0.8.2", features = ["serde"] } +uuid = { version = "0.8.2", features = ["serde", "v4"] } walkdir = "2.3.2" [dev-dependencies] diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 85ae49f64..bb8e628c1 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -52,7 +52,7 @@ tempfile = "3.3.0" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } tokio = { version = "1.17.0", features = ["full"] } -uuid = { version = "0.8.2", features = ["serde"] } +uuid = { version = "0.8.2", features = ["serde", "v4"] } walkdir = "2.3.2" whoami = { version = "1.2.1", optional = true } From 96152a3d32a4c1c076daaada771b8812043281f9 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 14:18:34 +0200 Subject: [PATCH 081/185] Change default API keys names and descriptions --- meilisearch-auth/src/key.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index bdabb2b21..f55993607 100644 --- a/meilisearch-auth/src/key.rs +++ b/meilisearch-auth/src/key.rs @@ -112,8 +112,8 @@ impl Key { let now = OffsetDateTime::now_utc(); let uid = Uuid::new_v4(); Self { - name: Some("admin".to_string()), - description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()), + name: Some("Default Admin API Key".to_string()), + description: Some("Use it for all other than search operations. Caution! 
Do not expose it on a public frontend".to_string()), uid, actions: vec![Action::All], indexes: vec!["*".to_string()], @@ -127,10 +127,8 @@ impl Key { let now = OffsetDateTime::now_utc(); let uid = Uuid::new_v4(); Self { - name: Some("search".to_string()), - description: Some( - "Default Search API Key (Use it to search from the frontend)".to_string(), - ), + name: Some("Default Search API Key".to_string()), + description: Some("Use it to search from the frontend".to_string()), uid, actions: vec![Action::Search], indexes: vec!["*".to_string()], From 151f4941102f6675f15257cebf5d85c528f46e32 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 15:04:33 +0200 Subject: [PATCH 082/185] Use Stream Deserializer to load dumps --- meilisearch-auth/src/dump.rs | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/meilisearch-auth/src/dump.rs b/meilisearch-auth/src/dump.rs index 1d3bc2139..127e65280 100644 --- a/meilisearch-auth/src/dump.rs +++ b/meilisearch-auth/src/dump.rs @@ -1,7 +1,7 @@ +use serde_json::Deserializer; use serde_json::{Map, Value}; use std::fs; use std::fs::File; -use std::io::BufRead; use std::io::BufReader; use std::io::Write; use std::path::Path; @@ -39,10 +39,9 @@ impl AuthController { return Ok(()); } - let mut reader = BufReader::new(File::open(&keys_file_path)?).lines(); - while let Some(key) = reader.next().transpose()? { - let key = serde_json::from_str(&key)?; - store.put_api_key(key)?; + let reader = BufReader::new(File::open(&keys_file_path)?); + for key in Deserializer::from_reader(reader).into_iter() { + store.put_api_key(key?)?; } Ok(()) @@ -59,11 +58,14 @@ impl AuthController { let keys_file_dst = dst.as_ref().join(KEYS_PATH); let mut writer = File::create(&keys_file_dst)?; - let mut reader = BufReader::new(File::open(&keys_file_src)?).lines(); - while let Some(key) = reader.next().transpose()? 
{ - let mut key: Map = serde_json::from_str(&key)?; - let uid = Uuid::new_v4().to_string(); - key.insert("uid".to_string(), Value::String(uid)); + let reader = BufReader::new(File::open(&keys_file_src)?); + for key in Deserializer::from_reader(reader).into_iter() { + let mut key: Map = key?; + + // generate a new uuid v4 and insert it in the key. + let uid = serde_json::to_value(Uuid::new_v4()).unwrap(); + key.insert("uid".to_string(), uid); + serde_json::to_writer(&mut writer, &key)?; writer.write_all(b"\n")?; } From b3c8915702e758f6adb1615b3b858c506fd1c6ff Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 15:23:17 +0200 Subject: [PATCH 083/185] Make small changes and renaming --- meilisearch-auth/src/key.rs | 20 ++++++++----------- meilisearch-auth/src/lib.rs | 10 +++++----- .../src/extractors/authentication/mod.rs | 4 ++-- meilisearch-http/src/routes/api_key.rs | 6 +++--- 4 files changed, 18 insertions(+), 22 deletions(-) diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index f55993607..baac68637 100644 --- a/meilisearch-auth/src/key.rs +++ b/meilisearch-auth/src/key.rs @@ -29,21 +29,17 @@ pub struct Key { impl Key { pub fn create_from_value(value: Value) -> Result { let name = match value.get("name") { - Some(Value::Null) => None, - Some(des) => Some( - from_value(des.clone()) - .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()))?, - ), - None => None, + None | Some(Value::Null) => None, + Some(des) => from_value(des.clone()) + .map(Some) + .map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()))?, }; let description = match value.get("description") { - Some(Value::Null) => None, - Some(des) => Some( - from_value(des.clone()) - .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?, - ), - None => None, + None | Some(Value::Null) => None, + Some(des) => from_value(des.clone()) + .map(Some) + .map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?, }; let uid = 
value.get("uid").map_or_else( diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 9f9c59c35..578093abf 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -63,16 +63,16 @@ impl AuthController { .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string())) } - pub fn get_uid_from_sha(&self, key: &[u8]) -> Result> { + pub fn get_optional_uid_from_sha(&self, sha: &[u8]) -> Result> { match &self.master_key { - Some(master_key) => self.store.get_uid_from_sha(key, master_key.as_bytes()), + Some(master_key) => self.store.get_uid_from_sha(sha, master_key.as_bytes()), None => Ok(None), } } - pub fn try_get_uid_from_sha(&self, key: &str) -> Result { - self.get_uid_from_sha(key.as_bytes())? - .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.to_string())) + pub fn get_uid_from_sha(&self, sha: &str) -> Result { + self.get_optional_uid_from_sha(sha.as_bytes())? + .ok_or_else(|| AuthControllerError::ApiKeyNotFound(sha.to_string())) } pub fn get_key_filters( diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index fdce0f5f0..a6384492c 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -147,7 +147,7 @@ pub mod policies { validation } - /// Extracts the key id used to sign the payload from the payload, without performing any validation. + /// Extracts the key id used to sign the payload, without performing any validation. 
fn extract_key_id(token: &str) -> Option { let mut validation = tenant_token_validation(); validation.insecure_disable_signature_validation(); @@ -188,7 +188,7 @@ pub mod policies { return Some(filters); } else if let Some(action) = Action::from_repr(A) { // API key - if let Ok(Some(uid)) = auth.get_uid_from_sha(token.as_bytes()) { + if let Ok(Some(uid)) = auth.get_optional_uid_from_sha(token.as_bytes()) { if let Ok(true) = auth.is_key_authorized(uid, action, index) { return auth.get_key_filters(uid, None).ok(); } diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index 37ff80ec6..cfe81b301 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -69,7 +69,7 @@ pub async fn get_api_key( let key = path.into_inner().key; let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { - let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.try_get_uid_from_sha(&key))?; + let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_sha(&key))?; let key = auth_controller.get_key(uid)?; Ok(KeyView::from_key(key, &auth_controller)) @@ -88,7 +88,7 @@ pub async fn patch_api_key( let key = path.into_inner().key; let body = body.into_inner(); let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { - let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.try_get_uid_from_sha(&key))?; + let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_sha(&key))?; let key = auth_controller.update_key(uid, body)?; Ok(KeyView::from_key(key, &auth_controller)) @@ -105,7 +105,7 @@ pub async fn delete_api_key( ) -> Result { let key = path.into_inner().key; tokio::task::spawn_blocking(move || { - let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.try_get_uid_from_sha(&key))?; + let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_sha(&key))?; auth_controller.delete_key(uid) }) .await From 
1f62e832672964b0c72c8fdd3ab787998b07c3bc Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 15:24:26 +0200 Subject: [PATCH 084/185] Remove error_add_api_key_invalid_index_uid_format --- meilisearch-http/tests/auth/api_keys.rs | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 7919c8ee9..8e76cdad8 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -362,31 +362,6 @@ async fn error_add_api_key_invalid_parameters_indexes() { assert_eq!(response, expected_response); } -#[ignore] -#[actix_rt::test] -async fn error_add_api_key_invalid_index_uid_format() { - let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); - - let content = json!({ - "description": "Indexing API key", - "indexes": ["inv@lid uid"], - "actions": ["documents.add"], - "expiresAt": "2050-11-13T00:00:00Z" - }); - let (response, code) = server.add_api_key(content).await; - assert_eq!(400, code, "{:?}", &response); - - let expected_response = json!({ - "message": "`inv@lid uid` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", - "code": "invalid_api_key_indexes", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" - }); - - assert_eq!(response, expected_response); -} - #[actix_rt::test] async fn error_add_api_key_invalid_parameters_actions() { let mut server = Server::new_auth().await; From c295924ea2d0832975a73e879e4fd43043680233 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 15:31:16 +0200 Subject: [PATCH 085/185] Patch tests --- meilisearch-http/tests/auth/api_keys.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 8e76cdad8..2d6f07d86 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -703,13 +703,15 @@ async fn list_api_keys() { "expiresAt": "2050-11-13T00:00:00Z" }, { - "description": "Default Search API Key (Use it to search from the frontend)", + "name": "Default Search API Key", + "description": "Use it to search from the frontend", "indexes": ["*"], "actions": ["search"], "expiresAt": serde_json::Value::Null, }, { - "description": "Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)", + "name": "Default Admin API Key", + "description": "Use it for all other than search operations. Caution! Do not expose it on a public frontend", "indexes": ["*"], "actions": ["*"], "expiresAt": serde_json::Value::Null, @@ -792,7 +794,7 @@ async fn delete_api_key() { assert_eq!(204, code, "{:?}", &response); // check if API key no longer exist. 
- let (_response, code) = server.get_api_key(&uid).await; + let (response, code) = server.get_api_key(&uid).await; assert_eq!(404, code, "{:?}", &response); } From 1816db8c1f9328af92336ed05e25f1b1b78ab07d Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 31 May 2022 15:39:09 +0200 Subject: [PATCH 086/185] Move dump v4 patcher into v4.rs --- meilisearch-auth/src/dump.rs | 30 +----------------- meilisearch-lib/src/dump/loaders/v4.rs | 44 +++++++++++++++++++------- 2 files changed, 34 insertions(+), 40 deletions(-) diff --git a/meilisearch-auth/src/dump.rs b/meilisearch-auth/src/dump.rs index 127e65280..7e607e574 100644 --- a/meilisearch-auth/src/dump.rs +++ b/meilisearch-auth/src/dump.rs @@ -1,11 +1,9 @@ use serde_json::Deserializer; -use serde_json::{Map, Value}; -use std::fs; + use std::fs::File; use std::io::BufReader; use std::io::Write; use std::path::Path; -use uuid::Uuid; use crate::{AuthController, HeedAuthStore, Result}; @@ -46,30 +44,4 @@ impl AuthController { Ok(()) } - - pub fn patch_dump_v4(src: impl AsRef, dst: impl AsRef) -> Result<()> { - let keys_file_src = src.as_ref().join(KEYS_PATH); - - if !keys_file_src.exists() { - return Ok(()); - } - - fs::create_dir_all(&dst)?; - let keys_file_dst = dst.as_ref().join(KEYS_PATH); - let mut writer = File::create(&keys_file_dst)?; - - let reader = BufReader::new(File::open(&keys_file_src)?); - for key in Deserializer::from_reader(reader).into_iter() { - let mut key: Map = key?; - - // generate a new uuid v4 and insert it in the key. 
- let uid = serde_json::to_value(Uuid::new_v4()).unwrap(); - key.insert("uid".to_string(), uid); - - serde_json::to_writer(&mut writer, &key)?; - writer.write_all(b"\n")?; - } - - Ok(()) - } } diff --git a/meilisearch-lib/src/dump/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs index 3caa7a9e9..0744df7ea 100644 --- a/meilisearch-lib/src/dump/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -1,12 +1,12 @@ use std::fs::{self, create_dir_all, File}; -use std::io::Write; +use std::io::{BufReader, Write}; use std::path::Path; use fs_extra::dir::{self, CopyOptions}; use log::info; +use serde_json::{Deserializer, Map, Value}; use tempfile::tempdir; - -use meilisearch_auth::AuthController; +use uuid::Uuid; use crate::dump::{compat, Metadata}; use crate::options::IndexerOpts; @@ -26,14 +26,10 @@ pub fn load_dump( let options = CopyOptions::default(); // Indexes - dir::copy(src.as_ref().join("indexes"), patched_dir.path(), &options)?; + dir::copy(src.as_ref().join("indexes"), &patched_dir, &options)?; // Index uuids - dir::copy( - src.as_ref().join("index_uuids"), - patched_dir.path(), - &options, - )?; + dir::copy(src.as_ref().join("index_uuids"), &patched_dir, &options)?; // Metadata fs::copy( @@ -45,11 +41,11 @@ pub fn load_dump( patch_updates(&src, &patched_dir)?; // Keys - AuthController::patch_dump_v4(&src, patched_dir.path())?; + patch_keys(&src, &patched_dir)?; super::v5::load_dump( meta, - patched_dir.path(), + &patched_dir, dst, index_db_size, meta_env_size, @@ -79,3 +75,29 @@ fn patch_updates(src: impl AsRef, dst: impl AsRef) -> anyhow::Result Ok(()) } + +fn patch_keys(src: impl AsRef, dst: impl AsRef) -> anyhow::Result<()> { + let keys_file_src = src.as_ref().join("keys"); + + if !keys_file_src.exists() { + return Ok(()); + } + + fs::create_dir_all(&dst)?; + let keys_file_dst = dst.as_ref().join("keys"); + let mut writer = File::create(&keys_file_dst)?; + + let reader = BufReader::new(File::open(&keys_file_src)?); + for key in 
Deserializer::from_reader(reader).into_iter() { + let mut key: Map = key?; + + // generate a new uuid v4 and insert it in the key. + let uid = serde_json::to_value(Uuid::new_v4()).unwrap(); + key.insert("uid".to_string(), uid); + + serde_json::to_writer(&mut writer, &key)?; + writer.write_all(b"\n")?; + } + + Ok(()) +} From b2e2dc855896ee17051bca918df961346b4310b1 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 1 Jun 2022 11:47:44 +0200 Subject: [PATCH 087/185] Re-authorize master_key to access to all routes --- .../src/extractors/authentication/mod.rs | 2 +- meilisearch-http/tests/auth/authorization.rs | 31 ++----------------- 2 files changed, 3 insertions(+), 30 deletions(-) diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index a6384492c..99f972984 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -177,7 +177,7 @@ pub mod policies { // if master key is None only keys routes are inaccessible. 
if auth .get_master_key() - .map_or_else(|| !is_keys_action(A), |mk| mk == token && is_keys_action(A)) + .map_or_else(|| !is_keys_action(A), |mk| mk == token) { return Some(AuthFilter::default()); } diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 81c626215..fde4c61f3 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -188,41 +188,14 @@ async fn error_access_unauthorized_action() { } } -#[actix_rt::test] -#[cfg_attr(target_os = "windows", ignore)] -async fn error_access_master_key() { - let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); - - // master key must only have access to /keys - for ((method, route), _) in AUTHORIZATIONS - .iter() - .filter(|(_, action)| action.iter().all(|a| !a.starts_with("keys."))) - { - let (response, code) = server.dummy_request(method, route).await; - - assert_eq!( - response, - INVALID_RESPONSE.clone(), - "on route: {:?} - {:?}", - method, - route - ); - assert_eq!(403, code, "{:?}", &response); - } -} - #[actix_rt::test] #[cfg_attr(target_os = "windows", ignore)] async fn access_authorized_master_key() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); - // master key must only have access to /keys - for ((method, route), _) in AUTHORIZATIONS - .iter() - .filter(|(_, action)| action.iter().any(|a| a.starts_with("keys."))) - { + // master key must have access to all routes. 
+ for ((method, route), _) in AUTHORIZATIONS.iter() { let (response, code) = server.dummy_request(method, route).await; assert_ne!( From 94b32cce01b579ceb2bb22d12a87ee511b0bbcc0 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 1 Jun 2022 14:11:56 +0200 Subject: [PATCH 088/185] Patch errors --- meilisearch-auth/src/error.rs | 7 +- meilisearch-auth/src/key.rs | 24 +++++ meilisearch-error/src/lib.rs | 2 + meilisearch-http/tests/auth/api_keys.rs | 101 ++++--------------- meilisearch-http/tests/auth/authorization.rs | 25 ++--- 5 files changed, 59 insertions(+), 100 deletions(-) diff --git a/meilisearch-auth/src/error.rs b/meilisearch-auth/src/error.rs index dc6301348..dbf28b421 100644 --- a/meilisearch-auth/src/error.rs +++ b/meilisearch-auth/src/error.rs @@ -22,12 +22,14 @@ pub enum AuthControllerError { "`name` field value `{0}` is invalid. It should be a string or specified as a null value." )] InvalidApiKeyName(Value), - #[error("`uid` field value `{0}` is invalid. It should be a valid uuidv4 string or ommited.")] + #[error("`uid` field value `{0}` is invalid. 
It should be a valid UUID v4 string or omitted.")] InvalidApiKeyUid(Value), #[error("API key `{0}` not found.")] ApiKeyNotFound(String), - #[error("`uid` field value `{0}` already exists for an API key.")] + #[error("`uid` field value `{0}` is already an existing API key.")] ApiKeyAlreadyExists(String), + #[error("`{0}` field cannot be modified for the given resource.")] + ImmutableField(String), #[error("Internal error: {0}")] Internal(Box), } @@ -51,6 +53,7 @@ impl ErrorCode for AuthControllerError { Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound, Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid, Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists, + Self::ImmutableField(_) => Code::ImmutableField, Self::Internal(_) => Code::Internal, } } diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index baac68637..f6ff7096c 100644 --- a/meilisearch-auth/src/key.rs +++ b/meilisearch-auth/src/key.rs @@ -99,6 +99,30 @@ impl Key { self.name = des?; } + if value.get("uid").is_some() { + return Err(AuthControllerError::ImmutableField("uid".to_string())); + } + + if value.get("actions").is_some() { + return Err(AuthControllerError::ImmutableField("actions".to_string())); + } + + if value.get("indexes").is_some() { + return Err(AuthControllerError::ImmutableField("indexes".to_string())); + } + + if value.get("expiresAt").is_some() { + return Err(AuthControllerError::ImmutableField("expiresAt".to_string())); + } + + if value.get("createdAt").is_some() { + return Err(AuthControllerError::ImmutableField("createdAt".to_string())); + } + + if value.get("updatedAt").is_some() { + return Err(AuthControllerError::ImmutableField("updatedAt".to_string())); + } + self.updated_at = OffsetDateTime::now_utc(); Ok(()) diff --git a/meilisearch-error/src/lib.rs b/meilisearch-error/src/lib.rs index 57882f8e0..6e6273db2 100644 --- a/meilisearch-error/src/lib.rs +++ b/meilisearch-error/src/lib.rs @@ -168,6 +168,7 @@ pub enum Code { InvalidApiKeyDescription, 
InvalidApiKeyName, InvalidApiKeyUid, + ImmutableField, ApiKeyAlreadyExists, } @@ -278,6 +279,7 @@ impl Code { InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST), InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST), ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT), + ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST), InvalidMinWordLengthForTypo => { ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST) } diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 2d6f07d86..a9f2bf91d 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -470,7 +470,7 @@ async fn error_add_api_key_invalid_parameters_uid() { assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ - "message": r#"`uid` field value `"aaaaabbbbbccc"` is invalid. It should be a valid uuidv4 string or ommited."#, + "message": r#"`uid` field value `"aaaaabbbbbccc"` is invalid. 
It should be a valid UUID v4 string or omitted."#, "code": "invalid_api_key_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid" @@ -499,7 +499,7 @@ async fn error_add_api_key_parameters_uid_already_exist() { assert_eq!(409, code, "{:?}", &response); let expected_response = json!({ - "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` already exists for an API key.", + "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.", "code": "api_key_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#api_key_already_exists" @@ -1112,7 +1112,7 @@ async fn patch_api_key_name() { } #[actix_rt::test] -async fn patch_api_key_indexes_unchanged() { +async fn error_patch_api_key_indexes() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); @@ -1143,44 +1143,24 @@ async fn patch_api_key_indexes_unchanged() { assert!(response["updatedAt"].is_string()); let uid = response["uid"].as_str().unwrap(); - let created_at = response["createdAt"].as_str().unwrap(); - let updated_at = response["updatedAt"].as_str().unwrap(); let content = json!({ "indexes": ["products", "prices"] }); thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&uid, content).await; - assert_eq!(200, code, "{:?}", &response); - assert!(response["key"].is_string()); - assert!(response["expiresAt"].is_string()); - assert!(response["createdAt"].is_string()); - assert_ne!(response["updatedAt"].as_str().unwrap(), updated_at); - assert_eq!(response["createdAt"].as_str().unwrap(), created_at); + assert_eq!(400, code, "{:?}", &response); - let expected = json!({ - "description": "Indexing API key", - "indexes": ["products"], - "actions": [ - "search", - "documents.add", - "documents.get", - "documents.delete", - "indexes.create", - "indexes.get", - "indexes.update", - "indexes.delete", - "stats.get", - "dumps.create", - 
"dumps.get" - ], - "expiresAt": "2050-11-13T00:00:00Z" + let expected = json!({"message": "`indexes` field cannot be modified for the given resource.", + "code": "immutable_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_field" }); assert_json_include!(actual: response, expected: expected); } #[actix_rt::test] -async fn patch_api_key_actions_unchanged() { +async fn error_patch_api_key_actions() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); @@ -1212,9 +1192,6 @@ async fn patch_api_key_actions_unchanged() { let uid = response["uid"].as_str().unwrap(); - let created_at = response["createdAt"].as_str().unwrap(); - let updated_at = response["updatedAt"].as_str().unwrap(); - let content = json!({ "actions": [ "search", @@ -1227,37 +1204,19 @@ async fn patch_api_key_actions_unchanged() { thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&uid, content).await; - assert_eq!(200, code, "{:?}", &response); - assert!(response["key"].is_string()); - assert!(response["expiresAt"].is_string()); - assert!(response["createdAt"].is_string()); - assert_ne!(response["updatedAt"].as_str().unwrap(), updated_at); - assert_eq!(response["createdAt"].as_str().unwrap(), created_at); + assert_eq!(400, code, "{:?}", &response); - let expected = json!({ - "description": "Indexing API key", - "indexes": ["products"], - "actions": [ - "search", - "documents.add", - "documents.get", - "documents.delete", - "indexes.create", - "indexes.get", - "indexes.update", - "indexes.delete", - "stats.get", - "dumps.create", - "dumps.get" - ], - "expiresAt": "2050-11-13T00:00:00Z" + let expected = json!({"message": "`actions` field cannot be modified for the given resource.", + "code": "immutable_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_field" }); assert_json_include!(actual: response, expected: expected); } #[actix_rt::test] -async fn 
patch_api_key_expiration_date_unchanged() { +async fn error_patch_api_key_expiration_date() { let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); @@ -1288,37 +1247,17 @@ async fn patch_api_key_expiration_date_unchanged() { assert!(response["updatedAt"].is_string()); let uid = response["uid"].as_str().unwrap(); - let created_at = response["createdAt"].as_str().unwrap(); - let updated_at = response["updatedAt"].as_str().unwrap(); let content = json!({ "expiresAt": "2055-11-13T00:00:00Z" }); thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&uid, content).await; - assert_eq!(200, code, "{:?}", &response); - assert!(response["key"].is_string()); - assert!(response["expiresAt"].is_string()); - assert!(response["createdAt"].is_string()); - assert_ne!(response["updatedAt"].as_str().unwrap(), updated_at); - assert_eq!(response["createdAt"].as_str().unwrap(), created_at); + assert_eq!(400, code, "{:?}", &response); - let expected = json!({ - "description": "Indexing API key", - "indexes": ["products"], - "actions": [ - "search", - "documents.add", - "documents.get", - "documents.delete", - "indexes.create", - "indexes.get", - "indexes.update", - "indexes.delete", - "stats.get", - "dumps.create", - "dumps.get" - ], - "expiresAt": "2050-11-13T00:00:00Z" + let expected = json!({"message": "`expiresAt` field cannot be modified for the given resource.", + "code": "immutable_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_field" }); assert_json_include!(actual: response, expected: expected); diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index fde4c61f3..2080e2990 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -149,31 +149,22 @@ async fn error_access_unauthorized_index() { #[cfg_attr(target_os = "windows", ignore)] async fn 
error_access_unauthorized_action() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); - - let content = json!({ - "indexes": ["products"], - "actions": [], - "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), - }); - - let (response, code) = server.add_api_key(content).await; - assert_eq!(201, code, "{:?}", &response); - assert!(response["key"].is_string()); - - let key = response["key"].as_str().unwrap(); - server.use_api_key(&key); for ((method, route), action) in AUTHORIZATIONS.iter() { + // create a new API key letting only the needed action. server.use_api_key("MASTER_KEY"); - // Patch API key letting all rights but the needed one. let content = json!({ + "indexes": ["products"], "actions": ALL_ACTIONS.difference(action).collect::>(), + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); - let (response, code) = server.patch_api_key(&key, content).await; - assert_eq!(200, code, "{:?}", &response); + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + + let key = response["key"].as_str().unwrap(); server.use_api_key(&key); let (response, code) = server.dummy_request(method, route).await; From 7652295d2c9675da2b3b5297b115478739aff950 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 1 Jun 2022 15:50:13 +0200 Subject: [PATCH 089/185] Encode key in base64 instead of hexa --- Cargo.lock | 1 + meilisearch-auth/Cargo.toml | 1 + meilisearch-auth/src/store.rs | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index f48e6c59d..b72913520 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1973,6 +1973,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" name = "meilisearch-auth" version = "0.27.1" dependencies = [ + "base64", "enum-iterator", "meilisearch-error", "milli", diff --git 
a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 7ffa072e8..dafeeef05 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -11,6 +11,7 @@ rand = "0.8.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" +base64 = "0.13.0" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } uuid = { version = "0.8.2", features = ["serde", "v4"] } diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 762e707bc..64cf49544 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -238,7 +238,7 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { pub fn generate_key(uid: &[u8], master_key: &[u8]) -> String { let key = [uid, master_key].concat(); let sha = Sha256::digest(&key); - format!("{:x}", sha) + base64::encode_config(sha, base64::URL_SAFE_NO_PAD) } /// Divides one slice into two at an index, returns `None` if mid is out of bounds. 
From 0656df3a6d9c71822d21205e93061ab5aaf9f697 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 1 Jun 2022 17:14:13 +0200 Subject: [PATCH 090/185] Fix the dumps tests --- meilisearch-http/tests/dumps/mod.rs | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 2f8938d28..c26b0e06e 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -68,7 +68,7 @@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "after": null }) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -132,7 +132,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": 
"2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "after": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -198,10 +198,6 @@ async fn import_dump_v2_rubygems_with_settings() { tasks["results"][0], json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); - assert_eq!( - tasks["results"][92], - json!({"uid": 0, "indexUid": "rubygems", "status": "succeeded", "type": "settingsUpdate", "details": {"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": 
["name", "summary"], "filterableAttributes": ["version"], "rankingRules": ["typo", "words", "desc(fame)", "proximity", "attribute", "exactness", "desc(total_downloads)"]}, "duration": "PT0.008886S", "enqueuedAt": "2021-09-08T08:40:28.660188Z", "startedAt": "2021-09-08T08:40:28.660766Z", "finishedAt": "2021-09-08T08:40:28.669652Z"}) - ); // finally we're just going to check that we can still get a few documents by id let (document, code) = index.get_document(188040, None).await; @@ -264,7 +260,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "after": null }) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -328,7 +324,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": 
"2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "after": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that we can["results"] still get a few documents by id @@ -394,10 +390,6 @@ async fn import_dump_v3_rubygems_with_settings() { tasks["results"][0], json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); - assert_eq!( - tasks["results"][92], - json!({"uid": 0, "indexUid": "rubygems", "status": "succeeded", "type": "settingsUpdate", "details": {"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], 
"searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "rankingRules": ["typo", "words", "desc(fame)", "proximity", "attribute", "exactness", "desc(total_downloads)"]}, "duration": "PT0.008886S", "enqueuedAt": "2021-09-08T08:40:28.660188Z", "startedAt": "2021-09-08T08:40:28.660766Z", "finishedAt": "2021-09-08T08:40:28.669652Z"}) - ); // finally we're just going to check that we can still get a few documents by id let (document, code) = index.get_document(188040, None).await; @@ -460,7 +452,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "after": null }) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "from": 0, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -524,7 +516,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", 
"finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "after": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -590,10 +582,6 @@ async fn import_dump_v4_rubygems_with_settings() { tasks["results"][0], json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); - assert_eq!( - tasks["results"][92], - json!({ "uid": 0, "indexUid": "rubygems", "status": "succeeded", "type": "settingsUpdate", "details": {"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], 
"searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "rankingRules": ["typo", "words", "desc(fame)", "proximity", "attribute", "exactness", "desc(total_downloads)"]}, "duration": "PT0.008886S", "enqueuedAt": "2021-09-08T08:40:28.660188Z", "startedAt": "2021-09-08T08:40:28.660766Z", "finishedAt": "2021-09-08T08:40:28.669652Z"}) - ); // finally we're just going to check that we can still get a few documents by id let (document, code) = index.get_document(188040, None).await; From df721b2e9eef60064b55622b47bd9af459d3c11c Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 1 Jun 2022 17:16:15 +0200 Subject: [PATCH 091/185] Scheduler must not reverse the order of the fetched tasks --- meilisearch-lib/src/tasks/scheduler.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index dddb6dff9..36534f358 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -346,8 +346,6 @@ impl Scheduler { .fetch_unfinished_tasks(Some(self.next_fetched_task_id)) .await? .into_iter() - // The tasks arrive in reverse order, and we need to insert them in order. 
- .rev() .for_each(|t| { self.next_fetched_task_id = t.id + 1; self.register_task(t); From e769043576a970d7e589548a90a1a2596eb40760 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 1 Jun 2022 17:59:01 +0200 Subject: [PATCH 092/185] Bump the dependencies --- Cargo.lock | 384 ++++++++++++++++++++++++++--------------------------- 1 file changed, 189 insertions(+), 195 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39eb78987..998dbd1b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -59,7 +59,7 @@ dependencies = [ "http", "httparse", "httpdate", - "itoa 1.0.1", + "itoa 1.0.2", "language-tags", "local-channel", "log", @@ -78,7 +78,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" dependencies = [ "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -188,7 +188,7 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "itoa 1.0.1", + "itoa 1.0.2", "language-tags", "log", "mime", @@ -211,9 +211,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7525bedf54704abb1d469e88d7e7e9226df73778798a69cea5022d53b2ae91bc" dependencies = [ "actix-router", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -318,9 +318,9 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -329,9 +329,9 @@ version = "0.1.53" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -544,9 +544,9 @@ version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "562e382481975bc61d11275ac5e62a19abd00b0547d99516a415336f183dcd0e" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -659,9 +659,9 @@ checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e" [[package]] name = "clap" -version = "3.1.12" +version = "3.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c167e37342afc5f33fd87bbc870cedd020d2a6dffa05d45ccd9241fbdd146db" +checksum = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b" dependencies = [ "atty", "bitflags", @@ -676,22 +676,22 @@ dependencies = [ [[package]] name = "clap_derive" -version = "3.1.7" +version = "3.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3aab4734e083b809aaf5794e14e756d1c798d2c69c7f7de7a09a2f5214993c1" +checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c" dependencies = [ "heck", "proc-macro-error", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] name = "clap_lex" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "189ddd3b5d32a70b35e7686054371742a937b0d99128e76dde6340210e966669" +checksum = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213" dependencies = [ "os_str_bytes", ] @@ -702,9 +702,9 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -732,9 +732,9 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cortex-m" -version = "0.7.4" +version = "0.7.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ff967e867ca14eba0c34ac25cd71ea98c678e741e3915d923999bb2fe7c826" +checksum = "cd20d4ac4aa86f4f75f239d59e542ef67de87cce2c282818dc6e84155d3ea126" dependencies = [ "bare-metal 0.2.5", "bitfield", @@ -889,9 +889,9 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -901,10 +901,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", "rustc_version 0.4.0", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -1060,9 +1060,9 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -1110,19 +1110,17 @@ dependencies = [ [[package]] name = "firestorm" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3d6188b8804df28032815ea256b6955c9625c24da7525f387a7af02fbb8f01" +checksum = "2c5f6c2c942da57e2aaaa84b8a521489486f14e75e7fa91dab70aba913975f98" [[package]] name = "flate2" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af" +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ - "cfg-if 1.0.0", "crc32fast", - "libc", "miniz_oxide", ] @@ -1231,9 +1229,9 @@ version = "0.3.21" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -1309,9 +1307,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e45727250e75cc04ff2846a66397da8ef2b3db8e40e0cef4df67950a07621eb9" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -1322,9 +1320,9 @@ checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" [[package]] name = "git2" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e77a14ffc6ba4ad5188d6cf428894c4fcfda725326b37558f35bb677e712cec" +checksum = "d0155506aab710a86160ddb504a480d2964d7ab5b9e62419be69e0032bc5931c" dependencies = [ "bitflags", "libc", @@ -1341,9 +1339,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "grenad" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d69e46e7b225459e2e0272707d167d7dcaaac89307a848326df6b30ec432151" +checksum = "3e8454188b8caee0627ff58636048963b6abd07e5862b4c9a8f9cfd349d50c26" dependencies = [ "bytemuck", "byteorder", @@ -1396,12 +1394,13 @@ checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" [[package]] name = "heapless" -version = "0.7.10" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d076121838e03f862871315477528debffdb7462fb229216ecef91b1a3eb31eb" +checksum = "8a08e755adbc0ad283725b29f4a4883deee15336f372d5f61fae59efec40f983" dependencies = [ "atomic-polyfill", "hash32", + "rustc_version 0.4.0", "spin 0.9.3", "stable_deref_trait", ] @@ -1463,20 +1462,20 @@ checksum = 
"7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "http" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" +checksum = "ff8670570af52249509a86f5e3e18a08c60b177071826898fde8997cf5f6bfbb" dependencies = [ "bytes", "fnv", - "itoa 1.0.1", + "itoa 1.0.2", ] [[package]] name = "http-body" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes", "http", @@ -1503,9 +1502,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.18" +version = "0.14.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b26ae0a80afebe130861d90abf98e3814a4f28a4c6ffeb5ab8ebb2be311e0ef2" +checksum = "42dc3c131584288d375f2d07f822b0cb012d8c6fb899a5b9fdb3cb7eb9b6004f" dependencies = [ "bytes", "futures-channel", @@ -1516,7 +1515,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.1", + "itoa 1.0.2", "pin-project-lite", "socket2", "tokio", @@ -1551,9 +1550,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee" +checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a" dependencies = [ "autocfg", "hashbrown 0.11.2", @@ -1592,9 +1591,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" [[package]] name = "jieba-rs" @@ -1674,15 +1673,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.124" +version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50" +checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "libgit2-sys" -version = "0.13.3+1.4.2" +version = "0.13.4+1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c24d36c3ac9b9996a2418d6bf428cc0bc5d1a814a84303fc60986088c5ed60de" +checksum = "d0fa6563431ede25f5cc7f6d803c6afbc1c5d3ad3d4925d12c882bf2b526f5d1" dependencies = [ "cc", "libc", @@ -1698,9 +1697,9 @@ checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" [[package]] name = "libz-sys" -version = "1.1.6" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e7e15d7610cce1d9752e137625f14e61a28cd45929b6e12e47b50fe154ee2e" +checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf" dependencies = [ "cc", "libc", @@ -1878,9 +1877,9 @@ dependencies = [ [[package]] name = "local-channel" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6246c68cf195087205a0512559c97e15eaf95198bf0e206d662092cdcb03fe9f" +checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" dependencies = [ "futures-core", "futures-sink", @@ -1890,9 +1889,9 @@ dependencies = [ [[package]] name = "local-waker" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "902eb695eb0591864543cbfbf6d742510642a605a61fc5e97fe6ceb5a30ac4fb" +checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1" [[package]] 
name = "lock_api" @@ -1906,9 +1905,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.16" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if 1.0.0", ] @@ -1930,9 +1929,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81" dependencies = [ "log", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -1947,14 +1946,14 @@ dependencies = [ [[package]] name = "manifest-dir-macros" -version = "0.1.14" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b60d42baa153de5741281aa3d8a0ec1866777902f8162c04ce7b7c1e31415e8f" +checksum = "f08150cf2bab1fc47c2196f4f41173a27fcd0f684165e5458c0046b53a472e2f" dependencies = [ "once_cell", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -2150,9 +2149,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" @@ -2241,41 +2240,30 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082" +checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.2" +version = 
"0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52da4364ffb0e4fe33a9841a98a3f3014fb964045ce4f7a45a398243c8d6b0c9" +checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799" dependencies = [ "libc", "log", - "miow", - "ntapi", "wasi 0.11.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "miow" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" -dependencies = [ - "winapi", + "windows-sys", ] [[package]] name = "mockall" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d4d70639a72f972725db16350db56da68266ca368b2a1fe26724a903ad3d6b8" +checksum = "5641e476bbaf592a3939a7485fa079f427b4db21407d5ebfd5bba4e07a1f6f4c" dependencies = [ "cfg-if 1.0.0", "downcast", @@ -2288,14 +2276,14 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79ef208208a0dea3f72221e26e904cdc6db2e481d9ade89081ddd494f1dbaa6b" +checksum = "262d56735932ee0240d515656e5a7667af3af2a5b0af4da558c4cff2b2aeb0c7" dependencies = [ "cfg-if 1.0.0", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -2367,9 +2355,9 @@ dependencies = [ [[package]] name = "num-integer" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ "autocfg", "num-traits", @@ -2377,9 +2365,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", "libm", @@ -2397,18 +2385,18 @@ dependencies = [ [[package]] name = "num_threads" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aba1801fb138d8e85e11d0fc70baf4fe1cdfffda7c6cd34a854905df588e5ed0" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" dependencies = [ "libc", ] [[package]] name = "object" -version = "0.28.3" +version = "0.28.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40bec70ba014595f99f7aa110b84331ffe1ee9aece7fe6f387cc7e3ecda4d456" +checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424" dependencies = [ "memchr", ] @@ -2421,9 +2409,9 @@ checksum = "f69e48cd7c8e5bb52a1da1287fdbfd877c32673176583ce664cd63b201aba385" [[package]] name = "once_cell" -version = "1.10.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9" +checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" [[package]] name = "ordered-float" @@ -2436,9 +2424,9 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.0.0" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" +checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa" [[package]] name = "page_size" @@ -2452,9 +2440,9 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" 
dependencies = [ "lock_api", "parking_lot_core", @@ -2462,9 +2450,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "995f667a6c822200b0433ac218e05582f0e2efa1b922a3fd2fbaadc5f87bab37" +checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" dependencies = [ "cfg-if 1.0.0", "libc", @@ -2625,9 +2613,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", "version_check", ] @@ -2637,7 +2625,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", "version_check", ] @@ -2653,11 +2641,11 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.37" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1" +checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f" dependencies = [ - "unicode-xid 0.2.2", + "unicode-ident", ] [[package]] @@ -2727,7 +2715,7 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", ] [[package]] @@ -2771,9 +2759,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.5.2" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd249e82c21598a9a426a4e00dd7adc1d640b22445ec8545feef801d1a74c221" +checksum = 
"bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" dependencies = [ "autocfg", "crossbeam-deque", @@ -2783,9 +2771,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f51245e1e62e1f1629cbfec37b5793bbabcaeb90f30e94d2ba03564687353e4" +checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -2815,9 +2803,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.5" +version = "1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" +checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" dependencies = [ "aho-corasick", "memchr", @@ -2832,9 +2820,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" [[package]] name = "remove_dir_all" @@ -2885,9 +2873,9 @@ dependencies = [ [[package]] name = "retain_mut" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c31b5c4033f8fdde8700e4657be2c497e7288f01515be52168c631e2e4d4086" +checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" [[package]] name = "ring" @@ -2969,14 +2957,14 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.7", + "semver 1.0.9", ] [[package]] name = "rustls" -version = "0.20.4" +version = "0.20.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fbfeb8d0ddb84706bc597a5574ab8912817c52a397f819e5b614e2265206921" +checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" dependencies = [ "log", "ring", @@ -3013,9 +3001,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" +checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" [[package]] name = "same-file" @@ -3067,9 +3055,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d65bd28f48be7196d222d95b9243287f48d27aca604e08497513019ff0502cc4" +checksum = "8cb243bdfdb5936c8dc3c45762a19d12ab4550cdc753bc247637d4ec35a040fd" [[package]] name = "semver-parser" @@ -3079,9 +3067,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.136" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" dependencies = [ "serde_derive", ] @@ -3097,23 +3085,23 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.136" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] name = "serde_json" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" dependencies = [ "indexmap", - "itoa 1.0.1", + "itoa 1.0.2", "ryu", "serde", ] @@ -3135,7 +3123,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.1", + "itoa 1.0.2", "ryu", "serde", ] @@ -3295,13 +3283,13 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.91" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d" +checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "unicode-xid 0.2.2", + "unicode-ident", ] [[package]] @@ -3319,17 +3307,17 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", - "unicode-xid 0.2.2", + "syn 1.0.95", + "unicode-xid 0.2.3", ] [[package]] name = "sysinfo" -version = "0.23.10" +version = "0.23.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eea2ed6847da2e0c7289f72cb4f285f0bd704694ca067d32be811b2a45ea858" +checksum = "3977ec2e0520829be45c8a2df70db2bf364714d8a748316a10c3c35d4d2b01c9" dependencies = [ "cfg-if 1.0.0", "core-foundation-sys", @@ -3401,9 +3389,9 @@ version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -3443,7 +3431,7 @@ version = "0.3.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" dependencies = [ - "itoa 1.0.1", + "itoa 1.0.2", "libc", "num_threads", "quickcheck", @@ -3474,9 +3462,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.18.0" +version = "1.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f48b6d60512a392e34dbf7fd456249fd2de3c83669ab642e021903f4015185b" +checksum = "4903bf0427cf68dddd5aa6a93220756f8be0c34fcfa9f5e6191e103e15a31395" dependencies = [ "bytes", "libc", @@ -3498,16 +3486,16 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] name = "tokio-rustls" -version = "0.23.3" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4151fda0cf2798550ad0b34bcfc9b9dcc2a9d2471c895c68f3a8818e54f2389e" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls", "tokio", @@ -3527,9 +3515,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0edfdeb067411dba2044da6d1cb2df793dd35add7888d73c16e3381ded401764" +checksum = "f988a1a1adc2fb21f9c12aa96441da33a1728193ae0b95d2be22dbd17fcb4e5c" dependencies = [ "bytes", "futures-core", @@ -3573,9 +3561,9 @@ version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", ] [[package]] @@ -3614,6 +3602,12 @@ version = 
"0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" +[[package]] +name = "unicode-ident" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee" + [[package]] name = "unicode-normalization" version = "0.1.19" @@ -3637,9 +3631,9 @@ checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" [[package]] name = "unicode-xid" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" [[package]] name = "untrusted" @@ -3711,9 +3705,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "7.0.0" +version = "7.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4db743914c971db162f35bf46601c5a63ec4452e61461937b4c1ab817a60c12e" +checksum = "626fd028e124b3ee607632d92ba99b5a5a086cfd404ede4af6c19ecd9b75a02d" dependencies = [ "anyhow", "cfg-if 1.0.0", @@ -3807,9 +3801,9 @@ dependencies = [ "bumpalo", "lazy_static", "log", - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", "wasm-bindgen-shared", ] @@ -3841,9 +3835,9 @@ version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" dependencies = [ - "proc-macro2 1.0.37", + "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.91", + "syn 1.0.95", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3935,9 +3929,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" -version = "0.34.0" +version = "0.36.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5acdd78cb4ba54c0045ac14f62d8f94a03d10047904ae2a40afa1e99d8f70825" +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ "windows_aarch64_msvc", "windows_i686_gnu", @@ -3948,33 +3942,33 @@ dependencies = [ [[package]] name = "windows_aarch64_msvc" -version = "0.34.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17cffbe740121affb56fad0fc0e421804adf0ae00891205213b5cecd30db881d" +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" [[package]] name = "windows_i686_gnu" -version = "0.34.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2564fde759adb79129d9b4f54be42b32c89970c18ebf93124ca8870a498688ed" +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" [[package]] name = "windows_i686_msvc" -version = "0.34.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cd9d32ba70453522332c14d38814bceeb747d80b3958676007acadd7e166956" +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" [[package]] name = "windows_x86_64_gnu" -version = "0.34.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfce6deae227ee8d356d19effc141a509cc503dfd1f850622ec4b0f84428e1f4" +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" [[package]] name = "windows_x86_64_msvc" -version = "0.34.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19538ccc21819d01deaf88d6a17eae6596a12e9aafdbb97916fb49896d89de9" +checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" [[package]] name = "winreg" @@ -3987,9 +3981,9 @@ dependencies = [ [[package]] name = "xattr" -version = "0.2.2" +version = "0.2.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c" +checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc" dependencies = [ "libc", ] @@ -4016,8 +4010,8 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb" dependencies = [ - "proc-macro2 1.0.37", - "syn 1.0.91", + "proc-macro2 1.0.39", + "syn 1.0.95", "synstructure", ] From 4512eed8f53590397dd23695010ae03168ed58ee Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 1 Jun 2022 18:06:20 +0200 Subject: [PATCH 093/185] Fix PR comments --- meilisearch-auth/Cargo.toml | 2 +- meilisearch-auth/src/key.rs | 2 +- meilisearch-auth/src/lib.rs | 19 ++++++++++--------- meilisearch-auth/src/store.rs | 12 +++++++++--- .../src/extractors/authentication/mod.rs | 2 +- meilisearch-http/src/routes/api_key.rs | 9 ++++++--- meilisearch-http/tests/auth/api_keys.rs | 2 +- 7 files changed, 29 insertions(+), 19 deletions(-) diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index dafeeef05..29fa78a14 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -4,6 +4,7 @@ version = "0.27.1" edition = "2021" [dependencies] +base64 = "0.13.0" enum-iterator = "0.7.0" meilisearch-error = { path = "../meilisearch-error" } milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.28.0" } @@ -11,7 +12,6 @@ rand = "0.8.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" -base64 = "0.13.0" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } uuid = { version = "0.8.2", features = ["serde", "v4"] } diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index f6ff7096c..0e336a7db 100644 --- a/meilisearch-auth/src/key.rs 
+++ b/meilisearch-auth/src/key.rs @@ -133,7 +133,7 @@ impl Key { let uid = Uuid::new_v4(); Self { name: Some("Default Admin API Key".to_string()), - description: Some("Use it for all other than search operations. Caution! Do not expose it on a public frontend".to_string()), + description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()), uid, actions: vec![Action::All], indexes: vec!["*".to_string()], diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 578093abf..e41fd92f4 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -4,20 +4,19 @@ pub mod error; mod key; mod store; -use crate::store::generate_key; use std::collections::{HashMap, HashSet}; use std::path::Path; - use std::sync::Arc; -use uuid::Uuid; use serde::{Deserialize, Serialize}; use serde_json::Value; use time::OffsetDateTime; +use uuid::Uuid; pub use action::{actions, Action}; use error::{AuthControllerError, Result}; pub use key::Key; +use store::generate_key_as_base64; pub use store::open_auth_store_env; use store::HeedAuthStore; @@ -63,16 +62,18 @@ impl AuthController { .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string())) } - pub fn get_optional_uid_from_sha(&self, sha: &[u8]) -> Result> { + pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result> { match &self.master_key { - Some(master_key) => self.store.get_uid_from_sha(sha, master_key.as_bytes()), + Some(master_key) => self + .store + .get_uid_from_encoded_key(encoded_key, master_key.as_bytes()), None => Ok(None), } } - pub fn get_uid_from_sha(&self, sha: &str) -> Result { - self.get_optional_uid_from_sha(sha.as_bytes())? - .ok_or_else(|| AuthControllerError::ApiKeyNotFound(sha.to_string())) + pub fn get_uid_from_encoded_key(&self, encoded_key: &str) -> Result { + self.get_optional_uid_from_encoded_key(encoded_key.as_bytes())? 
+ .ok_or_else(|| AuthControllerError::ApiKeyNotFound(encoded_key.to_string())) } pub fn get_key_filters( @@ -134,7 +135,7 @@ impl AuthController { pub fn generate_key(&self, uid: Uuid) -> Option { self.master_key .as_ref() - .map(|master_key| generate_key(uid.as_bytes(), master_key.as_bytes())) + .map(|master_key| generate_key_as_base64(uid.as_bytes(), master_key.as_bytes())) } /// Check if the provided key is authorized to make a specific action diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 64cf49544..69c4cbd57 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -118,14 +118,20 @@ impl HeedAuthStore { self.keys.get(&rtxn, uid.as_bytes()).map_err(|e| e.into()) } - pub fn get_uid_from_sha(&self, key_sha: &[u8], master_key: &[u8]) -> Result> { + pub fn get_uid_from_encoded_key( + &self, + encoded_key: &[u8], + master_key: &[u8], + ) -> Result> { let rtxn = self.env.read_txn()?; let uid = self .keys .remap_data_type::() .iter(&rtxn)? 
.filter_map(|res| match res { - Ok((uid, _)) if generate_key(uid, master_key).as_bytes() == key_sha => { + Ok((uid, _)) + if generate_key_as_base64(uid, master_key).as_bytes() == encoded_key => + { let (uid, _) = try_split_array_at(uid)?; Some(Uuid::from_bytes(*uid)) } @@ -235,7 +241,7 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { } } -pub fn generate_key(uid: &[u8], master_key: &[u8]) -> String { +pub fn generate_key_as_base64(uid: &[u8], master_key: &[u8]) -> String { let key = [uid, master_key].concat(); let sha = Sha256::digest(&key); base64::encode_config(sha, base64::URL_SAFE_NO_PAD) diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index 99f972984..7732bd7fa 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -188,7 +188,7 @@ pub mod policies { return Some(filters); } else if let Some(action) = Action::from_repr(A) { // API key - if let Ok(Some(uid)) = auth.get_optional_uid_from_sha(token.as_bytes()) { + if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) { if let Ok(true) = auth.is_key_authorized(uid, action, index) { return auth.get_key_filters(uid, None).ok(); } diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index cfe81b301..831a350d8 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -69,7 +69,8 @@ pub async fn get_api_key( let key = path.into_inner().key; let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { - let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_sha(&key))?; + let uid = + Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?; let key = auth_controller.get_key(uid)?; Ok(KeyView::from_key(key, &auth_controller)) @@ -88,7 +89,8 @@ pub async fn patch_api_key( let key = 
path.into_inner().key; let body = body.into_inner(); let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { - let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_sha(&key))?; + let uid = + Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?; let key = auth_controller.update_key(uid, body)?; Ok(KeyView::from_key(key, &auth_controller)) @@ -105,7 +107,8 @@ pub async fn delete_api_key( ) -> Result { let key = path.into_inner().key; tokio::task::spawn_blocking(move || { - let uid = Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_sha(&key))?; + let uid = + Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?; auth_controller.delete_key(uid) }) .await diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index a9f2bf91d..4eb1fdd6f 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -711,7 +711,7 @@ async fn list_api_keys() { }, { "name": "Default Admin API Key", - "description": "Use it for all other than search operations. Caution! Do not expose it on a public frontend", + "description": "Use it for anything that is not a search operation. Caution! 
Do not expose it on a public frontend", "indexes": ["*"], "actions": ["*"], "expiresAt": serde_json::Value::Null, From ac9e7bdbe3e0179e325bb2423edf3e2f12a62ab3 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 10:21:19 +0200 Subject: [PATCH 094/185] Fix a test that was depending on the speed of the CPU --- meilisearch-http/tests/tasks/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index 1ba7a4936..9d0940562 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -182,7 +182,7 @@ async fn list_tasks_status_and_type_filtered() { let (response, code) = index .filtered_tasks( &["indexCreation", "documentAdditionOrUpdate"], - &["succeeded", "processing"], + &["succeeded", "processing", "enqueued"], ) .await; assert_eq!(code, 200, "{}", response); From 9eea142e2b152de388ccb6a796a275a28cda54a3 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Thu, 2 Jun 2022 11:11:07 +0200 Subject: [PATCH 095/185] feat(API-keys): Change immutable_field error message Change the immutable_field error message to fit the recent changes in the spec: https://github.com/meilisearch/specifications/pull/148/files/aa0a148ee31701c46baf1268d7655d8c9eb0eedc..84a9baff685a459d44292839c9ffc107db374936 --- meilisearch-auth/src/error.rs | 2 +- meilisearch-http/tests/auth/api_keys.rs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/meilisearch-auth/src/error.rs b/meilisearch-auth/src/error.rs index dbf28b421..8be97daaf 100644 --- a/meilisearch-auth/src/error.rs +++ b/meilisearch-auth/src/error.rs @@ -28,7 +28,7 @@ pub enum AuthControllerError { ApiKeyNotFound(String), #[error("`uid` field value `{0}` is already an existing API key.")] ApiKeyAlreadyExists(String), - #[error("`{0}` field cannot be modified for the given resource.")] + #[error("The `{0}` field cannot be modified for the given resource.")] ImmutableField(String), 
#[error("Internal error: {0}")] Internal(Box), diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 4eb1fdd6f..d7e7dad0f 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -1150,7 +1150,7 @@ async fn error_patch_api_key_indexes() { let (response, code) = server.patch_api_key(&uid, content).await; assert_eq!(400, code, "{:?}", &response); - let expected = json!({"message": "`indexes` field cannot be modified for the given resource.", + let expected = json!({"message": "The `indexes` field cannot be modified for the given resource.", "code": "immutable_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable_field" @@ -1206,7 +1206,7 @@ async fn error_patch_api_key_actions() { let (response, code) = server.patch_api_key(&uid, content).await; assert_eq!(400, code, "{:?}", &response); - let expected = json!({"message": "`actions` field cannot be modified for the given resource.", + let expected = json!({"message": "The `actions` field cannot be modified for the given resource.", "code": "immutable_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable_field" @@ -1254,7 +1254,7 @@ async fn error_patch_api_key_expiration_date() { let (response, code) = server.patch_api_key(&uid, content).await; assert_eq!(400, code, "{:?}", &response); - let expected = json!({"message": "`expiresAt` field cannot be modified for the given resource.", + let expected = json!({"message": "The `expiresAt` field cannot be modified for the given resource.", "code": "immutable_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable_field" From dfce9ba4683727b27998865cca6205dbed921478 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 11:26:12 +0200 Subject: [PATCH 096/185] Apply suggestions --- meilisearch-http/src/routes/tasks.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 49554858d..2f62615fd 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -138,12 +138,12 @@ async fn get_tasks( // We +1 just to know if there is more after this "page" or not. let limit = limit.saturating_add(1); - let mut tasks_results = meilisearch + let mut tasks_results: Vec<_> = meilisearch .list_tasks(filters, Some(limit), from) .await? .into_iter() .map(TaskView::from) - .collect::>(); + .collect(); // If we were able to fetch the number +1 tasks we asked // it means that there is more to come. From bb405aa729be1c6459cb92efdb9f7fe2cc2353e0 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 11:48:59 +0200 Subject: [PATCH 097/185] Update the /indexes/{indexUid} verb from PUT to PATCH --- meilisearch-http/src/routes/indexes/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index 37f4ee7b8..c1d2fabf1 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -27,7 +27,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service( web::resource("") .route(web::get().to(SeqHandler(get_index))) - .route(web::put().to(SeqHandler(update_index))) + .route(web::patch().to(SeqHandler(update_index))) .route(web::delete().to(SeqHandler(delete_index))), ) .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats)))) From f8d3f739ad2aaf69ef9c36910cab9139abdcc0f6 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 11:49:46 +0200 Subject: [PATCH 098/185] Update the /indexes/{indexUid}/settings verb from POST to PATCH --- meilisearch-http/src/routes/indexes/settings.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-http/src/routes/indexes/settings.rs 
b/meilisearch-http/src/routes/indexes/settings.rs index 222aca580..59a965288 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -271,7 +271,7 @@ macro_rules! generate_configure { use crate::extractors::sequential_extractor::SeqHandler; cfg.service( web::resource("") - .route(web::post().to(SeqHandler(update_all))) + .route(web::patch().to(SeqHandler(update_all))) .route(web::get().to(SeqHandler(get_all))) .route(web::delete().to(SeqHandler(delete_all)))) $(.service($mod::resources()))*; From 10a71fdb102096fdce4d724136d23ab2dbdffda3 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 11:53:16 +0200 Subject: [PATCH 099/185] Update the /indexes/{indexUid}/settings/* verbs by adding a macro parameter --- .../src/routes/indexes/settings.rs | 25 ++++++++++++++++--- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 59a965288..9efa825f8 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -13,7 +13,7 @@ use crate::task::SummarizedTaskView; #[macro_export] macro_rules! make_setting_route { - ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => { + ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => { pub mod $attr { use actix_web::{web, HttpRequest, HttpResponse, Resource}; use log::debug; @@ -100,18 +100,27 @@ macro_rules! 
make_setting_route { pub fn resources() -> Resource { Resource::new($route) .route(web::get().to(SeqHandler(get))) - .route(web::post().to(SeqHandler(update))) + .route(web::$update_verb().to(SeqHandler(update))) .route(web::delete().to(SeqHandler(delete))) } } }; - ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => { - make_setting_route!($route, $type, $attr, $camelcase_attr, _analytics, |_, _| {}); + ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal) => { + make_setting_route!( + $route, + $update_verb, + $type, + $attr, + $camelcase_attr, + _analytics, + |_, _| {} + ); }; } make_setting_route!( "/filterable-attributes", + put, std::collections::BTreeSet, filterable_attributes, "filterableAttributes", @@ -134,6 +143,7 @@ make_setting_route!( make_setting_route!( "/sortable-attributes", + put, std::collections::BTreeSet, sortable_attributes, "sortableAttributes", @@ -156,6 +166,7 @@ make_setting_route!( make_setting_route!( "/displayed-attributes", + put, Vec, displayed_attributes, "displayedAttributes" @@ -163,6 +174,7 @@ make_setting_route!( make_setting_route!( "/typo-tolerance", + patch, meilisearch_lib::index::updates::TypoSettings, typo_tolerance, "typoTolerance", @@ -204,6 +216,7 @@ make_setting_route!( make_setting_route!( "/searchable-attributes", + put, Vec, searchable_attributes, "searchableAttributes", @@ -225,6 +238,7 @@ make_setting_route!( make_setting_route!( "/stop-words", + put, std::collections::BTreeSet, stop_words, "stopWords" @@ -232,6 +246,7 @@ make_setting_route!( make_setting_route!( "/synonyms", + put, std::collections::BTreeMap>, synonyms, "synonyms" @@ -239,6 +254,7 @@ make_setting_route!( make_setting_route!( "/distinct-attribute", + put, String, distinct_attribute, "distinctAttribute" @@ -246,6 +262,7 @@ make_setting_route!( make_setting_route!( "/ranking-rules", + put, Vec, ranking_rules, "rankingRules", From bcb51905d7f468aea43ee85142aed8e19a9dc870 Mon Sep 17 00:00:00 2001 From: 
Kerollmops Date: Thu, 2 Jun 2022 12:16:46 +0200 Subject: [PATCH 100/185] Fix the authorization tests --- Cargo.lock | 1 - meilisearch-http/Cargo.toml | 1 - meilisearch-http/tests/auth/authorization.rs | 20 +++---- meilisearch-http/tests/common/index.rs | 37 ++++++------ meilisearch-http/tests/content_type.rs | 59 ++++++++++++++------ 5 files changed, 70 insertions(+), 48 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ea115c459..1f7d93ed2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2036,7 +2036,6 @@ dependencies = [ "obkv", "once_cell", "parking_lot", - "paste", "pin-project-lite", "platform-dirs", "rand", diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index ba11b20e0..b9771afa2 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -83,7 +83,6 @@ actix-rt = "2.7.0" assert-json-diff = "2.0.1" manifest-dir-macros = "0.1.14" maplit = "1.0.2" -paste = "1.0.6" serde_url_params = "0.2.1" urlencoding = "2.1.0" diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 2080e2990..5d5d53d52 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -18,7 +18,7 @@ pub static AUTHORIZATIONS: Lazy hashset!{"tasks.get", "*"}, ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "*"}, ("GET", "/tasks/0") => hashset!{"tasks.get", "*"}, - ("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"}, + ("PATCH", "/indexes/products/") => hashset!{"indexes.update", "*"}, ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"}, ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"}, ("POST", "/indexes") => hashset!{"indexes.create", "*"}, @@ -33,15 +33,15 @@ pub static AUTHORIZATIONS: Lazy hashset!{"settings.get", "*"}, ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"}, ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"}, - ("POST", 
"/indexes/products/settings") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"}, - ("POST", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"}, + ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"}, + ("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"}, ("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"}, ("GET", "/stats") => hashset!{"stats.get", "*"}, ("POST", "/dumps") => hashset!{"dumps.create", "*"}, diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 4be8ad873..edda799e0 100644 --- 
a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -4,29 +4,12 @@ use std::{ }; use actix_web::http::StatusCode; -use paste::paste; use serde_json::{json, Value}; use tokio::time::sleep; use urlencoding::encode; use super::service::Service; -macro_rules! make_settings_test_routes { - ($($name:ident),+) => { - $(paste! { - pub async fn [](&self, value: Value) -> (Value, StatusCode) { - let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-")); - self.service.post(url, value).await - } - - pub async fn [](&self) -> (Value, StatusCode) { - let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-")); - self.service.get(url).await - } - })* - }; -} - pub struct Index<'a> { pub uid: String, pub service: &'a Service, @@ -198,7 +181,7 @@ impl Index<'_> { pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) { let url = format!("/indexes/{}/settings", encode(self.uid.as_ref())); - self.service.post(url, settings).await + self.service.patch(url, settings).await } pub async fn delete_settings(&self) -> (Value, StatusCode) { @@ -242,7 +225,23 @@ impl Index<'_> { self.service.get(url).await } - make_settings_test_routes!(distinct_attribute); + pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) { + let url = format!( + "/indexes/{}/settings/{}", + encode(self.uid.as_ref()).to_string(), + "distinct-attribute" + ); + self.service.put(url, value).await + } + + pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) { + let url = format!( + "/indexes/{}/settings/{}", + encode(self.uid.as_ref()).to_string(), + "distinct-attribute" + ); + self.service.get(url).await + } } pub struct GetDocumentOptions { diff --git a/meilisearch-http/tests/content_type.rs b/meilisearch-http/tests/content_type.rs index d6b4cbd78..eace67a08 100644 --- 
a/meilisearch-http/tests/content_type.rs +++ b/meilisearch-http/tests/content_type.rs @@ -7,23 +7,45 @@ use actix_web::test; use meilisearch_http::{analytics, create_app}; use serde_json::{json, Value}; +enum HttpVerb { + Put, + Patch, + Post, + Get, + Delete, +} + +impl HttpVerb { + fn test_request(&self) -> test::TestRequest { + match self { + HttpVerb::Put => test::TestRequest::put(), + HttpVerb::Patch => test::TestRequest::patch(), + HttpVerb::Post => test::TestRequest::post(), + HttpVerb::Get => test::TestRequest::get(), + HttpVerb::Delete => test::TestRequest::delete(), + } + } +} + #[actix_rt::test] async fn error_json_bad_content_type() { + use HttpVerb::{Patch, Post, Put}; + let routes = [ - // all the POST routes except the dumps that can be created without any body or content-type + // all the routes except the dumps that can be created without any body or content-type // and the search that is not a strict json - "/indexes", - "/indexes/doggo/documents/delete-batch", - "/indexes/doggo/search", - "/indexes/doggo/settings", - "/indexes/doggo/settings/displayed-attributes", - "/indexes/doggo/settings/distinct-attribute", - "/indexes/doggo/settings/filterable-attributes", - "/indexes/doggo/settings/ranking-rules", - "/indexes/doggo/settings/searchable-attributes", - "/indexes/doggo/settings/sortable-attributes", - "/indexes/doggo/settings/stop-words", - "/indexes/doggo/settings/synonyms", + (Post, "/indexes"), + (Post, "/indexes/doggo/documents/delete-batch"), + (Post, "/indexes/doggo/search"), + (Patch, "/indexes/doggo/settings"), + (Put, "/indexes/doggo/settings/displayed-attributes"), + (Put, "/indexes/doggo/settings/distinct-attribute"), + (Put, "/indexes/doggo/settings/filterable-attributes"), + (Put, "/indexes/doggo/settings/ranking-rules"), + (Put, "/indexes/doggo/settings/searchable-attributes"), + (Put, "/indexes/doggo/settings/sortable-attributes"), + (Put, "/indexes/doggo/settings/stop-words"), + (Put, "/indexes/doggo/settings/synonyms"), ]; let 
bad_content_types = [ "application/csv", @@ -45,10 +67,11 @@ async fn error_json_bad_content_type() { analytics::MockAnalytics::new(&server.service.options).0 )) .await; - for route in routes { + for (verb, route) in routes { // Good content-type, we probably have an error since we didn't send anything in the json // so we only ensure we didn't get a bad media type error. - let req = test::TestRequest::post() + let req = verb + .test_request() .uri(route) .set_payload(document) .insert_header(("content-type", "application/json")) @@ -59,7 +82,8 @@ async fn error_json_bad_content_type() { "calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route); // No content-type. - let req = test::TestRequest::post() + let req = verb + .test_request() .uri(route) .set_payload(document) .to_request(); @@ -82,7 +106,8 @@ async fn error_json_bad_content_type() { for bad_content_type in bad_content_types { // Always bad content-type - let req = test::TestRequest::post() + let req = verb + .test_request() .uri(route) .set_payload(document.to_string()) .insert_header(("content-type", bad_content_type)) From ce37f53a1605eaa3de308adfec656f5eff222a24 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 12:17:53 +0200 Subject: [PATCH 101/185] Add typo-tolerance to the authorization tests --- meilisearch-http/tests/auth/authorization.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 5d5d53d52..7846188fb 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -34,6 +34,7 @@ pub static AUTHORIZATIONS: Lazy hashset!{"settings.get", "*"}, ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"}, ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "*"}, + ("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", 
"*"}, ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"}, ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"}, ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"}, From 0258659278820dcb9958c42e14cc4dbcb66cde1e Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 12:24:27 +0200 Subject: [PATCH 102/185] Fix the get_settings tests --- meilisearch-http/tests/common/index.rs | 2 +- meilisearch-http/tests/settings/get_settings.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index edda799e0..0a9b61d25 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -48,7 +48,7 @@ impl Index<'_> { }); let url = format!("/indexes/{}", encode(self.uid.as_ref())); - self.service.put(url, body).await + self.service.patch(url, body).await } pub async fn delete(&self) -> (Value, StatusCode) { diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index 9b3c31b63..e79b3ed26 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -214,7 +214,7 @@ macro_rules! 
test_setting_routes { .chars() .map(|c| if c == '_' { '-' } else { c }) .collect::()); - let (response, code) = server.service.post(url, serde_json::Value::Null).await; + let (response, code) = server.service.put(url, serde_json::Value::Null).await; assert_eq!(code, 202, "{}", response); server.index("").wait_task(0).await; let (response, code) = server.index("test").get().await; From 419922e475777cd32bf2b47bd5d5e5db97a6d439 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 13:38:23 +0200 Subject: [PATCH 103/185] Make clippy happy --- meilisearch-http/tests/common/index.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 0a9b61d25..275bec4cd 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -228,7 +228,7 @@ impl Index<'_> { pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) { let url = format!( "/indexes/{}/settings/{}", - encode(self.uid.as_ref()).to_string(), + encode(self.uid.as_ref()), "distinct-attribute" ); self.service.put(url, value).await @@ -237,7 +237,7 @@ impl Index<'_> { pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) { let url = format!( "/indexes/{}/settings/{}", - encode(self.uid.as_ref()).to_string(), + encode(self.uid.as_ref()), "distinct-attribute" ); self.service.get(url).await From 12b5eabd5db7a5e5e529d15b9c0a74bb4bbcd2cf Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 2 Jun 2022 13:31:46 +0200 Subject: [PATCH 104/185] chore(http): unify the pagination of the index and documents route behind a common type --- .../src/routes/indexes/documents.rs | 18 ++--- meilisearch-http/src/routes/indexes/mod.rs | 34 +++----- meilisearch-http/src/routes/mod.rs | 81 +++++++++++++++++++ .../tests/documents/add_documents.rs | 2 +- 4 files changed, 100 insertions(+), 35 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/documents.rs 
b/meilisearch-http/src/routes/indexes/documents.rs index 4c87044db..f506e587c 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -14,7 +14,7 @@ use mime::Mime; use once_cell::sync::Lazy; use serde::Deserialize; use serde_cs::vec::CS; -use serde_json::{json, Value}; +use serde_json::Value; use tokio::sync::mpsc; use crate::analytics::Analytics; @@ -22,7 +22,7 @@ use crate::error::MeilisearchHttpError; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::payload::Payload; use crate::extractors::sequential_extractor::SeqHandler; -use crate::routes::{fold_star_or, StarOr}; +use crate::routes::{fold_star_or, PaginationView, StarOr}; use crate::task::SummarizedTaskView; static ACCEPTED_CONTENT_TYPE: Lazy> = Lazy::new(|| { @@ -122,14 +122,12 @@ pub async fn delete_document( Ok(HttpResponse::Accepted().json(task)) } -const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; - #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct BrowseQuery { #[serde(default)] offset: usize, - #[serde(default = "PAGINATION_DEFAULT_LIMIT")] + #[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")] limit: usize, fields: Option>>, } @@ -141,8 +139,8 @@ pub async fn get_all_documents( ) -> Result { debug!("called with params: {:?}", params); let BrowseQuery { - offset, limit, + offset, fields, } = params.into_inner(); let attributes_to_retrieve = fields.map(CS::into_inner).and_then(fold_star_or); @@ -151,10 +149,10 @@ pub async fn get_all_documents( .documents(path.into_inner(), offset, limit, attributes_to_retrieve) .await?; - debug!("returns: {:?}", documents); - Ok(HttpResponse::Ok().json(json!( - { "limit": limit, "offset": offset, "total": total, "results": documents } - ))) + let ret = PaginationView::new(offset, limit, total as usize, documents); + + debug!("returns: {:?}", ret); + Ok(HttpResponse::Ok().json(ret)) } #[derive(Deserialize, 
Debug)] diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index 37f4ee7b8..70170ebb7 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -12,6 +12,8 @@ use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; use crate::task::SummarizedTaskView; +use super::Pagination; + pub mod documents; pub mod search; pub mod settings; @@ -37,38 +39,22 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } -const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct Paginate { - #[serde(default)] - offset: usize, - #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - limit: usize, -} - pub async fn list_indexes( data: GuardedData, MeiliSearch>, - paginate: web::Query, + paginate: web::Query, ) -> Result { let search_rules = &data.filters().search_rules; let indexes: Vec<_> = data.list_indexes().await?; let nb_indexes = indexes.len(); - let indexes: Vec<_> = indexes + let iter = indexes .into_iter() - .filter(|i| search_rules.is_index_authorized(&i.uid)) - .skip(paginate.offset) - .take(paginate.limit) - .collect(); + .filter(|i| search_rules.is_index_authorized(&i.uid)); + let ret = paginate + .into_inner() + .auto_paginate_unsized(nb_indexes, iter); - debug!("returns: {:?}", indexes); - Ok(HttpResponse::Ok().json(json!({ - "results": indexes, - "offset": paginate.offset, - "limit": paginate.limit, - "total": nb_indexes, - }))) + debug!("returns: {:?}", ret); + Ok(HttpResponse::Ok().json(ret)) } #[derive(Debug, Deserialize)] diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index a34b7578d..1b37396e9 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -3,6 +3,7 @@ use std::str::FromStr; use actix_web::{web, HttpResponse}; use log::debug; 
use serde::{Deserialize, Serialize}; + use time::OffsetDateTime; use meilisearch_error::ResponseError; @@ -58,6 +59,86 @@ pub fn fold_star_or(content: impl IntoIterator>) -> Option usize = || 20; + +#[derive(Debug, Clone, Copy, Deserialize)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct Pagination { + #[serde(default)] + pub offset: usize, + #[serde(default = "PAGINATION_DEFAULT_LIMIT")] + pub limit: usize, +} + +#[derive(Debug, Clone, Serialize)] +pub struct PaginationView { + pub results: Vec, + pub offset: usize, + pub limit: usize, + pub total: usize, +} + +impl Pagination { + /// Given the full data to paginate, returns the selected section. + pub fn auto_paginate_sized( + self, + content: impl IntoIterator + ExactSizeIterator, + ) -> PaginationView + where + T: Serialize, + { + let total = content.len(); + let content: Vec<_> = content + .into_iter() + .skip(self.offset) + .take(self.limit) + .collect(); + self.format_with(total, content) + } + + /// Given an iterator and the total number of elements, returns the selected section. + pub fn auto_paginate_unsized( + self, + total: usize, + content: impl IntoIterator, + ) -> PaginationView + where + T: Serialize, + { + let content: Vec<_> = content + .into_iter() + .skip(self.offset) + .take(self.limit) + .collect(); + self.format_with(total, content) + } + + /// Given the data already paginated + the total number of elements, it stores + /// everything in a [PaginationResult]. 
+ pub fn format_with(self, total: usize, results: Vec) -> PaginationView + where + T: Serialize, + { + PaginationView { + results, + offset: self.offset, + limit: self.limit, + total, + } + } +} + +impl PaginationView { + pub fn new(offset: usize, limit: usize, total: usize, results: Vec) -> Self { + Self { + offset, + limit, + results, + total, + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] #[allow(clippy::large_enum_variant)] #[serde(tag = "name")] diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index 8ef8c54fd..d6235c8b7 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -827,7 +827,7 @@ async fn add_larger_dataset() { ..Default::default() }) .await; - assert_eq!(code, 200); + assert_eq!(code, 200, "failed with `{}`", response); assert_eq!(response["results"].as_array().unwrap().len(), 77); } From 4667c9fe1a92858308a891ea2536059d76733e38 Mon Sep 17 00:00:00 2001 From: Irevoire Date: Wed, 25 May 2022 19:40:43 +0200 Subject: [PATCH 105/185] fix(http): Fix the query parameter in the Documents route --- .../tests/documents/get_documents.rs | 90 ++++++++++++++++++- meilisearch-lib/src/index/index.rs | 42 +++++---- 2 files changed, 106 insertions(+), 26 deletions(-) diff --git a/meilisearch-http/tests/documents/get_documents.rs b/meilisearch-http/tests/documents/get_documents.rs index cad656088..83e433b22 100644 --- a/meilisearch-http/tests/documents/get_documents.rs +++ b/meilisearch-http/tests/documents/get_documents.rs @@ -1,5 +1,4 @@ -use crate::common::Server; -use crate::common::{GetAllDocumentsOptions, GetDocumentOptions}; +use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server}; use serde_json::json; @@ -71,7 +70,6 @@ async fn get_document() { }) ); - /* This currently doesn't work but should be fixed by #2433 let (response, code) = index .get_document( 0, @@ -87,7 +85,6 @@ async fn 
get_document() { "nested": { "content": "foobar" }, }) ); - */ } #[actix_rt::test] @@ -289,6 +286,91 @@ async fn test_get_all_documents_attributes_to_retrieve() { } } +#[actix_rt::test] +async fn get_document_s_nested_attributes_to_retrieve() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + let documents = json!([ + { + "id": 0, + "content.truc": "foobar", + }, + { + "id": 1, + "content": { + "truc": "foobar", + "machin": "bidule", + }, + }, + ]); + let (_, code) = index.add_documents(documents, None).await; + assert_eq!(code, 202); + index.wait_task(0).await; + + let (response, code) = index + .get_document( + 0, + Some(GetDocumentOptions { + fields: Some(vec!["content"]), + }), + ) + .await; + assert_eq!(code, 200); + assert_eq!(response, json!({})); + let (response, code) = index + .get_document( + 1, + Some(GetDocumentOptions { + fields: Some(vec!["content"]), + }), + ) + .await; + assert_eq!(code, 200); + assert_eq!( + response, + json!({ + "content": { + "truc": "foobar", + "machin": "bidule", + }, + }) + ); + + let (response, code) = index + .get_document( + 0, + Some(GetDocumentOptions { + fields: Some(vec!["content.truc"]), + }), + ) + .await; + assert_eq!(code, 200); + assert_eq!( + response, + json!({ + "content.truc": "foobar", + }) + ); + let (response, code) = index + .get_document( + 1, + Some(GetDocumentOptions { + fields: Some(vec!["content.truc"]), + }), + ) + .await; + assert_eq!(code, 200); + assert_eq!( + response, + json!({ + "content": { + "truc": "foobar", + }, + }) + ); +} + #[actix_rt::test] async fn get_documents_displayed_attributes_is_ignored() { let server = Server::new().await; diff --git a/meilisearch-lib/src/index/index.rs b/meilisearch-lib/src/index/index.rs index bcf94bb0c..9c6150cfb 100644 --- a/meilisearch-lib/src/index/index.rs +++ b/meilisearch-lib/src/index/index.rs @@ -8,7 +8,7 @@ use std::sync::Arc; use fst::IntoStreamer; use milli::heed::{EnvOpenOptions, RoTxn}; use 
milli::update::{IndexerConfig, Setting}; -use milli::{obkv_to_json, FieldDistribution, FieldId}; +use milli::{obkv_to_json, FieldDistribution}; use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; use time::OffsetDateTime; @@ -228,7 +228,7 @@ impl Index { let txn = self.read_txn()?; let fields_ids_map = self.fields_ids_map(&txn)?; - let fields_to_display = self.fields_to_display(&attributes_to_retrieve, &fields_ids_map)?; + let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect(); let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit); @@ -236,8 +236,15 @@ impl Index { for entry in iter { let (_id, obkv) = entry?; - let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)?; - documents.push(object); + let document = obkv_to_json(&all_fields, &fields_ids_map, obkv)?; + let document = match &attributes_to_retrieve { + Some(attributes_to_retrieve) => permissive_json_pointer::select_values( + &document, + attributes_to_retrieve.iter().map(|s| s.as_ref()), + ), + None => document, + }; + documents.push(document); } let number_of_documents = self.number_of_documents(&txn)?; @@ -253,7 +260,7 @@ impl Index { let txn = self.read_txn()?; let fields_ids_map = self.fields_ids_map(&txn)?; - let fields_to_display = self.fields_to_display(&attributes_to_retrieve, &fields_ids_map)?; + let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect(); let internal_id = self .external_documents_ids(&txn)? 
@@ -267,7 +274,14 @@ impl Index { .map(|(_, d)| d) .ok_or(IndexError::DocumentNotFound(doc_id))?; - let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)?; + let document = obkv_to_json(&all_fields, &fields_ids_map, document)?; + let document = match &attributes_to_retrieve { + Some(attributes_to_retrieve) => permissive_json_pointer::select_values( + &document, + attributes_to_retrieve.iter().map(|s| s.as_ref()), + ), + None => document, + }; Ok(document) } @@ -276,22 +290,6 @@ impl Index { self.env.size() } - fn fields_to_display>( - &self, - attributes_to_retrieve: &Option>, - fields_ids_map: &milli::FieldsIdsMap, - ) -> Result> { - let attributes_to_retrieve_ids = match attributes_to_retrieve { - Some(attrs) => attrs - .iter() - .filter_map(|f| fields_ids_map.id(f.as_ref())) - .collect(), - None => fields_ids_map.iter().map(|(id, _)| id).collect(), - }; - - Ok(attributes_to_retrieve_ids) - } - pub fn snapshot(&self, path: impl AsRef) -> Result<()> { let mut dst = path.as_ref().join(format!("indexes/{}/", self.uuid)); create_dir_all(&dst)?; From 0c5352fc222b303645938dfd8bb49db394add12e Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 31 May 2022 17:18:40 +0200 Subject: [PATCH 106/185] move index_uid from task to task_content --- meilisearch-http/src/routes/tasks.rs | 4 +- meilisearch-http/src/task.rs | 26 +- meilisearch-lib/src/dump/compat/v3.rs | 16 +- meilisearch-lib/src/dump/compat/v4.rs | 84 +++- meilisearch-lib/src/index_controller/mod.rs | 42 +- meilisearch-lib/src/index_resolver/mod.rs | 414 +++++++++--------- .../tasks/handlers/index_resolver_handler.rs | 4 +- meilisearch-lib/src/tasks/handlers/mod.rs | 4 +- meilisearch-lib/src/tasks/scheduler.rs | 84 ++-- meilisearch-lib/src/tasks/task.rs | 38 +- meilisearch-lib/src/tasks/task_store/mod.rs | 33 +- meilisearch-lib/src/tasks/task_store/store.rs | 48 +- 12 files changed, 452 insertions(+), 345 deletions(-) diff --git a/meilisearch-http/src/routes/tasks.rs 
b/meilisearch-http/src/routes/tasks.rs index 2f62615fd..0ab4678b7 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -38,9 +38,9 @@ fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool { matches!((type_, content), (TaskType::IndexCreation, TaskContent::IndexCreation { .. }) | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. }) - | (TaskType::IndexDeletion, TaskContent::IndexDeletion) + | (TaskType::IndexDeletion, TaskContent::IndexDeletion { .. }) | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. }) - | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(_)) + | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion{ .. }) | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. }) ) } diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index f8ba941d8..d9360039d 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -30,9 +30,9 @@ impl From for TaskType { match other { TaskContent::IndexCreation { .. } => TaskType::IndexCreation, TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate, - TaskContent::IndexDeletion => TaskType::IndexDeletion, + TaskContent::IndexDeletion { .. } => TaskType::IndexDeletion, TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate, - TaskContent::DocumentDeletion(_) => TaskType::DocumentDeletion, + TaskContent::DocumentDeletion { .. } => TaskType::DocumentDeletion, TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate, TaskContent::Dump { .. 
} => TaskType::DumpCreation, } @@ -203,9 +203,9 @@ pub struct TaskView { impl From for TaskView { fn from(task: Task) -> Self { + let index_uid = task.index_uid().map(String::from); let Task { id, - index_uid, content, events, } = task; @@ -221,20 +221,26 @@ impl From for TaskView { (TaskType::DocumentAdditionOrUpdate, Some(details)) } - TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => ( + TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Ids(ids), + .. + } => ( TaskType::DocumentDeletion, Some(TaskDetails::DocumentDeletion { received_document_ids: ids.len(), deleted_documents: None, }), ), - TaskContent::DocumentDeletion(DocumentDeletion::Clear) => ( + TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Clear, + .. + } => ( TaskType::DocumentDeletion, Some(TaskDetails::ClearAll { deleted_documents: None, }), ), - TaskContent::IndexDeletion => ( + TaskContent::IndexDeletion { .. } => ( TaskType::IndexDeletion, Some(TaskDetails::ClearAll { deleted_documents: None, @@ -244,11 +250,11 @@ impl From for TaskView { TaskType::SettingsUpdate, Some(TaskDetails::Settings { settings }), ), - TaskContent::IndexCreation { primary_key } => ( + TaskContent::IndexCreation { primary_key, .. } => ( TaskType::IndexCreation, Some(TaskDetails::IndexInfo { primary_key }), ), - TaskContent::IndexUpdate { primary_key } => ( + TaskContent::IndexUpdate { primary_key, .. 
} => ( TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }), ), @@ -353,7 +359,7 @@ impl From for TaskView { Self { uid: id, - index_uid: index_uid.map(|u| u.into_inner()), + index_uid, status, task_type, details, @@ -402,7 +408,7 @@ impl From for SummarizedTaskView { Self { task_uid: other.id, - index_uid: other.index_uid.map(|u| u.into_inner()), + index_uid: other.index_uid().map(String::from), status: TaskStatus::Enqueued, task_type: other.content.into(), enqueued_at, diff --git a/meilisearch-lib/src/dump/compat/v3.rs b/meilisearch-lib/src/dump/compat/v3.rs index 164b7153d..2044e3b60 100644 --- a/meilisearch-lib/src/dump/compat/v3.rs +++ b/meilisearch-lib/src/dump/compat/v3.rs @@ -4,10 +4,10 @@ use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use uuid::Uuid; -use super::v4::{Task, TaskEvent}; +use super::v4::{Task, TaskContent, TaskEvent}; use crate::index::{Settings, Unchecked}; use crate::index_resolver::IndexUid; -use crate::tasks::task::{DocumentDeletion, TaskContent, TaskId, TaskResult}; +use crate::tasks::task::{DocumentDeletion, TaskId, TaskResult}; use super::v2; @@ -59,9 +59,9 @@ pub enum Update { ClearDocuments, } -impl From for TaskContent { - fn from(other: Update) -> Self { - match other { +impl From for super::v4::TaskContent { + fn from(update: Update) -> Self { + match update { Update::DeleteDocuments(ids) => { TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) } @@ -186,10 +186,10 @@ impl Failed { impl From<(UpdateStatus, String, TaskId)> for Task { fn from((update, uid, task_id): (UpdateStatus, String, TaskId)) -> Self { // Dummy task - let mut task = Task { + let mut task = super::v4::Task { id: task_id, - index_uid: IndexUid::new(uid).unwrap(), - content: TaskContent::IndexDeletion, + index_uid: IndexUid::new_unchecked(uid), + content: super::v4::TaskContent::IndexDeletion, events: Vec::new(), }; diff --git a/meilisearch-lib/src/dump/compat/v4.rs b/meilisearch-lib/src/dump/compat/v4.rs index 
6fa0e582a..867bc7b63 100644 --- a/meilisearch-lib/src/dump/compat/v4.rs +++ b/meilisearch-lib/src/dump/compat/v4.rs @@ -1,9 +1,14 @@ use meilisearch_error::ResponseError; +use milli::update::IndexDocumentsMethod; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; +use uuid::Uuid; +use crate::index::{Settings, Unchecked}; use crate::tasks::batch::BatchId; -use crate::tasks::task::{TaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult}; +use crate::tasks::task::{ + DocumentDeletion, TaskContent as NewTaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult, +}; use crate::IndexUid; #[derive(Debug, Serialize, Deserialize)] @@ -18,8 +23,7 @@ impl From for crate::tasks::task::Task { fn from(other: Task) -> Self { Self { id: other.id, - index_uid: Some(other.index_uid), - content: other.content, + content: NewTaskContent::from((other.index_uid, other.content)), events: other.events.into_iter().map(Into::into).collect(), } } @@ -65,3 +69,77 @@ impl From for NewTaskEvent { } } } + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +#[allow(clippy::large_enum_variant)] +pub enum TaskContent { + DocumentAddition { + content_uuid: Uuid, + merge_strategy: IndexDocumentsMethod, + primary_key: Option, + documents_count: usize, + allow_index_creation: bool, + }, + DocumentDeletion(DocumentDeletion), + SettingsUpdate { + settings: Settings, + /// Indicates whether the task was a deletion + is_deletion: bool, + allow_index_creation: bool, + }, + IndexDeletion, + IndexCreation { + primary_key: Option, + }, + IndexUpdate { + primary_key: Option, + }, + Dump { + uid: String, + }, +} + +impl From<(IndexUid, TaskContent)> for NewTaskContent { + fn from((index_uid, content): (IndexUid, TaskContent)) -> Self { + match content { + TaskContent::DocumentAddition { + content_uuid, + merge_strategy, + primary_key, + documents_count, + allow_index_creation, + } => NewTaskContent::DocumentAddition { + index_uid, + content_uuid, + merge_strategy, + primary_key, + 
documents_count, + allow_index_creation, + }, + TaskContent::DocumentDeletion(deletion) => NewTaskContent::DocumentDeletion { + index_uid, + deletion, + }, + TaskContent::SettingsUpdate { + settings, + is_deletion, + allow_index_creation, + } => NewTaskContent::SettingsUpdate { + index_uid, + settings, + is_deletion, + allow_index_creation, + }, + TaskContent::IndexDeletion => NewTaskContent::IndexDeletion { index_uid }, + TaskContent::IndexCreation { primary_key } => NewTaskContent::IndexCreation { + index_uid, + primary_key, + }, + TaskContent::IndexUpdate { primary_key } => NewTaskContent::IndexUpdate { + index_uid, + primary_key, + }, + TaskContent::Dump { uid } => NewTaskContent::Dump { uid }, + } + } +} diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index c872b60c5..ecca9ac63 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -356,12 +356,16 @@ where } pub async fn register_update(&self, uid: String, update: Update) -> Result { - let uid = IndexUid::new(uid)?; + let index_uid = IndexUid::new(uid)?; let content = match update { - Update::DeleteDocuments(ids) => { - TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) - } - Update::ClearDocuments => TaskContent::DocumentDeletion(DocumentDeletion::Clear), + Update::DeleteDocuments(ids) => TaskContent::DocumentDeletion { + index_uid, + deletion: DocumentDeletion::Ids(ids), + }, + Update::ClearDocuments => TaskContent::DocumentDeletion { + index_uid, + deletion: DocumentDeletion::Clear, + }, Update::Settings { settings, is_deletion, @@ -370,6 +374,7 @@ where settings, is_deletion, allow_index_creation, + index_uid, }, Update::DocumentAddition { mut payload, @@ -409,14 +414,21 @@ where primary_key, documents_count, allow_index_creation, + index_uid, } } - Update::DeleteIndex => TaskContent::IndexDeletion, - Update::CreateIndex { primary_key } => TaskContent::IndexCreation { primary_key }, - 
Update::UpdateIndex { primary_key } => TaskContent::IndexUpdate { primary_key }, + Update::DeleteIndex => TaskContent::IndexDeletion { index_uid }, + Update::CreateIndex { primary_key } => TaskContent::IndexCreation { + primary_key, + index_uid, + }, + Update::UpdateIndex { primary_key } => TaskContent::IndexUpdate { + primary_key, + index_uid, + }, }; - let task = self.task_store.register(Some(uid), content).await?; + let task = self.task_store.register(content).await?; self.scheduler.read().await.notify(); Ok(task) @@ -425,7 +437,7 @@ where pub async fn register_dump_task(&self) -> Result { let uid = dump::generate_uid(); let content = TaskContent::Dump { uid }; - let task = self.task_store.register(None, content).await?; + let task = self.task_store.register(content).await?; self.scheduler.read().await.notify(); Ok(task) } @@ -569,13 +581,7 @@ where // Check if the currently indexing update is from our index. let is_indexing = processing_tasks .first() - .map(|task| { - task.index_uid - .as_ref() - .map(|u| u.as_str() == uid) - .unwrap_or(false) - }) - .unwrap_or_default(); + .map_or(false, |task| task.index_uid().map_or(false, |u| u == uid)); let index = self.index_resolver.get_index(uid).await?; let mut stats = spawn_blocking(move || index.stats()).await??; @@ -610,7 +616,7 @@ where // Check if the currently indexing update is from our index. 
stats.is_indexing = processing_tasks .first() - .and_then(|p| p.index_uid.as_ref().map(|u| u.as_str() == index_uid)) + .and_then(|p| p.index_uid().map(|u| u == index_uid)) .or(Some(false)); indexes.insert(index_uid, stats); diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 33b480f61..ac82f7a3d 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -58,7 +58,6 @@ impl IndexUid { } } - #[cfg(test)] pub fn new_unchecked(s: impl AsRef) -> Self { Self(s.as_ref().to_string()) } @@ -151,13 +150,13 @@ where match tasks.first() { Some(Task { - index_uid: Some(ref index_uid), id, content: TaskContent::DocumentAddition { merge_strategy, primary_key, allow_index_creation, + index_uid, .. }, .. @@ -227,12 +226,14 @@ where } pub async fn process_task(&self, task: &Task) -> Result { - let index_uid = task.index_uid.clone(); match &task.content { TaskContent::DocumentAddition { .. } => panic!("updates should be handled by batch"), - TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => { + TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Ids(ids), + index_uid, + } => { let ids = ids.clone(); - let index = self.get_index(index_uid.unwrap().into_inner()).await?; + let index = self.get_index(index_uid.clone().into_inner()).await?; let DocumentDeletionResult { deleted_documents, .. 
@@ -240,8 +241,11 @@ where Ok(TaskResult::DocumentDeletion { deleted_documents }) } - TaskContent::DocumentDeletion(DocumentDeletion::Clear) => { - let index = self.get_index(index_uid.unwrap().into_inner()).await?; + TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Clear, + index_uid, + } => { + let index = self.get_index(index_uid.clone().into_inner()).await?; let deleted_documents = spawn_blocking(move || -> IndexResult { let number_documents = index.stats()?.number_of_documents; index.clear_documents()?; @@ -255,12 +259,12 @@ where settings, is_deletion, allow_index_creation, + index_uid, } => { let index = if *is_deletion || !*allow_index_creation { - self.get_index(index_uid.unwrap().into_inner()).await? + self.get_index(index_uid.clone().into_inner()).await? } else { - self.get_or_create_index(index_uid.unwrap(), task.id) - .await? + self.get_or_create_index(index_uid.clone(), task.id).await? }; let settings = settings.clone(); @@ -268,8 +272,8 @@ where Ok(TaskResult::Other) } - TaskContent::IndexDeletion => { - let index = self.delete_index(index_uid.unwrap().into_inner()).await?; + TaskContent::IndexDeletion { index_uid } => { + let index = self.delete_index(index_uid.clone().into_inner()).await?; let deleted_documents = spawn_blocking(move || -> IndexResult { Ok(index.stats()?.number_of_documents) @@ -278,8 +282,11 @@ where Ok(TaskResult::ClearAll { deleted_documents }) } - TaskContent::IndexCreation { primary_key } => { - let index = self.create_index(index_uid.unwrap(), task.id).await?; + TaskContent::IndexCreation { + primary_key, + index_uid, + } => { + let index = self.create_index(index_uid.clone(), task.id).await?; if let Some(primary_key) = primary_key { let primary_key = primary_key.clone(); @@ -288,8 +295,11 @@ where Ok(TaskResult::Other) } - TaskContent::IndexUpdate { primary_key } => { - let index = self.get_index(index_uid.unwrap().into_inner()).await?; + TaskContent::IndexUpdate { + primary_key, + index_uid, + } => { + let index 
= self.get_index(index_uid.clone().into_inner()).await?; if let Some(primary_key) = primary_key { let primary_key = primary_key.clone(); @@ -411,193 +421,193 @@ where #[cfg(test)] mod test { - use std::{collections::BTreeMap, vec::IntoIter}; - - use super::*; - - use futures::future::ok; - use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; - use nelson::Mocker; - use proptest::prelude::*; - - use crate::{ - index::{ - error::{IndexError, Result as IndexResult}, - Checked, IndexMeta, IndexStats, Settings, - }, - tasks::{batch::Batch, BatchHandler}, - }; - use index_store::MockIndexStore; - use meta_store::MockIndexMetaStore; + // use std::{collections::BTreeMap, vec::IntoIter}; + // + // use super::*; + // + // use futures::future::ok; + // use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; + // use nelson::Mocker; + // use proptest::prelude::*; + // + // use crate::{ + // index::{ + // error::{IndexError, Result as IndexResult}, + // Checked, IndexMeta, IndexStats, Settings, + // }, + // tasks::{batch::Batch, BatchHandler}, + // }; + // use index_store::MockIndexStore; + // use meta_store::MockIndexMetaStore; // TODO: ignoring this test, it has become too complex to maintain, and rather implement // handler logic test. - proptest! { - #[test] - #[ignore] - fn test_process_task( - task in any::().prop_filter("IndexUid should be Some", |s| s.index_uid.is_some()), - index_exists in any::(), - index_op_fails in any::(), - any_int in any::(), - ) { - actix_rt::System::new().block_on(async move { - let uuid = Uuid::new_v4(); - let mut index_store = MockIndexStore::new(); - - let mocker = Mocker::default(); - - // Return arbitrary data from index call. - match &task.content { - TaskContent::DocumentAddition{primary_key, ..} => { - let result = move || if !index_op_fails { - Ok(DocumentAdditionResult { indexed_documents: any_int, number_of_documents: any_int }) - } else { - // return this error because it's easy to generate... 
- Err(IndexError::DocumentNotFound("a doc".into())) - }; - if primary_key.is_some() { - mocker.when::>("update_primary_key") - .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })); - } - mocker.when::<(IndexDocumentsMethod, Option, UpdateFileStore, IntoIter), IndexResult>("update_documents") - .then(move |(_, _, _, _)| result()); - } - TaskContent::SettingsUpdate{..} => { - let result = move || if !index_op_fails { - Ok(()) - } else { - // return this error because it's easy to generate... - Err(IndexError::DocumentNotFound("a doc".into())) - }; - mocker.when::<&Settings, IndexResult<()>>("update_settings") - .then(move |_| result()); - } - TaskContent::DocumentDeletion(DocumentDeletion::Ids(_ids)) => { - let result = move || if !index_op_fails { - Ok(DocumentDeletionResult { deleted_documents: any_int as u64, remaining_documents: any_int as u64 }) - } else { - // return this error because it's easy to generate... - Err(IndexError::DocumentNotFound("a doc".into())) - }; - - mocker.when::<&[String], IndexResult>("delete_documents") - .then(move |_| result()); - }, - TaskContent::DocumentDeletion(DocumentDeletion::Clear) => { - let result = move || if !index_op_fails { - Ok(()) - } else { - // return this error because it's easy to generate... - Err(IndexError::DocumentNotFound("a doc".into())) - }; - mocker.when::<(), IndexResult<()>>("clear_documents") - .then(move |_| result()); - }, - TaskContent::IndexDeletion => { - mocker.when::<(), ()>("close") - .times(index_exists as usize) - .then(move |_| ()); - } - TaskContent::IndexUpdate { primary_key } - | TaskContent::IndexCreation { primary_key } => { - if primary_key.is_some() { - let result = move || if !index_op_fails { - Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }) - } else { - // return this error because it's easy to generate... 
- Err(IndexError::DocumentNotFound("a doc".into())) - }; - mocker.when::>("update_primary_key") - .then(move |_| result()); - } - } - TaskContent::Dump { .. } => { } - } - - mocker.when::<(), IndexResult>("stats") - .then(|()| Ok(IndexStats { size: 0, number_of_documents: 0, is_indexing: Some(false), field_distribution: BTreeMap::new() })); - - let index = Index::mock(mocker); - - match &task.content { - // an unexisting index should trigger an index creation in the folllowing cases: - TaskContent::DocumentAddition { allow_index_creation: true, .. } - | TaskContent::SettingsUpdate { allow_index_creation: true, is_deletion: false, .. } - | TaskContent::IndexCreation { .. } if !index_exists => { - index_store - .expect_create() - .once() - .withf(move |&found| !index_exists || found == uuid) - .returning(move |_| Box::pin(ok(index.clone()))); - }, - TaskContent::IndexDeletion => { - index_store - .expect_delete() - // this is called only if the index.exists - .times(index_exists as usize) - .withf(move |&found| !index_exists || found == uuid) - .returning(move |_| Box::pin(ok(Some(index.clone())))); - } - // if index already exists, create index will return an error - TaskContent::IndexCreation { .. } if index_exists => (), - TaskContent::Dump { .. } => (), - // The index exists and get should be called - _ if index_exists => { - index_store - .expect_get() - .once() - .withf(move |&found| found == uuid) - .returning(move |_| Box::pin(ok(Some(index.clone())))); - }, - // the index doesn't exist and shouldn't be created, the uuidstore will return an error, and get_index will never be called. 
- _ => (), - } - - let mut uuid_store = MockIndexMetaStore::new(); - uuid_store - .expect_get() - .returning(move |uid| { - Box::pin(ok((uid, index_exists.then(|| crate::index_resolver::meta_store::IndexMeta {uuid, creation_task_id: 0 })))) - }); - - // we sould only be creating an index if the index doesn't alredy exist - uuid_store - .expect_insert() - .withf(move |_, _| !index_exists) - .returning(|_, _| Box::pin(ok(()))); - - uuid_store - .expect_delete() - .times(matches!(task.content, TaskContent::IndexDeletion) as usize) - .returning(move |_| Box::pin(ok(index_exists.then(|| crate::index_resolver::meta_store::IndexMeta { uuid, creation_task_id: 0})))); - - let mocker = Mocker::default(); - let update_file_store = UpdateFileStore::mock(mocker); - let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store); - - let batch = Batch { id: Some(1), created_at: OffsetDateTime::now_utc(), content: crate::tasks::batch::BatchContent::IndexUpdate(task.clone()) }; - if index_resolver.accept(&batch) { - let result = index_resolver.process_batch(batch).await; - - // Test for some expected output scenarios: - // Index creation and deletion cannot fail because of a failed index op, since they - // don't perform index ops. - if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. }) - || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. })) - || (!index_exists && matches!(task.content, TaskContent::IndexDeletion - | TaskContent::DocumentDeletion(_) - | TaskContent::SettingsUpdate { is_deletion: true, ..} - | TaskContent::SettingsUpdate { allow_index_creation: false, ..} - | TaskContent::DocumentAddition { allow_index_creation: false, ..} - | TaskContent::IndexUpdate { .. } )) - { - assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Failed { .. 
}), "{:?}", result); - } else { - assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeeded { .. }), "{:?}", result); - } - } - }); - } - } + // proptest! { + // #[test] + // #[ignore] + // fn test_process_task( + // task in any::().prop_filter("IndexUid should be Some", |s| s.index_uid.is_some()), + // index_exists in any::(), + // index_op_fails in any::(), + // any_int in any::(), + // ) { + // actix_rt::System::new().block_on(async move { + // let uuid = Uuid::new_v4(); + // let mut index_store = MockIndexStore::new(); + // + // let mocker = Mocker::default(); + // + // // Return arbitrary data from index call. + // match &task.content { + // TaskContent::DocumentAddition{primary_key, ..} => { + // let result = move || if !index_op_fails { + // Ok(DocumentAdditionResult { indexed_documents: any_int, number_of_documents: any_int }) + // } else { + // // return this error because it's easy to generate... + // Err(IndexError::DocumentNotFound("a doc".into())) + // }; + // if primary_key.is_some() { + // mocker.when::>("update_primary_key") + // .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })); + // } + // mocker.when::<(IndexDocumentsMethod, Option, UpdateFileStore, IntoIter), IndexResult>("update_documents") + // .then(move |(_, _, _, _)| result()); + // } + // TaskContent::SettingsUpdate{..} => { + // let result = move || if !index_op_fails { + // Ok(()) + // } else { + // // return this error because it's easy to generate... 
+ // Err(IndexError::DocumentNotFound("a doc".into())) + // }; + // mocker.when::<&Settings, IndexResult<()>>("update_settings") + // .then(move |_| result()); + // } + // TaskContent::DocumentDeletion(DocumentDeletion::Ids(_ids)) => { + // let result = move || if !index_op_fails { + // Ok(DocumentDeletionResult { deleted_documents: any_int as u64, remaining_documents: any_int as u64 }) + // } else { + // // return this error because it's easy to generate... + // Err(IndexError::DocumentNotFound("a doc".into())) + // }; + // + // mocker.when::<&[String], IndexResult>("delete_documents") + // .then(move |_| result()); + // }, + // TaskContent::DocumentDeletion(DocumentDeletion::Clear) => { + // let result = move || if !index_op_fails { + // Ok(()) + // } else { + // // return this error because it's easy to generate... + // Err(IndexError::DocumentNotFound("a doc".into())) + // }; + // mocker.when::<(), IndexResult<()>>("clear_documents") + // .then(move |_| result()); + // }, + // TaskContent::IndexDeletion => { + // mocker.when::<(), ()>("close") + // .times(index_exists as usize) + // .then(move |_| ()); + // } + // TaskContent::IndexUpdate { primary_key } + // | TaskContent::IndexCreation { primary_key } => { + // if primary_key.is_some() { + // let result = move || if !index_op_fails { + // Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }) + // } else { + // // return this error because it's easy to generate... + // Err(IndexError::DocumentNotFound("a doc".into())) + // }; + // mocker.when::>("update_primary_key") + // .then(move |_| result()); + // } + // } + // TaskContent::Dump { .. 
} => { } + // } + // + // mocker.when::<(), IndexResult>("stats") + // .then(|()| Ok(IndexStats { size: 0, number_of_documents: 0, is_indexing: Some(false), field_distribution: BTreeMap::new() })); + // + // let index = Index::mock(mocker); + // + // match &task.content { + // // an unexisting index should trigger an index creation in the folllowing cases: + // TaskContent::DocumentAddition { allow_index_creation: true, .. } + // | TaskContent::SettingsUpdate { allow_index_creation: true, is_deletion: false, .. } + // | TaskContent::IndexCreation { .. } if !index_exists => { + // index_store + // .expect_create() + // .once() + // .withf(move |&found| !index_exists || found == uuid) + // .returning(move |_| Box::pin(ok(index.clone()))); + // }, + // TaskContent::IndexDeletion => { + // index_store + // .expect_delete() + // // this is called only if the index.exists + // .times(index_exists as usize) + // .withf(move |&found| !index_exists || found == uuid) + // .returning(move |_| Box::pin(ok(Some(index.clone())))); + // } + // // if index already exists, create index will return an error + // TaskContent::IndexCreation { .. } if index_exists => (), + // TaskContent::Dump { .. } => (), + // // The index exists and get should be called + // _ if index_exists => { + // index_store + // .expect_get() + // .once() + // .withf(move |&found| found == uuid) + // .returning(move |_| Box::pin(ok(Some(index.clone())))); + // }, + // // the index doesn't exist and shouldn't be created, the uuidstore will return an error, and get_index will never be called. 
+ // _ => (), + // } + // + // let mut uuid_store = MockIndexMetaStore::new(); + // uuid_store + // .expect_get() + // .returning(move |uid| { + // Box::pin(ok((uid, index_exists.then(|| crate::index_resolver::meta_store::IndexMeta {uuid, creation_task_id: 0 })))) + // }); + // + // // we sould only be creating an index if the index doesn't alredy exist + // uuid_store + // .expect_insert() + // .withf(move |_, _| !index_exists) + // .returning(|_, _| Box::pin(ok(()))); + // + // uuid_store + // .expect_delete() + // .times(matches!(task.content, TaskContent::IndexDeletion) as usize) + // .returning(move |_| Box::pin(ok(index_exists.then(|| crate::index_resolver::meta_store::IndexMeta { uuid, creation_task_id: 0})))); + // + // let mocker = Mocker::default(); + // let update_file_store = UpdateFileStore::mock(mocker); + // let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store); + // + // let batch = Batch { id: Some(1), created_at: OffsetDateTime::now_utc(), content: crate::tasks::batch::BatchContent::IndexUpdate(task.clone()) }; + // if index_resolver.accept(&batch) { + // let result = index_resolver.process_batch(batch).await; + // + // // Test for some expected output scenarios: + // // Index creation and deletion cannot fail because of a failed index op, since they + // // don't perform index ops. + // if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. }) + // || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. })) + // || (!index_exists && matches!(task.content, TaskContent::IndexDeletion + // | TaskContent::DocumentDeletion(_) + // | TaskContent::SettingsUpdate { is_deletion: true, ..} + // | TaskContent::SettingsUpdate { allow_index_creation: false, ..} + // | TaskContent::DocumentAddition { allow_index_creation: false, ..} + // | TaskContent::IndexUpdate { .. 
} )) + // { + // assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Failed { .. }), "{:?}", result); + // } else { + // assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeeded { .. }), "{:?}", result); + // } + // } + // }); + // } + // } } diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index e0471567b..75f0623b2 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -55,6 +55,7 @@ mod test { task::{Task, TaskContent}, }; use crate::update_file_store::{Result as FileStoreResult, UpdateFileStore}; + use crate::IndexUid; use super::*; use milli::update::IndexDocumentsMethod; @@ -103,13 +104,13 @@ mod test { let task = Task { id: 1, - index_uid: None, content: TaskContent::DocumentAddition { content_uuid, merge_strategy: IndexDocumentsMethod::ReplaceDocuments, primary_key: None, documents_count: 100, allow_index_creation: true, + index_uid: IndexUid::new_unchecked("test"), }, events: Vec::new(), }; @@ -130,7 +131,6 @@ mod test { let task = Task { id: 1, - index_uid: None, content: TaskContent::Dump { uid: String::from("hello"), }, diff --git a/meilisearch-lib/src/tasks/handlers/mod.rs b/meilisearch-lib/src/tasks/handlers/mod.rs index 6e28636ed..8f02de8b9 100644 --- a/meilisearch-lib/src/tasks/handlers/mod.rs +++ b/meilisearch-lib/src/tasks/handlers/mod.rs @@ -17,9 +17,9 @@ mod test { TaskContent::DocumentAddition { .. } => { BatchContent::DocumentsAdditionBatch(vec![task]) } - TaskContent::DocumentDeletion(_) + TaskContent::DocumentDeletion { .. } | TaskContent::SettingsUpdate { .. } - | TaskContent::IndexDeletion + | TaskContent::IndexDeletion { .. } | TaskContent::IndexCreation { .. } | TaskContent::IndexUpdate { .. } => BatchContent::IndexUpdate(task), TaskContent::Dump { .. 
} => BatchContent::Dump(task), diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 36534f358..76294b6e7 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -131,6 +131,22 @@ enum TaskListIdentifier { Dump, } +impl From<&Task> for TaskListIdentifier { + fn from(task: &Task) -> Self { + match &task.content { + TaskContent::DocumentAddition { index_uid, .. } + | TaskContent::DocumentDeletion { index_uid, .. } + | TaskContent::SettingsUpdate { index_uid, .. } + | TaskContent::IndexDeletion { index_uid } + | TaskContent::IndexCreation { index_uid, .. } + | TaskContent::IndexUpdate { index_uid, .. } => { + TaskListIdentifier::Index(index_uid.as_str().to_string()) + } + TaskContent::Dump { .. } => TaskListIdentifier::Dump, + } + } +} + #[derive(Default)] struct TaskQueue { /// Maps index uids to their TaskList, for quick access @@ -142,11 +158,8 @@ struct TaskQueue { impl TaskQueue { fn insert(&mut self, task: Task) { let id = task.id; - let uid = match task.index_uid { - Some(uid) => TaskListIdentifier::Index(uid.into_inner()), - None if matches!(task.content, TaskContent::Dump { .. }) => TaskListIdentifier::Dump, - None => unreachable!("invalid task state"), - }; + let uid = TaskListIdentifier::from(&task); + let kind = match task.content { TaskContent::DocumentAddition { documents_count, @@ -163,9 +176,9 @@ impl TaskQueue { number: documents_count, }, TaskContent::Dump { .. } => TaskType::Dump, - TaskContent::DocumentDeletion(_) + TaskContent::DocumentDeletion { .. } | TaskContent::SettingsUpdate { .. } - | TaskContent::IndexDeletion + | TaskContent::IndexDeletion { .. } | TaskContent::IndexCreation { .. } | TaskContent::IndexUpdate { .. 
} => TaskType::IndexUpdate, _ => unreachable!("unhandled task type"), @@ -528,25 +541,25 @@ mod test { use super::*; - fn gen_task(id: TaskId, index_uid: Option<&str>, content: TaskContent) -> Task { + fn gen_task(id: TaskId, content: TaskContent) -> Task { Task { id, - index_uid: index_uid.map(IndexUid::new_unchecked), content, events: vec![], } } #[test] + #[rustfmt::skip] fn register_updates_multiples_indexes() { let mut queue = TaskQueue::default(); - queue.insert(gen_task(0, Some("test1"), TaskContent::IndexDeletion)); - queue.insert(gen_task(1, Some("test2"), TaskContent::IndexDeletion)); - queue.insert(gen_task(2, Some("test2"), TaskContent::IndexDeletion)); - queue.insert(gen_task(3, Some("test2"), TaskContent::IndexDeletion)); - queue.insert(gen_task(4, Some("test1"), TaskContent::IndexDeletion)); - queue.insert(gen_task(5, Some("test1"), TaskContent::IndexDeletion)); - queue.insert(gen_task(6, Some("test2"), TaskContent::IndexDeletion)); + queue.insert(gen_task(0, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test1") })); + queue.insert(gen_task(1, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test2") })); + queue.insert(gen_task(2, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test2") })); + queue.insert(gen_task(3, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test2") })); + queue.insert(gen_task(4, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test1") })); + queue.insert(gen_task(5, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test1") })); + queue.insert(gen_task(6, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test2") })); let test1_tasks = queue .head_mut(|tasks| tasks.drain().map(|t| t.id).collect::>()) @@ -564,31 +577,30 @@ mod test { assert!(queue.queue.is_empty()); } - #[test] - fn test_make_batch() { - let mut queue = TaskQueue::default(); - let content = TaskContent::DocumentAddition { + fn 
gen_doc_addition_task_content(index_uid: &str) -> TaskContent { + TaskContent::DocumentAddition { content_uuid: Uuid::new_v4(), merge_strategy: IndexDocumentsMethod::ReplaceDocuments, primary_key: Some("test".to_string()), documents_count: 0, allow_index_creation: true, - }; - queue.insert(gen_task(0, Some("test1"), content.clone())); - queue.insert(gen_task(1, Some("test2"), content.clone())); - queue.insert(gen_task(2, Some("test2"), TaskContent::IndexDeletion)); - queue.insert(gen_task(3, Some("test2"), content.clone())); - queue.insert(gen_task(4, Some("test1"), content.clone())); - queue.insert(gen_task(5, Some("test1"), TaskContent::IndexDeletion)); - queue.insert(gen_task(6, Some("test2"), content.clone())); - queue.insert(gen_task(7, Some("test1"), content)); - queue.insert(gen_task( - 8, - None, - TaskContent::Dump { - uid: "adump".to_owned(), - }, - )); + index_uid: IndexUid::new_unchecked(index_uid), + } + } + + #[test] + #[rustfmt::skip] + fn test_make_batch() { + let mut queue = TaskQueue::default(); + queue.insert(gen_task(0, gen_doc_addition_task_content("test1"))); + queue.insert(gen_task(1, gen_doc_addition_task_content("test2"))); + queue.insert(gen_task(2, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test2")})); + queue.insert(gen_task(3, gen_doc_addition_task_content("test2"))); + queue.insert(gen_task(4, gen_doc_addition_task_content("test1"))); + queue.insert(gen_task(5, TaskContent::IndexDeletion { index_uid: IndexUid::new_unchecked("test1")})); + queue.insert(gen_task(6, gen_doc_addition_task_content("test2"))); + queue.insert(gen_task(7, gen_doc_addition_task_content("test1"))); + queue.insert(gen_task(8, TaskContent::Dump { uid: "adump".to_owned() })); let config = SchedulerConfig::default(); diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index 3b94cd991..f19f6cbfe 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -5,10 +5,8 @@ use 
time::OffsetDateTime; use uuid::Uuid; use super::batch::BatchId; -use crate::{ - index::{Settings, Unchecked}, - index_resolver::IndexUid, -}; +use crate::index::{Settings, Unchecked}; +use crate::index_resolver::IndexUid; pub type TaskId = u32; @@ -90,13 +88,6 @@ pub struct Task { /// then this is None // TODO: when next forward breaking dumps, it would be a good idea to move this field inside of // the TaskContent. - #[cfg_attr( - test, - proptest( - strategy = "proptest::option::weighted(proptest::option::Probability::new(0.99), IndexUid::arbitrary())" - ) - )] - pub index_uid: Option, pub content: TaskContent, pub events: Vec, } @@ -123,6 +114,18 @@ impl Task { _ => None, } } + + pub fn index_uid(&self) -> Option<&str> { + match &self.content { + TaskContent::DocumentAddition { index_uid, .. } + | TaskContent::DocumentDeletion { index_uid, .. } + | TaskContent::SettingsUpdate { index_uid, .. } + | TaskContent::IndexDeletion { index_uid } + | TaskContent::IndexCreation { index_uid, .. } + | TaskContent::IndexUpdate { index_uid, .. } => Some(index_uid.as_str()), + TaskContent::Dump { .. 
} => None, + } + } } #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] @@ -137,6 +140,7 @@ pub enum DocumentDeletion { #[allow(clippy::large_enum_variant)] pub enum TaskContent { DocumentAddition { + index_uid: IndexUid, #[cfg_attr(test, proptest(value = "Uuid::new_v4()"))] content_uuid: Uuid, #[cfg_attr(test, proptest(strategy = "test::index_document_method_strategy()"))] @@ -145,18 +149,26 @@ pub enum TaskContent { documents_count: usize, allow_index_creation: bool, }, - DocumentDeletion(DocumentDeletion), + DocumentDeletion { + index_uid: IndexUid, + deletion: DocumentDeletion, + }, SettingsUpdate { + index_uid: IndexUid, settings: Settings, /// Indicates whether the task was a deletion is_deletion: bool, allow_index_creation: bool, }, - IndexDeletion, + IndexDeletion { + index_uid: IndexUid, + }, IndexCreation { + index_uid: IndexUid, primary_key: Option, }, IndexUpdate { + index_uid: IndexUid, primary_key: Option, }, Dump { diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index 6c7584683..33f4bfb50 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -14,7 +14,6 @@ use super::error::TaskError; use super::scheduler::Processing; use super::task::{Task, TaskContent, TaskId}; use super::Result; -use crate::index_resolver::IndexUid; use crate::tasks::task::TaskEvent; use crate::update_file_store::UpdateFileStore; @@ -32,11 +31,11 @@ pub struct TaskFilter { impl TaskFilter { fn pass(&self, task: &Task) -> bool { - match task.index_uid { - Some(ref index_uid) => self + match task.index_uid() { + Some(index_uid) => self .indexes .as_ref() - .map_or(true, |indexes| indexes.contains(index_uid.as_str())), + .map_or(true, |indexes| indexes.contains(index_uid)), None => false, } } @@ -75,11 +74,7 @@ impl TaskStore { Ok(Self { store }) } - pub async fn register( - &self, - index_uid: Option, - content: TaskContent, - ) -> Result { + pub async fn 
register(&self, content: TaskContent) -> Result { debug!("registering update: {:?}", content); let store = self.store.clone(); let task = tokio::task::spawn_blocking(move || -> Result { @@ -88,7 +83,6 @@ impl TaskStore { let created_at = TaskEvent::Created(OffsetDateTime::now_utc()); let task = Task { id: next_task_id, - index_uid, content, events: vec![created_at], }; @@ -273,7 +267,10 @@ impl TaskStore { #[cfg(test)] pub mod test { - use crate::tasks::{scheduler::Processing, task_store::store::test::tmp_env}; + use crate::{ + tasks::{scheduler::Processing, task_store::store::test::tmp_env}, + IndexUid, + }; use super::*; @@ -359,13 +356,9 @@ pub mod test { } } - pub async fn register( - &self, - index_uid: Option, - content: TaskContent, - ) -> Result { + pub async fn register(&self, content: TaskContent) -> Result { match self { - Self::Real(s) => s.register(index_uid, content).await, + Self::Real(s) => s.register(content).await, Self::Mock(_m) => todo!(), } } @@ -393,8 +386,10 @@ pub mod test { let gen_task = |id: TaskId| Task { id, - index_uid: Some(IndexUid::new_unchecked("test")), - content: TaskContent::IndexCreation { primary_key: None }, + content: TaskContent::IndexCreation { + primary_key: None, + index_uid: IndexUid::new_unchecked("test"), + }, events: Vec::new(), }; diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index 5b17da716..f044bd077 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -77,7 +77,7 @@ impl Store { pub fn put(&self, txn: &mut RwTxn, task: &Task) -> Result<()> { self.tasks.put(txn, &BEU32::new(task.id), task)?; // only add the task to the indexes index if it has an index_uid - if let Some(index_uid) = &task.index_uid { + if let Some(index_uid) = task.index_uid() { let mut tasks_set = self .index_uid_task_ids .get(txn, index_uid)? 
@@ -287,8 +287,9 @@ pub mod test { let tasks = (0..100) .map(|_| Task { id: rand::random(), - index_uid: Some(IndexUid::new_unchecked("test")), - content: TaskContent::IndexDeletion, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test"), + }, events: vec![], }) .collect::>(); @@ -318,15 +319,17 @@ pub mod test { let task_1 = Task { id: 1, - index_uid: Some(IndexUid::new_unchecked("test")), - content: TaskContent::IndexDeletion, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test"), + }, events: vec![], }; let task_2 = Task { id: 0, - index_uid: Some(IndexUid::new_unchecked("test1")), - content: TaskContent::IndexDeletion, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test1"), + }, events: vec![], }; @@ -341,29 +344,21 @@ pub mod test { txn.abort().unwrap(); assert_eq!(tasks.len(), 1); - assert_eq!( - tasks - .first() - .as_ref() - .unwrap() - .index_uid - .as_ref() - .unwrap() - .as_str(), - "test" - ); + assert_eq!(tasks.first().as_ref().unwrap().index_uid().unwrap(), "test"); // same thing but invert the ids let task_1 = Task { id: 0, - index_uid: Some(IndexUid::new_unchecked("test")), - content: TaskContent::IndexDeletion, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test"), + }, events: vec![], }; let task_2 = Task { id: 1, - index_uid: Some(IndexUid::new_unchecked("test1")), - content: TaskContent::IndexDeletion, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test1"), + }, events: vec![], }; @@ -378,14 +373,7 @@ pub mod test { assert_eq!(tasks.len(), 1); assert_eq!( - &*tasks - .first() - .as_ref() - .unwrap() - .index_uid - .as_ref() - .unwrap() - .as_str(), + &*tasks.first().as_ref().unwrap().index_uid().unwrap(), "test" ); } From 3e465430608dfb4a2464f0fa3cde11fbbef3fa78 Mon Sep 17 00:00:00 2001 From: Ryan Russell Date: Sat, 4 Jun 2022 20:42:53 -0500 Subject: [PATCH 107/185] Improve Store Readability 
Signed-off-by: Ryan Russell --- meilisearch-auth/src/store.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 69c4cbd57..48ff6e259 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -205,7 +205,7 @@ impl HeedAuthStore { } /// Codec allowing to retrieve the expiration date of an action, -/// optionnally on a spcific index, for a given key. +/// optionally on a specific index, for a given key. pub struct KeyIdActionCodec; impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec { From 4b6c3e72ffff99870412cccf44f9c160b7caf31e Mon Sep 17 00:00:00 2001 From: Ryan Russell Date: Sat, 4 Jun 2022 21:38:04 -0500 Subject: [PATCH 108/185] Improve Lib Readability Signed-off-by: Ryan Russell --- meilisearch-error/src/lib.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/meilisearch-error/src/lib.rs b/meilisearch-error/src/lib.rs index 6e6273db2..56ac65f9e 100644 --- a/meilisearch-error/src/lib.rs +++ b/meilisearch-error/src/lib.rs @@ -73,12 +73,12 @@ impl aweb::error::ResponseError for ResponseError { pub trait ErrorCode: std::error::Error { fn error_code(&self) -> Code; - /// returns the HTTP status code ascociated with the error + /// returns the HTTP status code associated with the error fn http_status(&self) -> StatusCode { self.error_code().http() } - /// returns the doc url ascociated with the error + /// returns the doc url associated with the error fn error_url(&self) -> String { self.error_code().url() } @@ -173,7 +173,7 @@ pub enum Code { } impl Code { - /// ascociate a `Code` variant to the actual ErrCode + /// associate a `Code` variant to the actual ErrCode fn err_code(&self) -> ErrCode { use Code::*; @@ -286,7 +286,7 @@ impl Code { } } - /// return the HTTP status code ascociated with the `Code` + /// return the HTTP status code associated with the `Code` fn http(&self) -> StatusCode { self.err_code().status_code } @@ 
-301,7 +301,7 @@ impl Code { self.err_code().error_type.to_string() } - /// return the doc url ascociated with the error + /// return the doc url associated with the error fn url(&self) -> String { format!("https://docs.meilisearch.com/errors#{}", self.name()) } From 80c156df3fae586b80ca222ea9a90cbee2bc4c35 Mon Sep 17 00:00:00 2001 From: walter Date: Sun, 5 Jun 2022 00:49:36 -0400 Subject: [PATCH 109/185] Add custom TaskTypeError for TaskType --- meilisearch-http/src/task.rs | 37 ++++++++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index d9360039d..d098d6710 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -1,4 +1,5 @@ -use std::fmt::Write; +use std::error::Error; +use std::fmt::{self, Write}; use std::str::FromStr; use std::write; @@ -39,10 +40,29 @@ impl From for TaskType { } } -impl FromStr for TaskType { - type Err = String; +#[derive(Debug)] +pub struct TaskTypeError { + invalid_type: String, +} - fn from_str(status: &str) -> Result { +impl fmt::Display for TaskTypeError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "invalid task type `{}`, expecting one of: \ + indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \ + documentDeletion, settingsUpdate, dumpCreation", + self.invalid_type + ) + } +} + +impl Error for TaskTypeError {} + +impl FromStr for TaskType { + type Err = TaskTypeError; + + fn from_str(status: &str) -> Result { if status.eq_ignore_ascii_case("indexCreation") { Ok(TaskType::IndexCreation) } else if status.eq_ignore_ascii_case("indexUpdate") { @@ -58,12 +78,9 @@ impl FromStr for TaskType { } else if status.eq_ignore_ascii_case("dumpCreation") { Ok(TaskType::DumpCreation) } else { - Err(format!( - "invalid task type `{}`, expecting one of: \ - indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \ - documentDeletion, settingsUpdate, 
dumpCreation", - status - )) + Err(TaskTypeError { + invalid_type: status.to_string(), + }) } } } From 0e7e16ae72bcd3254d1bf4c214a4e966ea31fbcc Mon Sep 17 00:00:00 2001 From: walter Date: Sun, 5 Jun 2022 00:49:57 -0400 Subject: [PATCH 110/185] Add custom TaskStatusError for TaskStatus --- meilisearch-http/src/task.rs | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index d098d6710..27105ba5b 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -94,10 +94,28 @@ pub enum TaskStatus { Failed, } -impl FromStr for TaskStatus { - type Err = String; +#[derive(Debug)] +pub struct TaskStatusError { + invalid_status: String, +} - fn from_str(status: &str) -> Result { +impl fmt::Display for TaskStatusError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "invalid task status `{}`, expecting one of: \ + enqueued, processing, succeeded, or failed", + self.invalid_status, + ) + } +} + +impl Error for TaskStatusError {} + +impl FromStr for TaskStatus { + type Err = TaskStatusError; + + fn from_str(status: &str) -> Result { if status.eq_ignore_ascii_case("enqueued") { Ok(TaskStatus::Enqueued) } else if status.eq_ignore_ascii_case("processing") { @@ -107,11 +125,9 @@ impl FromStr for TaskStatus { } else if status.eq_ignore_ascii_case("failed") { Ok(TaskStatus::Failed) } else { - Err(format!( - "invalid task status `{}`, expecting one of: \ - enqueued, processing, succeeded, or failed", - status, - )) + Err(TaskStatusError { + invalid_status: status.to_string(), + }) } } } From ba55905377041a35adade34a20eb0aaec0dcb315 Mon Sep 17 00:00:00 2001 From: walter Date: Sun, 5 Jun 2022 02:26:48 -0400 Subject: [PATCH 111/185] Add custom IndexUidFormatError for IndexUid --- meilisearch-lib/src/index_resolver/mod.rs | 49 ++++++++++++++++++++--- 1 file changed, 43 insertions(+), 6 deletions(-) diff --git 
a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index ac82f7a3d..a7991e8ef 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -3,6 +3,8 @@ pub mod index_store; pub mod meta_store; use std::convert::{TryFrom, TryInto}; +use std::error::Error; +use std::fmt; use std::path::Path; use std::str::FromStr; use std::sync::Arc; @@ -81,18 +83,53 @@ impl std::ops::Deref for IndexUid { } impl TryInto for String { - type Error = IndexResolverError; + type Error = IndexUidFormatError; - fn try_into(self) -> Result { - IndexUid::new(self) + fn try_into(self) -> std::result::Result { + IndexUid::from_str(&self) + } +} + +#[derive(Debug)] +pub struct IndexUidFormatError { + invalid_uid: String, +} + +impl fmt::Display for IndexUidFormatError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "invalid index uid `{}`, the uid must be an integer \ + or a string containing only alphanumeric characters \ + a-z A-Z 0-9, hyphens - and underscores _.", + self.invalid_uid, + ) + } +} + +impl Error for IndexUidFormatError {} + +impl From for IndexResolverError { + fn from(error: IndexUidFormatError) -> Self { + Self::BadlyFormatted(error.invalid_uid) } } impl FromStr for IndexUid { - type Err = IndexResolverError; + type Err = IndexUidFormatError; - fn from_str(s: &str) -> Result { - IndexUid::new(s.to_string()) + fn from_str(uid: &str) -> std::result::Result { + if !uid + .chars() + .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') + || !(1..=400).contains(&uid.len()) + { + Err(IndexUidFormatError { + invalid_uid: uid.to_string(), + }) + } else { + Ok(IndexUid(uid.to_string())) + } } } From 10d3b367dc7e68f8ae15778a39901e6811449ad6 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 10:48:02 +0200 Subject: [PATCH 112/185] Simplify the const default values --- meilisearch-http/src/routes/indexes/search.rs | 14 +++---- 
meilisearch-lib/src/index/mod.rs | 3 +- meilisearch-lib/src/index/search.rs | 39 +++++-------------- meilisearch-lib/src/index_controller/mod.rs | 8 ++-- 4 files changed, 22 insertions(+), 42 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index 869bc4931..eb4ee6d34 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -3,8 +3,8 @@ use log::debug; use meilisearch_auth::IndexSearchRules; use meilisearch_error::ResponseError; use meilisearch_lib::index::{ - default_crop_length, default_crop_marker, default_highlight_post_tag, - default_highlight_pre_tag, SearchQuery, DEFAULT_SEARCH_LIMIT, + SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, + DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, }; use meilisearch_lib::MeiliSearch; use serde::Deserialize; @@ -30,7 +30,7 @@ pub struct SearchQueryGet { limit: Option, attributes_to_retrieve: Option, attributes_to_crop: Option, - #[serde(default = "default_crop_length")] + #[serde(default = "DEFAULT_CROP_LENGTH")] crop_length: usize, attributes_to_highlight: Option, filter: Option, @@ -38,11 +38,11 @@ pub struct SearchQueryGet { #[serde(default = "Default::default")] show_matches_position: bool, facets: Option, - #[serde(default = "default_highlight_pre_tag")] + #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")] highlight_pre_tag: String, - #[serde(default = "default_highlight_post_tag")] + #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")] highlight_post_tag: String, - #[serde(default = "default_crop_marker")] + #[serde(default = "DEFAULT_CROP_MARKER")] crop_marker: String, } @@ -77,7 +77,7 @@ impl From for SearchQuery { Self { q: other.q, offset: other.offset, - limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT), + limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT), attributes_to_retrieve, attributes_to_crop, crop_length: other.crop_length, diff --git 
a/meilisearch-lib/src/index/mod.rs b/meilisearch-lib/src/index/mod.rs index b46d97849..e6c831a01 100644 --- a/meilisearch-lib/src/index/mod.rs +++ b/meilisearch-lib/src/index/mod.rs @@ -1,6 +1,5 @@ pub use search::{ - default_crop_length, default_crop_marker, default_highlight_post_tag, - default_highlight_pre_tag, SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, + SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, }; pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecked}; diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 097a91570..91a46600f 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -20,30 +20,11 @@ use super::index::Index; pub type Document = serde_json::Map; type MatchesPosition = BTreeMap>; -pub const DEFAULT_SEARCH_LIMIT: usize = 20; -const fn default_search_limit() -> usize { - DEFAULT_SEARCH_LIMIT -} - -pub const DEFAULT_CROP_LENGTH: usize = 10; -pub const fn default_crop_length() -> usize { - DEFAULT_CROP_LENGTH -} - -pub const DEFAULT_CROP_MARKER: &str = "…"; -pub fn default_crop_marker() -> String { - DEFAULT_CROP_MARKER.to_string() -} - -pub const DEFAULT_HIGHLIGHT_PRE_TAG: &str = ""; -pub fn default_highlight_pre_tag() -> String { - DEFAULT_HIGHLIGHT_PRE_TAG.to_string() -} - -pub const DEFAULT_HIGHLIGHT_POST_TAG: &str = ""; -pub fn default_highlight_post_tag() -> String { - DEFAULT_HIGHLIGHT_POST_TAG.to_string() -} +pub const DEFAULT_SEARCH_LIMIT: fn() -> usize = || 20; +pub const DEFAULT_CROP_LENGTH: fn() -> usize = || 10; +pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string(); +pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "".to_string(); +pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "".to_string(); /// The maximimum number of results that the engine /// will be able to return in 
one search call. @@ -54,11 +35,11 @@ pub const HARD_RESULT_LIMIT: usize = 1000; pub struct SearchQuery { pub q: Option, pub offset: Option, - #[serde(default = "default_search_limit")] + #[serde(default = "DEFAULT_SEARCH_LIMIT")] pub limit: usize, pub attributes_to_retrieve: Option>, pub attributes_to_crop: Option>, - #[serde(default = "default_crop_length")] + #[serde(default = "DEFAULT_CROP_LENGTH")] pub crop_length: usize, pub attributes_to_highlight: Option>, // Default to false @@ -67,11 +48,11 @@ pub struct SearchQuery { pub filter: Option, pub sort: Option>, pub facets: Option>, - #[serde(default = "default_highlight_pre_tag")] + #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")] pub highlight_pre_tag: String, - #[serde(default = "default_highlight_post_tag")] + #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")] pub highlight_post_tag: String, - #[serde(default = "default_crop_marker")] + #[serde(default = "DEFAULT_CROP_MARKER")] pub crop_marker: String, } diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index ecca9ac63..7eb4f985b 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -652,7 +652,7 @@ mod test { use crate::index::error::Result as IndexResult; use crate::index::Index; use crate::index::{ - default_crop_marker, default_highlight_post_tag, default_highlight_pre_tag, + DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, }; use crate::index_resolver::index_store::MockIndexStore; use crate::index_resolver::meta_store::MockIndexMetaStore; @@ -692,9 +692,9 @@ mod test { filter: None, sort: None, facets: None, - highlight_pre_tag: default_highlight_pre_tag(), - highlight_post_tag: default_highlight_post_tag(), - crop_marker: default_crop_marker(), + highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(), + highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(), + crop_marker: DEFAULT_CROP_MARKER(), }; let result = SearchResult { From 
64b5b2e1f817f919bf2eb89285c92ac5530760f8 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 2 Jun 2022 11:14:46 +0200 Subject: [PATCH 113/185] Use serde-cs::CS with StarOr to reduce the logic duplication --- meilisearch-http/src/routes/indexes/search.rs | 23 +++++++++++-------- meilisearch-http/src/routes/mod.rs | 5 +++- meilisearch-http/src/routes/tasks.rs | 6 ++--- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index eb4ee6d34..2f3f4a83b 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -8,11 +8,13 @@ use meilisearch_lib::index::{ }; use meilisearch_lib::MeiliSearch; use serde::Deserialize; +use serde_cs::vec::CS; use serde_json::Value; use crate::analytics::{Analytics, SearchAggregator}; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; +use crate::routes::{fold_star_or, StarOr}; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service( @@ -28,16 +30,16 @@ pub struct SearchQueryGet { q: Option, offset: Option, limit: Option, - attributes_to_retrieve: Option, - attributes_to_crop: Option, + attributes_to_retrieve: Option>>, + attributes_to_crop: Option>>, #[serde(default = "DEFAULT_CROP_LENGTH")] crop_length: usize, - attributes_to_highlight: Option, + attributes_to_highlight: Option>>, filter: Option, sort: Option, #[serde(default = "Default::default")] show_matches_position: bool, - facets: Option, + facets: Option>>, #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")] highlight_pre_tag: String, #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")] @@ -50,19 +52,20 @@ impl From for SearchQuery { fn from(other: SearchQueryGet) -> Self { let attributes_to_retrieve = other .attributes_to_retrieve - .map(|attrs| attrs.split(',').map(String::from).collect()); + .map(CS::into_inner) + .and_then(fold_star_or); let 
attributes_to_crop = other .attributes_to_crop - .map(|attrs| attrs.split(',').map(String::from).collect()); + .map(CS::into_inner) + .and_then(fold_star_or); let attributes_to_highlight = other .attributes_to_highlight - .map(|attrs| attrs.split(',').map(String::from).collect()); + .map(CS::into_inner) + .and_then(fold_star_or); - let facets = other - .facets - .map(|attrs| attrs.split(',').map(String::from).collect()); + let facets = other.facets.map(CS::into_inner).and_then(fold_star_or); let filter = match other.filter { Some(f) => match serde_json::from_str(&f) { diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index 1b37396e9..a438d12d7 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -49,7 +49,10 @@ impl FromStr for StarOr { /// Extracts the raw values from the `StarOr` types and /// return None if a `StarOr::Star` is encountered. -pub fn fold_star_or(content: impl IntoIterator>) -> Option> { +pub fn fold_star_or(content: impl IntoIterator>) -> Option +where + O: FromIterator, +{ content .into_iter() .map(|value| match value { diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 0ab4678b7..b13c04dc7 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -81,9 +81,9 @@ async fn get_tasks( // We first transform a potential indexUid=* into a "not specified indexUid filter" // for every one of the filters: type, status, and indexUid. 
- let type_ = type_.map(CS::into_inner).and_then(fold_star_or); - let status = status.map(CS::into_inner).and_then(fold_star_or); - let index_uid = index_uid.map(CS::into_inner).and_then(fold_star_or); + let type_: Option> = type_.map(CS::into_inner).and_then(fold_star_or); + let status: Option> = status.map(CS::into_inner).and_then(fold_star_or); + let index_uid: Option> = index_uid.map(CS::into_inner).and_then(fold_star_or); // Then we filter on potential indexes and make sure that the search filter // restrictions are also applied. From 277a0a79677432c7b91a31d3126b15e3b70b37ec Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 6 Jun 2022 10:17:33 +0200 Subject: [PATCH 114/185] Bump serde-cs to simplify our usage of the star_or function --- Cargo.lock | 4 +-- meilisearch-http/Cargo.toml | 2 +- .../src/routes/indexes/documents.rs | 4 +-- meilisearch-http/src/routes/indexes/search.rs | 29 ++++--------------- meilisearch-http/src/routes/tasks.rs | 6 ++-- 5 files changed, 13 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 317ae620a..a1be24517 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3078,9 +3078,9 @@ dependencies = [ [[package]] name = "serde-cs" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d5b0435c9139761fbe5abeb1283234bcfbde88fadc2ae432579648fbce72ad" +checksum = "8202c9f3f58762d274952790ff8a1f1f625b5664f75e5dc1952c8dcacc64a925" dependencies = [ "serde", ] diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index b9771afa2..ac4fed7b1 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -62,7 +62,7 @@ rustls = "0.20.4" rustls-pemfile = "0.3.0" segment = { version = "0.2.0", optional = true } serde = { version = "1.0.136", features = ["derive"] } -serde-cs = "0.2.2" +serde-cs = "0.2.3" serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" siphasher = "0.3.10" diff --git 
a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index f506e587c..b5f578a56 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -99,7 +99,7 @@ pub async fn get_document( let index = path.index_uid.clone(); let id = path.document_id.clone(); let GetDocument { fields } = params.into_inner(); - let attributes_to_retrieve = fields.map(CS::into_inner).and_then(fold_star_or); + let attributes_to_retrieve = fields.and_then(fold_star_or); let document = meilisearch .document(index, id, attributes_to_retrieve) @@ -143,7 +143,7 @@ pub async fn get_all_documents( offset, fields, } = params.into_inner(); - let attributes_to_retrieve = fields.map(CS::into_inner).and_then(fold_star_or); + let attributes_to_retrieve = fields.and_then(fold_star_or); let (total, documents) = meilisearch .documents(path.into_inner(), offset, limit, attributes_to_retrieve) diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index 2f3f4a83b..4eaa65b9d 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -50,23 +50,6 @@ pub struct SearchQueryGet { impl From for SearchQuery { fn from(other: SearchQueryGet) -> Self { - let attributes_to_retrieve = other - .attributes_to_retrieve - .map(CS::into_inner) - .and_then(fold_star_or); - - let attributes_to_crop = other - .attributes_to_crop - .map(CS::into_inner) - .and_then(fold_star_or); - - let attributes_to_highlight = other - .attributes_to_highlight - .map(CS::into_inner) - .and_then(fold_star_or); - - let facets = other.facets.map(CS::into_inner).and_then(fold_star_or); - let filter = match other.filter { Some(f) => match serde_json::from_str(&f) { Ok(v) => Some(v), @@ -75,20 +58,18 @@ impl From for SearchQuery { None => None, }; - let sort = other.sort.map(|attr| fix_sort_query_parameters(&attr)); - Self { q: other.q, 
offset: other.offset, limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT), - attributes_to_retrieve, - attributes_to_crop, + attributes_to_retrieve: other.attributes_to_retrieve.and_then(fold_star_or), + attributes_to_crop: other.attributes_to_crop.and_then(fold_star_or), crop_length: other.crop_length, - attributes_to_highlight, + attributes_to_highlight: other.attributes_to_highlight.and_then(fold_star_or), filter, - sort, + sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)), show_matches_position: other.show_matches_position, - facets, + facets: other.facets.and_then(fold_star_or), highlight_pre_tag: other.highlight_pre_tag, highlight_post_tag: other.highlight_post_tag, crop_marker: other.crop_marker, diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index b13c04dc7..14716ff6b 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -81,9 +81,9 @@ async fn get_tasks( // We first transform a potential indexUid=* into a "not specified indexUid filter" // for every one of the filters: type, status, and indexUid. - let type_: Option> = type_.map(CS::into_inner).and_then(fold_star_or); - let status: Option> = status.map(CS::into_inner).and_then(fold_star_or); - let index_uid: Option> = index_uid.map(CS::into_inner).and_then(fold_star_or); + let type_: Option> = type_.and_then(fold_star_or); + let status: Option> = status.and_then(fold_star_or); + let index_uid: Option> = index_uid.and_then(fold_star_or); // Then we filter on potential indexes and make sure that the search filter // restrictions are also applied. 
From e5b760c59a256fdd3877a493a8341b76eb2a48c8 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 6 Jun 2022 10:44:46 +0200 Subject: [PATCH 115/185] Fix the segment analytics tests --- meilisearch-http/src/analytics/segment_analytics.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 20df96942..562b99c16 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -426,10 +426,10 @@ impl SearchAggregator { ret.max_limit = query.limit; ret.max_offset = query.offset.unwrap_or_default(); - ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG; - ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG; - ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER; - ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH; + ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG(); + ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG(); + ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER(); + ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH(); ret.show_matches_position = query.show_matches_position; ret From fd1190392025db36e4e2022e05e38f4c8c119c41 Mon Sep 17 00:00:00 2001 From: Thearas Date: Tue, 7 Jun 2022 03:38:23 +0800 Subject: [PATCH 116/185] remove the connection timeout --- meilisearch-http/src/main.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/meilisearch-http/src/main.rs b/meilisearch-http/src/main.rs index 498bbb82d..9a603947a 100644 --- a/meilisearch-http/src/main.rs +++ b/meilisearch-http/src/main.rs @@ -2,6 +2,7 @@ use std::env; use std::sync::Arc; use actix_web::HttpServer; +use actix_web::http::KeepAlive; use clap::Parser; use meilisearch_auth::AuthController; use meilisearch_http::analytics; @@ -83,7 +84,8 @@ 
async fn run_http( ) }) // Disable signals allows the server to terminate immediately when a user enter CTRL-C - .disable_signals(); + .disable_signals() + .keep_alive(KeepAlive::Os); if let Some(config) = opt.get_ssl_config()? { http_server From 9b9cbc815baf3c7fc1d5f79b879e5424cdacf1a3 Mon Sep 17 00:00:00 2001 From: Thearas Date: Tue, 7 Jun 2022 03:50:39 +0800 Subject: [PATCH 117/185] fmt --- meilisearch-http/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-http/src/main.rs b/meilisearch-http/src/main.rs index 9a603947a..f903663eb 100644 --- a/meilisearch-http/src/main.rs +++ b/meilisearch-http/src/main.rs @@ -1,8 +1,8 @@ use std::env; use std::sync::Arc; -use actix_web::HttpServer; use actix_web::http::KeepAlive; +use actix_web::HttpServer; use clap::Parser; use meilisearch_auth::AuthController; use meilisearch_http::analytics; From bbd685af5e84691da59b4d8f294ef1b5207557b5 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Wed, 1 Jun 2022 09:53:07 +0200 Subject: [PATCH 118/185] move IndexResolver to real module --- meilisearch-lib/src/index_resolver/mod.rs | 574 +++++++++++----------- 1 file changed, 291 insertions(+), 283 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index ac82f7a3d..7eb564376 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -27,6 +27,8 @@ use self::meta_store::IndexMeta; pub type HardStateIndexResolver = IndexResolver; +pub use real::IndexResolver; + /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] @@ -96,326 +98,332 @@ impl FromStr for IndexUid { } } -pub struct IndexResolver { - index_uuid_store: U, - index_store: I, - pub file_store: UpdateFileStore, -} +mod real { + use super::*; -impl IndexResolver { - pub fn load_dump( - src: impl AsRef, - dst: impl AsRef, - 
index_db_size: usize, - env: Arc, - indexer_opts: &IndexerOpts, - ) -> anyhow::Result<()> { - HeedMetaStore::load_dump(&src, env)?; - let indexes_path = src.as_ref().join("indexes"); - let indexes = indexes_path.read_dir()?; - let indexer_config = IndexerConfig::try_from(indexer_opts)?; - for index in indexes { - Index::load_dump(&index?.path(), &dst, index_db_size, &indexer_config)?; - } - - Ok(()) + pub struct IndexResolver { + index_uuid_store: U, + index_store: I, + pub file_store: UpdateFileStore, } -} -impl IndexResolver -where - U: IndexMetaStore, - I: IndexStore, -{ - pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self { - Self { - index_uuid_store, - index_store, - file_store, + impl IndexResolver { + pub fn load_dump( + src: impl AsRef, + dst: impl AsRef, + index_db_size: usize, + env: Arc, + indexer_opts: &IndexerOpts, + ) -> anyhow::Result<()> { + HeedMetaStore::load_dump(&src, env)?; + let indexes_path = src.as_ref().join("indexes"); + let indexes = indexes_path.read_dir()?; + let indexer_config = IndexerConfig::try_from(indexer_opts)?; + for index in indexes { + Index::load_dump(&index?.path(), &dst, index_db_size, &indexer_config)?; + } + + Ok(()) } } - pub async fn process_document_addition_batch(&self, mut tasks: Vec) -> Vec { - fn get_content_uuid(task: &Task) -> Uuid { - match task { - Task { - content: TaskContent::DocumentAddition { content_uuid, .. }, - .. 
- } => *content_uuid, - _ => panic!("unexpected task in the document addition batch"), + impl IndexResolver + where + U: IndexMetaStore, + I: IndexStore, + { + pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self { + Self { + index_uuid_store, + index_store, + file_store, } } - let content_uuids = tasks.iter().map(get_content_uuid).collect::>(); - - match tasks.first() { - Some(Task { - id, - content: - TaskContent::DocumentAddition { - merge_strategy, - primary_key, - allow_index_creation, - index_uid, + pub async fn process_document_addition_batch(&self, mut tasks: Vec) -> Vec { + fn get_content_uuid(task: &Task) -> Uuid { + match task { + Task { + content: TaskContent::DocumentAddition { content_uuid, .. }, .. - }, - .. - }) => { - let primary_key = primary_key.clone(); - let method = *merge_strategy; + } => *content_uuid, + _ => panic!("unexpected task in the document addition batch"), + } + } - let index = if *allow_index_creation { - self.get_or_create_index(index_uid.clone(), *id).await - } else { - self.get_index(index_uid.as_str().to_string()).await - }; + let content_uuids = tasks.iter().map(get_content_uuid).collect::>(); - // If the index doesn't exist and we are not allowed to create it with the first - // task, we must fails the whole batch. 
- let now = OffsetDateTime::now_utc(); - let index = match index { - Ok(index) => index, - Err(e) => { - let error = ResponseError::from(e); - for task in tasks.iter_mut() { - task.events.push(TaskEvent::Failed { - error: error.clone(), - timestamp: now, - }); - } - return tasks; - } - }; - - let file_store = self.file_store.clone(); - let result = spawn_blocking(move || { - index.update_documents( - method, - primary_key, - file_store, - content_uuids.into_iter(), - ) - }) - .await; - - let event = match result { - Ok(Ok(result)) => TaskEvent::Succeeded { - timestamp: OffsetDateTime::now_utc(), - result: TaskResult::DocumentAddition { - indexed_documents: result.indexed_documents, + match tasks.first() { + Some(Task { + id, + content: + TaskContent::DocumentAddition { + merge_strategy, + primary_key, + allow_index_creation, + index_uid, + .. }, - }, - Ok(Err(e)) => TaskEvent::Failed { - timestamp: OffsetDateTime::now_utc(), - error: e.into(), - }, - Err(e) => TaskEvent::Failed { - timestamp: OffsetDateTime::now_utc(), - error: IndexResolverError::from(e).into(), - }, - }; - - for task in tasks.iter_mut() { - task.events.push(event.clone()); - } - - tasks - } - _ => panic!("invalid batch!"), - } - } - - pub async fn process_task(&self, task: &Task) -> Result { - match &task.content { - TaskContent::DocumentAddition { .. } => panic!("updates should be handled by batch"), - TaskContent::DocumentDeletion { - deletion: DocumentDeletion::Ids(ids), - index_uid, - } => { - let ids = ids.clone(); - let index = self.get_index(index_uid.clone().into_inner()).await?; - - let DocumentDeletionResult { - deleted_documents, .. 
- } = spawn_blocking(move || index.delete_documents(&ids)).await??; - - Ok(TaskResult::DocumentDeletion { deleted_documents }) - } - TaskContent::DocumentDeletion { - deletion: DocumentDeletion::Clear, - index_uid, - } => { - let index = self.get_index(index_uid.clone().into_inner()).await?; - let deleted_documents = spawn_blocking(move || -> IndexResult { - let number_documents = index.stats()?.number_of_documents; - index.clear_documents()?; - Ok(number_documents) - }) - .await??; - - Ok(TaskResult::ClearAll { deleted_documents }) - } - TaskContent::SettingsUpdate { - settings, - is_deletion, - allow_index_creation, - index_uid, - } => { - let index = if *is_deletion || !*allow_index_creation { - self.get_index(index_uid.clone().into_inner()).await? - } else { - self.get_or_create_index(index_uid.clone(), task.id).await? - }; - - let settings = settings.clone(); - spawn_blocking(move || index.update_settings(&settings.check())).await??; - - Ok(TaskResult::Other) - } - TaskContent::IndexDeletion { index_uid } => { - let index = self.delete_index(index_uid.clone().into_inner()).await?; - - let deleted_documents = spawn_blocking(move || -> IndexResult { - Ok(index.stats()?.number_of_documents) - }) - .await??; - - Ok(TaskResult::ClearAll { deleted_documents }) - } - TaskContent::IndexCreation { - primary_key, - index_uid, - } => { - let index = self.create_index(index_uid.clone(), task.id).await?; - - if let Some(primary_key) = primary_key { + .. 
+ }) => { let primary_key = primary_key.clone(); - spawn_blocking(move || index.update_primary_key(primary_key)).await??; - } + let method = *merge_strategy; - Ok(TaskResult::Other) - } - TaskContent::IndexUpdate { - primary_key, - index_uid, - } => { - let index = self.get_index(index_uid.clone().into_inner()).await?; + let index = if *allow_index_creation { + self.get_or_create_index(index_uid.clone(), *id).await + } else { + self.get_index(index_uid.as_str().to_string()).await + }; - if let Some(primary_key) = primary_key { - let primary_key = primary_key.clone(); - spawn_blocking(move || index.update_primary_key(primary_key)).await??; - } - - Ok(TaskResult::Other) - } - _ => unreachable!("Invalid task for index resolver"), - } - } - - pub async fn dump(&self, path: impl AsRef) -> Result<()> { - for (_, index) in self.list().await? { - index.dump(&path)?; - } - self.index_uuid_store.dump(path.as_ref().to_owned()).await?; - Ok(()) - } - - async fn create_index(&self, uid: IndexUid, creation_task_id: TaskId) -> Result { - match self.index_uuid_store.get(uid.into_inner()).await? { - (uid, Some(_)) => Err(IndexResolverError::IndexAlreadyExists(uid)), - (uid, None) => { - let uuid = Uuid::new_v4(); - let index = self.index_store.create(uuid).await?; - match self - .index_uuid_store - .insert( - uid, - IndexMeta { - uuid, - creation_task_id, - }, - ) - .await - { - Err(e) => { - match self.index_store.delete(uuid).await { - Ok(Some(index)) => { - index.close(); + // If the index doesn't exist and we are not allowed to create it with the first + // task, we must fails the whole batch. 
+ let now = OffsetDateTime::now_utc(); + let index = match index { + Ok(index) => index, + Err(e) => { + let error = ResponseError::from(e); + for task in tasks.iter_mut() { + task.events.push(TaskEvent::Failed { + error: error.clone(), + timestamp: now, + }); } - Ok(None) => (), - Err(e) => log::error!("Error while deleting index: {:?}", e), + return tasks; } - Err(e) + }; + + let file_store = self.file_store.clone(); + let result = spawn_blocking(move || { + index.update_documents( + method, + primary_key, + file_store, + content_uuids.into_iter(), + ) + }) + .await; + + let event = match result { + Ok(Ok(result)) => TaskEvent::Succeeded { + timestamp: OffsetDateTime::now_utc(), + result: TaskResult::DocumentAddition { + indexed_documents: result.indexed_documents, + }, + }, + Ok(Err(e)) => TaskEvent::Failed { + timestamp: OffsetDateTime::now_utc(), + error: e.into(), + }, + Err(e) => TaskEvent::Failed { + timestamp: OffsetDateTime::now_utc(), + error: IndexResolverError::from(e).into(), + }, + }; + + for task in tasks.iter_mut() { + task.events.push(event.clone()); } - Ok(()) => Ok(index), + + tasks } + _ => panic!("invalid batch!"), } } - } - /// Get or create an index with name `uid`. - pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result { - match self.create_index(uid, task_id).await { - Ok(index) => Ok(index), - Err(IndexResolverError::IndexAlreadyExists(uid)) => self.get_index(uid).await, - Err(e) => Err(e), + pub async fn process_task(&self, task: &Task) -> Result { + match &task.content { + TaskContent::DocumentAddition { .. } => { + panic!("updates should be handled by batch") + } + TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Ids(ids), + index_uid, + } => { + let ids = ids.clone(); + let index = self.get_index(index_uid.clone().into_inner()).await?; + + let DocumentDeletionResult { + deleted_documents, .. 
+ } = spawn_blocking(move || index.delete_documents(&ids)).await??; + + Ok(TaskResult::DocumentDeletion { deleted_documents }) + } + TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Clear, + index_uid, + } => { + let index = self.get_index(index_uid.clone().into_inner()).await?; + let deleted_documents = spawn_blocking(move || -> IndexResult { + let number_documents = index.stats()?.number_of_documents; + index.clear_documents()?; + Ok(number_documents) + }) + .await??; + + Ok(TaskResult::ClearAll { deleted_documents }) + } + TaskContent::SettingsUpdate { + settings, + is_deletion, + allow_index_creation, + index_uid, + } => { + let index = if *is_deletion || !*allow_index_creation { + self.get_index(index_uid.clone().into_inner()).await? + } else { + self.get_or_create_index(index_uid.clone(), task.id).await? + }; + + let settings = settings.clone(); + spawn_blocking(move || index.update_settings(&settings.check())).await??; + + Ok(TaskResult::Other) + } + TaskContent::IndexDeletion { index_uid } => { + let index = self.delete_index(index_uid.clone().into_inner()).await?; + + let deleted_documents = spawn_blocking(move || -> IndexResult { + Ok(index.stats()?.number_of_documents) + }) + .await??; + + Ok(TaskResult::ClearAll { deleted_documents }) + } + TaskContent::IndexCreation { + primary_key, + index_uid, + } => { + let index = self.create_index(index_uid.clone(), task.id).await?; + + if let Some(primary_key) = primary_key { + let primary_key = primary_key.clone(); + spawn_blocking(move || index.update_primary_key(primary_key)).await??; + } + + Ok(TaskResult::Other) + } + TaskContent::IndexUpdate { + primary_key, + index_uid, + } => { + let index = self.get_index(index_uid.clone().into_inner()).await?; + + if let Some(primary_key) = primary_key { + let primary_key = primary_key.clone(); + spawn_blocking(move || index.update_primary_key(primary_key)).await??; + } + + Ok(TaskResult::Other) + } + _ => unreachable!("Invalid task for index resolver"), + 
} } - } - pub async fn list(&self) -> Result> { - let uuids = self.index_uuid_store.list().await?; - let mut indexes = Vec::new(); - for (name, IndexMeta { uuid, .. }) in uuids { - match self.index_store.get(uuid).await? { - Some(index) => indexes.push((name, index)), - None => { - // we found an unexisting index, we remove it from the uuid store - let _ = self.index_uuid_store.delete(name).await; + pub async fn dump(&self, path: impl AsRef) -> Result<()> { + for (_, index) in self.list().await? { + index.dump(&path)?; + } + self.index_uuid_store.dump(path.as_ref().to_owned()).await?; + Ok(()) + } + + async fn create_index(&self, uid: IndexUid, creation_task_id: TaskId) -> Result { + match self.index_uuid_store.get(uid.into_inner()).await? { + (uid, Some(_)) => Err(IndexResolverError::IndexAlreadyExists(uid)), + (uid, None) => { + let uuid = Uuid::new_v4(); + let index = self.index_store.create(uuid).await?; + match self + .index_uuid_store + .insert( + uid, + IndexMeta { + uuid, + creation_task_id, + }, + ) + .await + { + Err(e) => { + match self.index_store.delete(uuid).await { + Ok(Some(index)) => { + index.close(); + } + Ok(None) => (), + Err(e) => log::error!("Error while deleting index: {:?}", e), + } + Err(e) + } + Ok(()) => Ok(index), + } } } } - Ok(indexes) - } - - pub async fn delete_index(&self, uid: String) -> Result { - match self.index_uuid_store.delete(uid.clone()).await? { - Some(IndexMeta { uuid, .. }) => match self.index_store.delete(uuid).await? { - Some(index) => { - index.clone().close(); - Ok(index) - } - None => Err(IndexResolverError::UnexistingIndex(uid)), - }, - None => Err(IndexResolverError::UnexistingIndex(uid)), + /// Get or create an index with name `uid`. 
+ pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result { + match self.create_index(uid, task_id).await { + Ok(index) => Ok(index), + Err(IndexResolverError::IndexAlreadyExists(uid)) => self.get_index(uid).await, + Err(e) => Err(e), + } } - } - pub async fn get_index(&self, uid: String) -> Result { - match self.index_uuid_store.get(uid).await? { - (name, Some(IndexMeta { uuid, .. })) => { + pub async fn list(&self) -> Result> { + let uuids = self.index_uuid_store.list().await?; + let mut indexes = Vec::new(); + for (name, IndexMeta { uuid, .. }) in uuids { match self.index_store.get(uuid).await? { - Some(index) => Ok(index), + Some(index) => indexes.push((name, index)), None => { - // For some reason we got a uuid to an unexisting index, we return an error, - // and remove the uuid from the uuid store. - let _ = self.index_uuid_store.delete(name.clone()).await; - Err(IndexResolverError::UnexistingIndex(name)) + // we found an unexisting index, we remove it from the uuid store + let _ = self.index_uuid_store.delete(name).await; } } } - (name, _) => Err(IndexResolverError::UnexistingIndex(name)), - } - } - pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result { - let (uid, meta) = self.index_uuid_store.get(index_uid).await?; - meta.map( - |IndexMeta { - creation_task_id, .. - }| creation_task_id, - ) - .ok_or(IndexResolverError::UnexistingIndex(uid)) + Ok(indexes) + } + + pub async fn delete_index(&self, uid: String) -> Result { + match self.index_uuid_store.delete(uid.clone()).await? { + Some(IndexMeta { uuid, .. }) => match self.index_store.delete(uuid).await? { + Some(index) => { + index.clone().close(); + Ok(index) + } + None => Err(IndexResolverError::UnexistingIndex(uid)), + }, + None => Err(IndexResolverError::UnexistingIndex(uid)), + } + } + + pub async fn get_index(&self, uid: String) -> Result { + match self.index_uuid_store.get(uid).await? { + (name, Some(IndexMeta { uuid, .. 
})) => { + match self.index_store.get(uuid).await? { + Some(index) => Ok(index), + None => { + // For some reason we got a uuid to an unexisting index, we return an error, + // and remove the uuid from the uuid store. + let _ = self.index_uuid_store.delete(name.clone()).await; + Err(IndexResolverError::UnexistingIndex(name)) + } + } + } + (name, _) => Err(IndexResolverError::UnexistingIndex(name)), + } + } + + pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result { + let (uid, meta) = self.index_uuid_store.get(index_uid).await?; + meta.map( + |IndexMeta { + creation_task_id, .. + }| creation_task_id, + ) + .ok_or(IndexResolverError::UnexistingIndex(uid)) + } } } From df61ca9cae5876dcefff57e0dc929a249dd45f5f Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 11:00:07 +0200 Subject: [PATCH 119/185] add mocker to IndexResolver --- meilisearch-lib/src/dump/handler.rs | 7 +- meilisearch-lib/src/index_resolver/error.rs | 5 +- meilisearch-lib/src/index_resolver/mod.rs | 114 +++++++++++++++++- .../tasks/handlers/index_resolver_handler.rs | 2 +- 4 files changed, 118 insertions(+), 10 deletions(-) diff --git a/meilisearch-lib/src/dump/handler.rs b/meilisearch-lib/src/dump/handler.rs index 4adb7011a..830bf4d0d 100644 --- a/meilisearch-lib/src/dump/handler.rs +++ b/meilisearch-lib/src/dump/handler.rs @@ -121,7 +121,6 @@ mod real { #[cfg(test)] mod test { - use std::marker::PhantomData; use std::path::PathBuf; use std::sync::Arc; @@ -137,12 +136,12 @@ mod test { pub enum MockDumpHandler { Real(super::real::DumpHandler), - Mock(Mocker, PhantomData<(U, I)>), + Mock(Mocker), } impl MockDumpHandler { pub fn mock(mocker: Mocker) -> Self { - Self::Mock(mocker, PhantomData) + Self::Mock(mocker) } } @@ -173,7 +172,7 @@ mod test { pub async fn run(&self, uid: String) -> Result<()> { match self { DumpHandler::Real(real) => real.run(uid).await, - DumpHandler::Mock(mocker, _) => unsafe { mocker.get("run").call(uid) }, + DumpHandler::Mock(mocker) => unsafe { 
mocker.get("run").call(uid) }, } } } diff --git a/meilisearch-lib/src/index_resolver/error.rs b/meilisearch-lib/src/index_resolver/error.rs index 6c86aa6b8..610ec6c7c 100644 --- a/meilisearch-lib/src/index_resolver/error.rs +++ b/meilisearch-lib/src/index_resolver/error.rs @@ -5,7 +5,7 @@ use tokio::sync::mpsc::error::SendError as MpscSendError; use tokio::sync::oneshot::error::RecvError as OneshotRecvError; use uuid::Uuid; -use crate::{error::MilliError, index::error::IndexError}; +use crate::{error::MilliError, index::error::IndexError, update_file_store::UpdateFileStoreError}; pub type Result = std::result::Result; @@ -49,7 +49,8 @@ internal_error!( uuid::Error, std::io::Error, tokio::task::JoinError, - serde_json::Error + serde_json::Error, + UpdateFileStoreError ); impl ErrorCode for IndexResolverError { diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 7eb564376..3d76f3b6c 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -27,8 +27,12 @@ use self::meta_store::IndexMeta; pub type HardStateIndexResolver = IndexResolver; +#[cfg(not(test))] pub use real::IndexResolver; +#[cfg(test)] +pub use test::MockIndexResolver as IndexResolver; + /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] @@ -102,9 +106,9 @@ mod real { use super::*; pub struct IndexResolver { - index_uuid_store: U, - index_store: I, - pub file_store: UpdateFileStore, + pub(super) index_uuid_store: U, + pub(super) index_store: I, + pub(super) file_store: UpdateFileStore, } impl IndexResolver { @@ -230,6 +234,11 @@ mod real { } } + pub async fn delete_content_file(&self, content_uuid: Uuid) -> Result<()> { + self.file_store.delete(content_uuid).await?; + Ok(()) + } + pub async fn process_task(&self, task: &Task) -> Result { match &task.content { TaskContent::DocumentAddition { 
.. } => { @@ -448,6 +457,105 @@ mod test { // use index_store::MockIndexStore; // use meta_store::MockIndexMetaStore; + pub enum MockIndexResolver { + Real(super::real::IndexResolver), + Mock(Mocker), + } + + impl MockIndexResolver { + pub fn load_dump( + src: impl AsRef, + dst: impl AsRef, + index_db_size: usize, + env: Arc, + indexer_opts: &IndexerOpts, + ) -> anyhow::Result<()> { + super::real::IndexResolver::load_dump(src, dst, index_db_size, env, indexer_opts) + } + } + + impl MockIndexResolver + where + U: IndexMetaStore, + I: IndexStore, + { + pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self { + Self::Real(super::real::IndexResolver { + index_uuid_store, + index_store, + file_store, + }) + } + + pub fn mock(mocker: Mocker) -> Self { + Self::Mock(mocker) + } + + pub async fn process_document_addition_batch(&self, tasks: Vec) -> Vec { + match self { + IndexResolver::Real(r) => r.process_document_addition_batch(tasks).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn process_task(&self, task: &Task) -> Result { + match self { + IndexResolver::Real(r) => r.process_task(task).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn dump(&self, path: impl AsRef) -> Result<()> { + match self { + IndexResolver::Real(r) => r.dump(path).await, + IndexResolver::Mock(_) => todo!(), + } + } + + /// Get or create an index with name `uid`. 
+ pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result { + match self { + IndexResolver::Real(r) => r.get_or_create_index(uid, task_id).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn list(&self) -> Result> { + match self { + IndexResolver::Real(r) => r.list().await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn delete_index(&self, uid: String) -> Result { + match self { + IndexResolver::Real(r) => r.delete_index(uid).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn get_index(&self, uid: String) -> Result { + match self { + IndexResolver::Real(r) => r.get_index(uid).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result { + match self { + IndexResolver::Real(r) => r.get_index_creation_task_id(index_uid).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn delete_content_file(&self, content_uuid: Uuid) -> Result<()> { + match self { + IndexResolver::Real(r) => r.delete_content_file(content_uuid).await, + IndexResolver::Mock(_) => todo!(), + } + } + } + // TODO: ignoring this test, it has become too complex to maintain, and rather implement // handler logic test. // proptest! 
{ diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index 75f0623b2..d313ea33d 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -38,7 +38,7 @@ where if let BatchContent::DocumentsAdditionBatch(ref tasks) = batch.content { for task in tasks { if let Some(content_uuid) = task.get_content_uuid() { - if let Err(e) = self.file_store.delete(content_uuid).await { + if let Err(e) = self.delete_content_file(content_uuid).await { log::error!("error deleting update file: {}", e); } } From 6ac8675c6dec19936716405f370acdec71a9a0cd Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 12:15:36 +0200 Subject: [PATCH 120/185] add IndexResolver BatchHandler tests --- meilisearch-lib/src/index_resolver/mod.rs | 31 +++------- .../tasks/handlers/index_resolver_handler.rs | 62 ++++++++++++++++++- 2 files changed, 70 insertions(+), 23 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 3d76f3b6c..32970fc37 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -438,24 +438,9 @@ mod real { #[cfg(test)] mod test { - // use std::{collections::BTreeMap, vec::IntoIter}; - // - // use super::*; - // - // use futures::future::ok; - // use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; - // use nelson::Mocker; - // use proptest::prelude::*; - // - // use crate::{ - // index::{ - // error::{IndexError, Result as IndexResult}, - // Checked, IndexMeta, IndexStats, Settings, - // }, - // tasks::{batch::Batch, BatchHandler}, - // }; - // use index_store::MockIndexStore; - // use meta_store::MockIndexMetaStore; + use super::*; + + use nelson::Mocker; pub enum MockIndexResolver { Real(super::real::IndexResolver), @@ -494,14 +479,16 @@ mod test { pub async fn 
process_document_addition_batch(&self, tasks: Vec) -> Vec { match self { IndexResolver::Real(r) => r.process_document_addition_batch(tasks).await, - IndexResolver::Mock(_) => todo!(), + IndexResolver::Mock(m) => unsafe { + m.get("process_document_addition_batch").call(tasks) + }, } } pub async fn process_task(&self, task: &Task) -> Result { match self { IndexResolver::Real(r) => r.process_task(task).await, - IndexResolver::Mock(_) => todo!(), + IndexResolver::Mock(m) => unsafe { m.get("process_task").call(task) }, } } @@ -551,7 +538,9 @@ mod test { pub async fn delete_content_file(&self, content_uuid: Uuid) -> Result<()> { match self { IndexResolver::Real(r) => r.delete_content_file(content_uuid).await, - IndexResolver::Mock(_) => todo!(), + IndexResolver::Mock(m) => unsafe { + m.get("delete_content_file").call(content_uuid) + }, } } } diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index d313ea33d..de624106c 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -49,7 +49,12 @@ where #[cfg(test)] mod test { - use crate::index_resolver::{index_store::MockIndexStore, meta_store::MockIndexMetaStore}; + use crate::index_resolver::index_store::MapIndexStore; + use crate::index_resolver::meta_store::HeedMetaStore; + use crate::index_resolver::{ + error::Result as IndexResult, index_store::MockIndexStore, meta_store::MockIndexMetaStore, + }; + use crate::tasks::task::TaskResult; use crate::tasks::{ handlers::test::task_to_batch, task::{Task, TaskContent}, @@ -142,5 +147,58 @@ mod test { index_resolver.process_batch(batch).await; } - // TODO: test perform_batch. We need a Mocker for IndexResolver. + proptest! 
{ + #[test] + fn index_document_task_deletes_update_file( + task in any::(), + ) { + let rt = tokio::runtime::Runtime::new().unwrap(); + let handle = rt.spawn(async { + let mocker = Mocker::default(); + + if let TaskContent::DocumentAddition{ .. } = task.content { + mocker.when::>("delete_content_file").then(|_| Ok(())); + } + + let index_resolver: IndexResolver = IndexResolver::mock(mocker); + + let batch = task_to_batch(task); + + index_resolver.finish(&batch).await; + }); + + rt.block_on(handle).unwrap(); + } + + #[test] + fn test_handle_batch(task in any::()) { + let rt = tokio::runtime::Runtime::new().unwrap(); + let handle = rt.spawn(async { + let mocker = Mocker::default(); + match task.content { + TaskContent::DocumentAddition { .. } => { + mocker.when::, Vec>("process_document_addition_batch").then(|tasks| tasks); + } + TaskContent::Dump { .. } => (), + _ => { + mocker.when::<&Task, IndexResult>("process_task").then(|_| Ok(TaskResult::Other)); + } + } + let index_resolver: IndexResolver = IndexResolver::mock(mocker); + + + let batch = task_to_batch(task); + + if index_resolver.accept(&batch) { + index_resolver.process_batch(batch).await; + } + }); + + if let Err(e) = rt.block_on(handle) { + if e.is_panic() { + std::panic::resume_unwind(e.into_panic()); + } + } + } + } } From cbd27d313c27efc1de750f91e6b3c69e0d877c44 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 7 Jun 2022 10:07:40 +0200 Subject: [PATCH 121/185] fix blocking writing of meta file in dump --- meilisearch-lib/src/dump/handler.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/meilisearch-lib/src/dump/handler.rs b/meilisearch-lib/src/dump/handler.rs index 4adb7011a..16bfd6a55 100644 --- a/meilisearch-lib/src/dump/handler.rs +++ b/meilisearch-lib/src/dump/handler.rs @@ -22,6 +22,7 @@ mod real { use meilisearch_auth::AuthController; use milli::heed::Env; use tokio::fs::create_dir_all; + use tokio::io::AsyncWriteExt; use crate::analytics; use 
crate::compression::to_tar_gz; @@ -78,9 +79,11 @@ mod real { let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size); let meta_path = temp_dump_path.join(META_FILE_NAME); - // TODO: blocking - let mut meta_file = File::create(&meta_path)?; - serde_json::to_writer(&mut meta_file, &meta)?; + + let meta_bytes = serde_json::to_vec(&meta)?; + let mut meta_file = tokio::fs::File::create(&meta_path).await?; + meta_file.write_all(&meta_bytes).await?; + analytics::copy_user_id(&self.db_path, &temp_dump_path); create_dir_all(&temp_dump_path.join("indexes")).await?; From 3b01ed4fe82488ff3a921301051d334a1476cb50 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 7 Jun 2022 10:49:28 +0200 Subject: [PATCH 122/185] feat(auth): remove `dumps.get` action from keys --- meilisearch-auth/src/action.rs | 5 ----- meilisearch-http/tests/auth/api_keys.rs | 20 -------------------- 2 files changed, 25 deletions(-) diff --git a/meilisearch-auth/src/action.rs b/meilisearch-auth/src/action.rs index 088ad6ba7..fab5263ec 100644 --- a/meilisearch-auth/src/action.rs +++ b/meilisearch-auth/src/action.rs @@ -32,8 +32,6 @@ pub enum Action { StatsGet = actions::STATS_GET, #[serde(rename = "dumps.create")] DumpsCreate = actions::DUMPS_CREATE, - #[serde(rename = "dumps.get")] - DumpsGet = actions::DUMPS_GET, #[serde(rename = "version")] Version = actions::VERSION, #[serde(rename = "keys.create")] @@ -64,7 +62,6 @@ impl Action { SETTINGS_UPDATE => Some(Self::SettingsUpdate), STATS_GET => Some(Self::StatsGet), DUMPS_CREATE => Some(Self::DumpsCreate), - DUMPS_GET => Some(Self::DumpsGet), VERSION => Some(Self::Version), KEYS_CREATE => Some(Self::KeysAdd), KEYS_GET => Some(Self::KeysGet), @@ -91,7 +88,6 @@ impl Action { Self::SettingsUpdate => SETTINGS_UPDATE, Self::StatsGet => STATS_GET, Self::DumpsCreate => DUMPS_CREATE, - Self::DumpsGet => DUMPS_GET, Self::Version => VERSION, Self::KeysAdd => KEYS_CREATE, Self::KeysGet => KEYS_GET, @@ -116,7 +112,6 @@ pub mod actions { pub const 
SETTINGS_UPDATE: u8 = 11; pub const STATS_GET: u8 = 12; pub const DUMPS_CREATE: u8 = 13; - pub const DUMPS_GET: u8 = 14; pub const VERSION: u8 = 15; pub const KEYS_CREATE: u8 = 16; pub const KEYS_GET: u8 = 17; diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index d7e7dad0f..9ba583b07 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -27,7 +27,6 @@ async fn add_valid_api_key() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -58,7 +57,6 @@ async fn add_valid_api_key() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -88,7 +86,6 @@ async fn add_valid_api_key_expired_at() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13" }); @@ -117,7 +114,6 @@ async fn add_valid_api_key_expired_at() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -532,7 +528,6 @@ async fn get_api_key() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -562,7 +557,6 @@ async fn get_api_key() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -667,7 +661,6 @@ async fn list_api_keys() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -698,7 +691,6 @@ async fn list_api_keys() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }, @@ -778,7 +770,6 @@ async fn delete_api_key() { "settings.update", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -875,7 +866,6 @@ async fn patch_api_key_description() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -917,7 +907,6 @@ 
async fn patch_api_key_description() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -947,7 +936,6 @@ async fn patch_api_key_description() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -976,7 +964,6 @@ async fn patch_api_key_description() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1002,7 +989,6 @@ async fn patch_api_key_name() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1044,7 +1030,6 @@ async fn patch_api_key_name() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1074,7 +1059,6 @@ async fn patch_api_key_name() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1103,7 +1087,6 @@ async fn patch_api_key_name() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1130,7 +1113,6 @@ async fn error_patch_api_key_indexes() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1178,7 +1160,6 @@ async fn error_patch_api_key_actions() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -1234,7 +1215,6 @@ async fn error_patch_api_key_expiration_date() { "indexes.delete", "stats.get", "dumps.create", - "dumps.get" ], "expiresAt": "2050-11-13T00:00:00Z" }); From a7bff35e495a9afbb5884da8b96a12531b840504 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 7 Jun 2022 13:56:55 +0200 Subject: [PATCH 123/185] fix(test): Reduce default index size in tests --- meilisearch-http/tests/common/server.rs | 4 ++-- meilisearch-http/tests/index/get_index.rs | 9 ++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git 
a/meilisearch-http/tests/common/server.rs b/meilisearch-http/tests/common/server.rs index 2dd235e8f..c4ba8484f 100644 --- a/meilisearch-http/tests/common/server.rs +++ b/meilisearch-http/tests/common/server.rs @@ -150,8 +150,8 @@ pub fn default_settings(dir: impl AsRef) -> Opt { env: "development".to_owned(), #[cfg(all(not(debug_assertions), feature = "analytics"))] no_analytics: true, - max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(), - max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(), + max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(), + max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(), http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(), snapshot_dir: ".".into(), indexer_options: IndexerOpts { diff --git a/meilisearch-http/tests/index/get_index.rs b/meilisearch-http/tests/index/get_index.rs index a8c036483..91cb1a6d5 100644 --- a/meilisearch-http/tests/index/get_index.rs +++ b/meilisearch-http/tests/index/get_index.rs @@ -77,13 +77,12 @@ async fn get_and_paginate_indexes() { const NB_INDEXES: usize = 50; for i in 0..NB_INDEXES { server.index(&format!("test_{i:02}")).create(None).await; + server + .index(&format!("test_{i:02}")) + .wait_task(i as u64) + .await; } - server - .index(&format!("test_{NB_INDEXES}")) - .wait_task(NB_INDEXES as u64 - 1) - .await; - // basic let (response, code) = server.list_indexes(None, None).await; assert_eq!(code, 200); From 6ffa222218ef1dd4ace20b285cb20488832af844 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 7 Jun 2022 12:03:10 +0200 Subject: [PATCH 124/185] feat(auth): Paginate API keys listing - [x] Update tests - [x] Use Pagination helpers to paginate API keys fixes #2442 --- meilisearch-http/src/routes/api_key.rs | 37 +++++------- meilisearch-http/tests/auth/api_keys.rs | 76 +++++++++++++------------ 2 files changed, 54 insertions(+), 59 deletions(-) diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs 
index 831a350d8..87db3f890 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -1,18 +1,19 @@ use std::str; -use uuid::Uuid; use actix_web::{web, HttpRequest, HttpResponse}; - -use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key}; use serde::{Deserialize, Serialize}; use serde_json::Value; use time::OffsetDateTime; +use uuid::Uuid; + +use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key}; +use meilisearch_error::{Code, ResponseError}; use crate::extractors::{ authentication::{policies::*, GuardedData}, sequential_extractor::SeqHandler, }; -use meilisearch_error::{Code, ResponseError}; +use crate::routes::Pagination; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service( @@ -46,20 +47,21 @@ pub async fn create_api_key( pub async fn list_api_keys( auth_controller: GuardedData, AuthController>, - _req: HttpRequest, + paginate: web::Query, ) -> Result { - let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { + let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { let keys = auth_controller.list_keys()?; - let res: Vec<_> = keys - .into_iter() - .map(|k| KeyView::from_key(k, &auth_controller)) - .collect(); - Ok(res) + let page_view = paginate.auto_paginate_sized( + keys.into_iter() + .map(|k| KeyView::from_key(k, &auth_controller)), + ); + + Ok(page_view) }) .await .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??; - Ok(HttpResponse::Ok().json(KeyListView::from(res))) + Ok(HttpResponse::Ok().json(page_view)) } pub async fn get_api_key( @@ -156,14 +158,3 @@ impl KeyView { } } } - -#[derive(Debug, Serialize)] -struct KeyListView { - results: Vec, -} - -impl From> for KeyListView { - fn from(results: Vec) -> Self { - Self { results } - } -} diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 9ba583b07..28be81c91 100644 --- 
a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -673,42 +673,46 @@ async fn list_api_keys() { assert_eq!(200, code, "{:?}", &response); let expected_response = json!({ "results": - [ - { - "description": "Indexing API key", - "indexes": ["products"], - "actions": [ - "search", - "documents.add", - "documents.get", - "documents.delete", - "indexes.create", - "indexes.get", - "indexes.update", - "indexes.delete", - "tasks.get", - "settings.get", - "settings.update", - "stats.get", - "dumps.create", - ], - "expiresAt": "2050-11-13T00:00:00Z" - }, - { - "name": "Default Search API Key", - "description": "Use it to search from the frontend", - "indexes": ["*"], - "actions": ["search"], - "expiresAt": serde_json::Value::Null, - }, - { - "name": "Default Admin API Key", - "description": "Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend", - "indexes": ["*"], - "actions": ["*"], - "expiresAt": serde_json::Value::Null, - } - ]}); + [ + { + "description": "Indexing API key", + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "tasks.get", + "settings.get", + "settings.update", + "stats.get", + "dumps.create", + ], + "expiresAt": "2050-11-13T00:00:00Z" + }, + { + "name": "Default Search API Key", + "description": "Use it to search from the frontend", + "indexes": ["*"], + "actions": ["search"], + "expiresAt": serde_json::Value::Null, + }, + { + "name": "Default Admin API Key", + "description": "Use it for anything that is not a search operation. Caution! 
Do not expose it on a public frontend", + "indexes": ["*"], + "actions": ["*"], + "expiresAt": serde_json::Value::Null, + } + ], + "limit": 20, + "offset": 0, + "total": 3, + }); assert_json_include!(actual: response, expected: expected_response); } From 987a7f892639281bd8d3fe6f98474ec74af0a11c Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 8 Jun 2022 14:04:45 +0200 Subject: [PATCH 125/185] Wrap sha256 in HMAC instead of directly use sha256 --- Cargo.lock | 17 +++++++++++++++++ meilisearch-auth/Cargo.toml | 1 + meilisearch-auth/src/store.rs | 11 +++++++---- 3 files changed, 25 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1be24517..d4d977ab3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -927,6 +927,7 @@ checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ "block-buffer", "crypto-common", + "subtle", ] [[package]] @@ -1460,6 +1461,15 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + [[package]] name = "http" version = "0.2.7" @@ -1974,6 +1984,7 @@ version = "0.27.1" dependencies = [ "base64", "enum-iterator", + "hmac", "meilisearch-error", "milli", "rand", @@ -3272,6 +3283,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + [[package]] name = "syn" version = "0.15.44" diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 
29fa78a14..bb4a9382c 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -6,6 +6,7 @@ edition = "2021" [dependencies] base64 = "0.13.0" enum-iterator = "0.7.0" +hmac = "0.12.1" meilisearch-error = { path = "../meilisearch-error" } milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.28.0" } rand = "0.8.4" diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 48ff6e259..dd976fd29 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -8,9 +8,10 @@ use std::str; use std::sync::Arc; use enum_iterator::IntoEnumIterator; +use hmac::{Hmac, Mac}; use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; use milli::heed::{Database, Env, EnvOpenOptions, RwTxn}; -use sha2::{Digest, Sha256}; +use sha2::Sha256; use time::OffsetDateTime; use uuid::Uuid; @@ -242,9 +243,11 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { } pub fn generate_key_as_base64(uid: &[u8], master_key: &[u8]) -> String { - let key = [uid, master_key].concat(); - let sha = Sha256::digest(&key); - base64::encode_config(sha, base64::URL_SAFE_NO_PAD) + let mut mac = Hmac::::new_from_slice(master_key).unwrap(); + mac.update(uid); + + let result = mac.finalize(); + base64::encode_config(result.into_bytes(), base64::URL_SAFE_NO_PAD) } /// Divides one slice into two at an index, returns `None` if mid is out of bounds. 
From 8d0977233402c093095d8e7e7046cd5d0e6d7aed Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 6 Jun 2022 15:52:51 +0200 Subject: [PATCH 126/185] Update milli --- Cargo.lock | 99 +++++++++++++++++++------------------ meilisearch-auth/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 3 files changed, 53 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1be24517..647c3ce4f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -643,12 +643,33 @@ dependencies = [ ] [[package]] -name = "character_converter" -version = "1.0.0" +name = "charabia" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e48477ece09d6a21c033cb604968524a37782532727055d6f6faafac1781e5c" +checksum = "4a26a3df4d9c9231eb1e757fe6b1c66c471e0c2cd5410265e7c3109a726663c4" +dependencies = [ + "character_converter", + "cow-utils", + "deunicode", + "fst", + "jieba-rs", + "lindera", + "lindera-core", + "once_cell", + "slice-group-by", + "unicode-segmentation", + "whatlang", +] + +[[package]] +name = "character_converter" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7064c6e919124b6541c52fef59d88c3c3eabdf4bc97c13b14551df775aead02" dependencies = [ "bincode", + "fst", + "once_cell", ] [[package]] @@ -1101,8 +1122,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.28.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" +version = "0.29.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" dependencies = [ "nom", "nom_locate", @@ -1126,8 +1147,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.28.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" +version = "0.29.1" +source = 
"git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" dependencies = [ "serde_json", ] @@ -1630,8 +1651,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.28.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" +version = "0.29.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" dependencies = [ "serde_json", ] @@ -1709,9 +1730,9 @@ dependencies = [ [[package]] name = "lindera" -version = "0.12.6" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dea10df226936ff54f16d3922500e08ef4be2ba7c0070bec9ad4a1474316111" +checksum = "7d1c5db4b1d12637aa316dc1adb215f78fe79025080af750942516c5ff17d1a0" dependencies = [ "anyhow", "bincode", @@ -1731,9 +1752,9 @@ dependencies = [ [[package]] name = "lindera-cc-cedict-builder" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4392785248c3d8755c6fae9d0086d27ad7a1d6810155a2494fe5206e2021f471" +checksum = "73a3509fb497340571d49feddb57e1db2ce5248c4d449f2548d0ee8cb745eb1e" dependencies = [ "anyhow", "bincode", @@ -1751,9 +1772,9 @@ dependencies = [ [[package]] name = "lindera-core" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af63a4484334d4b83277621f1ba62fb83472858cc37fb4ab2181a4c19eebcb38" +checksum = "5d20d1b2c085393aed58625d741beca69410e1143fc35bc67ebc35c9885f9f74" dependencies = [ "anyhow", "bincode", @@ -1767,9 +1788,9 @@ dependencies = [ [[package]] name = "lindera-decompress" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "817ee62bc8973ec2457805df83796c59f074e49a4a0ee9baffe2663fe157f54a" +checksum = "b96b8050cded13927a99bcb8cbb0987f89fc8f35429fc153b4bc05ddc7a53a44" dependencies = [ 
"anyhow", "lzma-rs", @@ -1778,9 +1799,9 @@ dependencies = [ [[package]] name = "lindera-dictionary" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd57501ee44a6aba0431d043c7926347e29883a79d8fc3955b8837e4ad1fee3c" +checksum = "5abe3dddc22303402957edb4472ab0c996e0d93b3b00643de3bee8b28c2f9297" dependencies = [ "anyhow", "bincode", @@ -1790,9 +1811,9 @@ dependencies = [ [[package]] name = "lindera-ipadic" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade3bd3faa5f0db629c26264663e901dee5f46221eb04c2c7b592bd7485d44f9" +checksum = "b8f4c111f6ad9eb9e015d02061af2ed36fc0255f29359294415c7c2f1ea5b5b6" dependencies = [ "bincode", "byteorder", @@ -1807,9 +1828,9 @@ dependencies = [ [[package]] name = "lindera-ipadic-builder" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee61f8dd6566738c5fd0ee9b1c11212ffc2d1f97af69c08a02cbb5c49995250a" +checksum = "a2b9893f22a4a7511ac70ff7d96cda9b8d7259b7d7121784183c73bc593ce6e7" dependencies = [ "anyhow", "bincode", @@ -1827,9 +1848,9 @@ dependencies = [ [[package]] name = "lindera-ko-dic-builder" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01f05950d9adc7aa42aa8b16be1616f9625576c867179ac29372714eaed6993d" +checksum = "14282600ebfe7ab6fd4f3042143024ff9d74c09d58fd983d0c587839cf940d4a" dependencies = [ "anyhow", "bincode", @@ -1847,9 +1868,9 @@ dependencies = [ [[package]] name = "lindera-unidic-builder" -version = "0.12.6" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3836c1278b8309ebf209c67bc7a935f4ce7c9246a578b250540398806a40b81d" +checksum = "b20825d46c95854e47c532c3e548dfec07c8f187c1ed89383cb6c35790338088" dependencies = [ "anyhow", "bincode", @@ -2131,24 +2152,6 @@ dependencies = [ "whoami", ] -[[package]] 
-name = "meilisearch-tokenizer" -version = "0.2.9" -source = "git+https://github.com/meilisearch/tokenizer.git?tag=v0.2.9#1dfc8ad9f5b338c39c3bc5fd5b2d0c1328314ddc" -dependencies = [ - "character_converter", - "cow-utils", - "deunicode", - "fst", - "jieba-rs", - "lindera", - "lindera-core", - "once_cell", - "slice-group-by", - "unicode-segmentation", - "whatlang", -] - [[package]] name = "memchr" version = "2.5.0" @@ -2175,13 +2178,14 @@ dependencies = [ [[package]] name = "milli" -version = "0.28.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.28.0#19dac01c5ca81543b751f66ad51fcff61608d969" +version = "0.29.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" dependencies = [ "bimap", "bincode", "bstr", "byteorder", + "charabia", "concat-arrays", "crossbeam-channel", "csv", @@ -2198,7 +2202,6 @@ dependencies = [ "levenshtein_automata", "log", "logging_timer", - "meilisearch-tokenizer", "memmap2", "obkv", "once_cell", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 29fa78a14..709898b22 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" base64 = "0.13.0" enum-iterator = "0.7.0" meilisearch-error = { path = "../meilisearch-error" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.28.0" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.1" } rand = "0.8.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index e02882b39..730061675 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -30,7 +30,7 @@ lazy_static = "1.4.0" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-error = { path = "../meilisearch-error" } -milli = { git = "https://github.com/meilisearch/milli.git", tag 
= "v0.28.0" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.1" } mime = "0.3.16" num_cpus = "1.13.1" obkv = "0.2.0" From 173eea06e18d7b5df4cb965ae3113aa235612f96 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 6 Jun 2022 15:53:28 +0200 Subject: [PATCH 127/185] Replace old tokenizer by charabia --- meilisearch-lib/src/index/search.rs | 30 +++++++---------------------- 1 file changed, 7 insertions(+), 23 deletions(-) diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 91a46600f..6a4a0a672 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -4,7 +4,7 @@ use std::str::FromStr; use std::time::Instant; use either::Either; -use milli::tokenizer::{Analyzer, AnalyzerConfig}; +use milli::tokenizer::TokenizerBuilder; use milli::{ AscDesc, FieldId, FieldsIdsMap, Filter, FormatOptions, MatchBounds, MatcherBuilder, SortError, }; @@ -175,12 +175,9 @@ impl Index { &displayed_ids, ); - let stop_words = fst::Set::default(); - let mut config = AnalyzerConfig::default(); - config.stop_words(&stop_words); - let analyzer = Analyzer::new(config); + let tokenizer = TokenizerBuilder::default().build(); - let mut formatter_builder = MatcherBuilder::from_matching_words(matching_words); + let mut formatter_builder = MatcherBuilder::new(matching_words, tokenizer); formatter_builder.crop_marker(query.crop_marker); formatter_builder.highlight_prefix(query.highlight_pre_tag); formatter_builder.highlight_suffix(query.highlight_post_tag); @@ -204,7 +201,6 @@ impl Index { &displayed_document, &fields_ids_map, &formatter_builder, - &analyzer, &formatted_options, query.show_matches_position, &displayed_ids, @@ -414,8 +410,7 @@ fn make_document( fn format_fields<'a, A: AsRef<[u8]>>( document: &Document, field_ids_map: &FieldsIdsMap, - builder: &MatcherBuilder, - analyzer: &'a Analyzer<'a, A>, + builder: &MatcherBuilder<'a, A>, formatted_options: &BTreeMap, compute_matches: bool, 
displayable_ids: &BTreeSet, @@ -446,7 +441,6 @@ fn format_fields<'a, A: AsRef<[u8]>>( std::mem::take(value), builder, format, - analyzer, &mut infos, compute_matches, ); @@ -470,19 +464,14 @@ fn format_fields<'a, A: AsRef<[u8]>>( fn format_value<'a, A: AsRef<[u8]>>( value: Value, - builder: &MatcherBuilder, + builder: &MatcherBuilder<'a, A>, format_options: Option, - analyzer: &'a Analyzer<'a, A>, infos: &mut Vec, compute_matches: bool, ) -> Value { match value { Value::String(old_string) => { - // this will be removed with charabia - let analyzed = analyzer.analyze(&old_string); - let tokens: Vec<_> = analyzed.tokens().collect(); - - let mut matcher = builder.build(&tokens[..], &old_string); + let mut matcher = builder.build(&old_string); if compute_matches { let matches = matcher.matches(); infos.extend_from_slice(&matches[..]); @@ -507,7 +496,6 @@ fn format_value<'a, A: AsRef<[u8]>>( highlight: format_options.highlight, crop: None, }), - analyzer, infos, compute_matches, ) @@ -527,7 +515,6 @@ fn format_value<'a, A: AsRef<[u8]>>( highlight: format_options.highlight, crop: None, }), - analyzer, infos, compute_matches, ), @@ -536,12 +523,9 @@ fn format_value<'a, A: AsRef<[u8]>>( .collect(), ), Value::Number(number) => { - // this will be removed with charabia let s = number.to_string(); - let analyzed = analyzer.analyze(&s); - let tokens: Vec<_> = analyzed.tokens().collect(); - let mut matcher = builder.build(&tokens[..], &s); + let mut matcher = builder.build(&s); if compute_matches { let matches = matcher.matches(); infos.extend_from_slice(&matches[..]); From f5306eb5b0a66520fd59aa5a75cc5b86aa1108a4 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 6 Jun 2022 15:54:50 +0200 Subject: [PATCH 128/185] Set disabled_words to default when Index::exact_words returns None --- meilisearch-lib/src/index/index.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/meilisearch-lib/src/index/index.rs b/meilisearch-lib/src/index/index.rs index 
9c6150cfb..1fe191c41 100644 --- a/meilisearch-lib/src/index/index.rs +++ b/meilisearch-lib/src/index/index.rs @@ -175,12 +175,10 @@ impl Index { two_typos: Setting::Set(self.min_word_len_two_typos(txn)?), }; - let disabled_words = self - .exact_words(txn)? - .into_stream() - .into_strs()? - .into_iter() - .collect(); + let disabled_words = match self.exact_words(txn)? { + Some(fst) => fst.into_stream().into_strs()?.into_iter().collect(), + None => BTreeSet::new(), + }; let disabled_attributes = self .exact_attributes(txn)? From 09938c9b6fc0dc004c87030a26d0b0dc7f628cf3 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 6 Jun 2022 18:33:07 +0200 Subject: [PATCH 129/185] Patch ranking rules error test --- meilisearch-http/tests/settings/get_settings.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index e79b3ed26..d3ac47625 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -283,7 +283,7 @@ async fn error_set_invalid_ranking_rules() { assert_eq!(response["status"], "failed"); let expected_error = json!({ - "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are Words, Typo, Sort, Proximity, Attribute, Exactness and custom ranking rules."#, + "message": r#"`manyTheFish` ranking rule is invalid. 
Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#, "code": "invalid_ranking_rule", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_ranking_rule" From 17f30c2b2dfeb55a47e9b997d6a580de562169ec Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 8 Jun 2022 14:52:32 +0200 Subject: [PATCH 130/185] Fix(auth): Authorization test were not testing keys unrestricted on index --- meilisearch-http/tests/auth/authorization.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 7846188fb..e5826a675 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -249,7 +249,7 @@ async fn access_authorized_no_index_restriction() { server.use_api_key("MASTER_KEY"); let content = json!({ - "indexes": ["products"], + "indexes": ["*"], "actions": [action], "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), }); From 1a7631c8073cbc8fe27f8aff69b44d849a0a7848 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 8 Jun 2022 14:14:30 +0200 Subject: [PATCH 131/185] Hash master_key before passing it to HMAC --- meilisearch-auth/src/store.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index dd976fd29..d1af1b4ab 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -11,7 +11,7 @@ use enum_iterator::IntoEnumIterator; use hmac::{Hmac, Mac}; use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; use milli::heed::{Database, Env, EnvOpenOptions, RwTxn}; -use sha2::Sha256; +use sha2::{Digest, Sha256}; use time::OffsetDateTime; use uuid::Uuid; @@ -243,7 +243,8 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { } pub fn generate_key_as_base64(uid: &[u8], master_key: &[u8]) -> String { - 
let mut mac = Hmac::::new_from_slice(master_key).unwrap(); + let master_key_sha = Sha256::digest(master_key); + let mut mac = Hmac::::new_from_slice(master_key_sha.as_slice()).unwrap(); mac.update(uid); let result = mac.finalize(); From 0a16f71563c1709e07858d515e82bd31931b8d43 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 8 Jun 2022 12:11:35 +0200 Subject: [PATCH 132/185] Increase wait_task wainting time --- meilisearch-http/tests/common/index.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 275bec4cd..010535e21 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -89,9 +89,9 @@ impl Index<'_> { } pub async fn wait_task(&self, update_id: u64) -> Value { - // try 10 times to get status, or panic to not wait forever + // try several times to get status, or panic to not wait forever let url = format!("/tasks/{}", update_id); - for _ in 0..10 { + for _ in 0..100 { let (response, status_code) = self.service.get(&url).await; assert_eq!(200, status_code, "response: {}", response); @@ -99,7 +99,8 @@ impl Index<'_> { return response; } - sleep(Duration::from_secs(1)).await; + // wait 0.5 second. 
+ sleep(Duration::from_millis(500)).await; } panic!("Timeout waiting for update id"); } From 55169ff91432bda80c692735713d31b2803d9790 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 8 Jun 2022 15:09:06 +0200 Subject: [PATCH 133/185] Fix test get_document_s_nested_attributes_to_retrieve --- meilisearch-http/tests/documents/get_documents.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/meilisearch-http/tests/documents/get_documents.rs b/meilisearch-http/tests/documents/get_documents.rs index 83e433b22..c15d3f7fa 100644 --- a/meilisearch-http/tests/documents/get_documents.rs +++ b/meilisearch-http/tests/documents/get_documents.rs @@ -43,7 +43,7 @@ async fn get_document() { ]); let (_, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - index.wait_task(0).await; + index.wait_task(1).await; let (response, code) = index.get_document(0, None).await; assert_eq!(code, 200); assert_eq!( @@ -306,7 +306,7 @@ async fn get_document_s_nested_attributes_to_retrieve() { ]); let (_, code) = index.add_documents(documents, None).await; assert_eq!(code, 202); - index.wait_task(0).await; + index.wait_task(1).await; let (response, code) = index .get_document( From afcc49348068ca43368fd30eedc162c77326c001 Mon Sep 17 00:00:00 2001 From: Lawrence Chou Date: Wed, 8 Jun 2022 23:17:20 +0800 Subject: [PATCH 134/185] Merge publish-docker-latest.yml & publish-docker-tag.yml (#2477) close #1901 --- .github/workflows/publish-docker-latest.yml | 30 ------------------- ...lish-docker-tag.yml => publish-docker.yml} | 10 +++++-- 2 files changed, 8 insertions(+), 32 deletions(-) delete mode 100644 .github/workflows/publish-docker-latest.yml rename .github/workflows/{publish-docker-tag.yml => publish-docker.yml} (67%) diff --git a/.github/workflows/publish-docker-latest.yml b/.github/workflows/publish-docker-latest.yml deleted file mode 100644 index 59cbf9123..000000000 --- a/.github/workflows/publish-docker-latest.yml +++ /dev/null @@ -1,30 +0,0 @@ 
---- -on: - release: - types: [released] - -name: Publish latest image to Docker Hub - -jobs: - docker-latest: - runs-on: docker - steps: - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push - id: docker_build - uses: docker/build-push-action@v2 - with: - push: true - platforms: linux/amd64,linux/arm64 - tags: getmeili/meilisearch:latest diff --git a/.github/workflows/publish-docker-tag.yml b/.github/workflows/publish-docker.yml similarity index 67% rename from .github/workflows/publish-docker-tag.yml rename to .github/workflows/publish-docker.yml index eca3d1d25..2e2cd926d 100644 --- a/.github/workflows/publish-docker-tag.yml +++ b/.github/workflows/publish-docker.yml @@ -3,11 +3,13 @@ on: push: tags: - '*' + release: + types: [released] name: Publish tagged image to Docker Hub jobs: - docker-tag: + docker: runs-on: docker steps: - name: Set up QEMU @@ -27,8 +29,12 @@ jobs: uses: docker/metadata-action@v3 with: images: getmeili/meilisearch + # Output 'latest' tag only when 'release' happen, instead of when 'push tag' event happend (default behavior). 
+ # See https://github.com/docker/metadata-action#latest-tag flavor: latest=false - tags: type=ref,event=tag + tags: | + type=ref,event=tag + type=raw,value=latest,enable=${{ github.event_name == 'release' && github.event.action == 'released' }} - name: Build and push id: docker_build From 39a1dcb32c84bb1056728cb9c23d5e0ad4768ba1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Wed, 8 Jun 2022 17:27:03 +0200 Subject: [PATCH 135/185] Update .github/workflows/publish-docker.yml --- .github/workflows/publish-docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index 2e2cd926d..7ec91ab4d 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -34,7 +34,7 @@ jobs: flavor: latest=false tags: | type=ref,event=tag - type=raw,value=latest,enable=${{ github.event_name == 'release' && github.event.action == 'released' }} + type=raw,value=latest,enable=${{ github.event_name == 'release' }} - name: Build and push id: docker_build From fc4990b968c6fc695fa936d5269c5bd11185d2bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Wed, 8 Jun 2022 17:59:18 +0200 Subject: [PATCH 136/185] Update version for next release (v0.28.0) --- Cargo.lock | 8 ++++---- meilisearch-auth/Cargo.toml | 2 +- meilisearch-error/Cargo.toml | 2 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8e3ac34e1..ef5d0f27c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2001,7 +2001,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "meilisearch-auth" -version = "0.27.1" +version = "0.28.0" dependencies = [ "base64", "enum-iterator", @@ -2019,7 +2019,7 @@ dependencies = [ [[package]] name = "meilisearch-error" -version = "0.27.1" +version = "0.28.0" 
dependencies = [ "actix-web", "proptest", @@ -2030,7 +2030,7 @@ dependencies = [ [[package]] name = "meilisearch-http" -version = "0.27.1" +version = "0.28.0" dependencies = [ "actix-cors", "actix-rt", @@ -2103,7 +2103,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "0.27.1" +version = "0.28.0" dependencies = [ "actix-rt", "actix-web", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index fe76561d8..275456d60 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-auth" -version = "0.27.1" +version = "0.28.0" edition = "2021" [dependencies] diff --git a/meilisearch-error/Cargo.toml b/meilisearch-error/Cargo.toml index e47e46b47..8d2203144 100644 --- a/meilisearch-error/Cargo.toml +++ b/meilisearch-error/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-error" -version = "0.27.1" +version = "0.28.0" authors = ["marin "] edition = "2021" diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index ac4fed7b1..53ff0b665 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -4,7 +4,7 @@ description = "Meilisearch HTTP server" edition = "2021" license = "MIT" name = "meilisearch-http" -version = "0.27.1" +version = "0.28.0" [[bin]] name = "meilisearch" diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 730061675..b1e5f8623 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-lib" -version = "0.27.1" +version = "0.28.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 108b3520de606a8f6dc78cd11f209b24f670c6de Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 7 Jun 2022 10:44:13 +0200 Subject: [PATCH 137/185] fix blocking auth controller dump --- meilisearch-lib/src/dump/handler.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git 
a/meilisearch-lib/src/dump/handler.rs b/meilisearch-lib/src/dump/handler.rs index 16bfd6a55..faa146867 100644 --- a/meilisearch-lib/src/dump/handler.rs +++ b/meilisearch-lib/src/dump/handler.rs @@ -16,7 +16,8 @@ pub fn generate_uid() -> String { } mod real { - use std::{fs::File, path::PathBuf, sync::Arc}; + use std::path::PathBuf; + use std::sync::Arc; use log::{info, trace}; use meilisearch_auth::AuthController; @@ -88,8 +89,13 @@ mod real { create_dir_all(&temp_dump_path.join("indexes")).await?; - // TODO: this is blocking!! - AuthController::dump(&self.db_path, &temp_dump_path)?; + let db_path = self.db_path.clone(); + let temp_dump_path_clone = temp_dump_path.clone(); + tokio::task::spawn_blocking(move || -> Result<()> { + AuthController::dump(db_path, temp_dump_path_clone)?; + Ok(()) + }) + .await??; TaskStore::dump( self.env.clone(), &temp_dump_path, From 2b944ecd893c3421baf529ffa060aecb74e5a88f Mon Sep 17 00:00:00 2001 From: walter Date: Wed, 8 Jun 2022 19:56:01 -0400 Subject: [PATCH 138/185] Remove IndexUid::new and replace with IndexUid::from_str --- meilisearch-lib/src/index_controller/mod.rs | 4 +++- meilisearch-lib/src/index_resolver/mod.rs | 12 ------------ 2 files changed, 3 insertions(+), 13 deletions(-) diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index ecca9ac63..81dd5b7c7 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -3,6 +3,7 @@ use std::collections::BTreeMap; use std::fmt; use std::io::Cursor; use std::path::{Path, PathBuf}; +use std::str::FromStr; use std::sync::Arc; use std::time::Duration; @@ -23,6 +24,7 @@ use crate::dump::{self, load_dump, DumpHandler}; use crate::index::{ Checked, Document, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings, Unchecked, }; +use crate::index_resolver::error::IndexResolverError; use crate::options::{IndexerOpts, SchedulerConfig}; use crate::snapshot::{load_snapshot, 
SnapshotService}; use crate::tasks::error::TaskError; @@ -356,7 +358,7 @@ where } pub async fn register_update(&self, uid: String, update: Update) -> Result { - let index_uid = IndexUid::new(uid)?; + let index_uid = IndexUid::from_str(&uid).map_err(IndexResolverError::from)?; let content = match update { Update::DeleteDocuments(ids) => TaskContent::DocumentDeletion { index_uid, diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index a7991e8ef..894eea9c5 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -48,18 +48,6 @@ pub fn create_index_resolver( } impl IndexUid { - pub fn new(uid: String) -> Result { - if !uid - .chars() - .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') - || !(1..=400).contains(&uid.len()) - { - Err(IndexResolverError::BadlyFormatted(uid)) - } else { - Ok(Self(uid)) - } - } - pub fn new_unchecked(s: impl AsRef) -> Self { Self(s.as_ref().to_string()) } From 3e5d6be86b1465d6619942c42822e55b68b912b5 Mon Sep 17 00:00:00 2001 From: walter Date: Wed, 8 Jun 2022 19:57:45 -0400 Subject: [PATCH 139/185] Rename TaskType::from_str parameter to 'type_' --- meilisearch-http/src/task.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 27105ba5b..1a1d4e4a8 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -62,24 +62,24 @@ impl Error for TaskTypeError {} impl FromStr for TaskType { type Err = TaskTypeError; - fn from_str(status: &str) -> Result { - if status.eq_ignore_ascii_case("indexCreation") { + fn from_str(type_: &str) -> Result { + if type_.eq_ignore_ascii_case("indexCreation") { Ok(TaskType::IndexCreation) - } else if status.eq_ignore_ascii_case("indexUpdate") { + } else if type_.eq_ignore_ascii_case("indexUpdate") { Ok(TaskType::IndexUpdate) - } else if status.eq_ignore_ascii_case("indexDeletion") { + } else 
if type_.eq_ignore_ascii_case("indexDeletion") { Ok(TaskType::IndexDeletion) - } else if status.eq_ignore_ascii_case("documentAdditionOrUpdate") { + } else if type_.eq_ignore_ascii_case("documentAdditionOrUpdate") { Ok(TaskType::DocumentAdditionOrUpdate) - } else if status.eq_ignore_ascii_case("documentDeletion") { + } else if type_.eq_ignore_ascii_case("documentDeletion") { Ok(TaskType::DocumentDeletion) - } else if status.eq_ignore_ascii_case("settingsUpdate") { + } else if type_.eq_ignore_ascii_case("settingsUpdate") { Ok(TaskType::SettingsUpdate) - } else if status.eq_ignore_ascii_case("dumpCreation") { + } else if type_.eq_ignore_ascii_case("dumpCreation") { Ok(TaskType::DumpCreation) } else { Err(TaskTypeError { - invalid_type: status.to_string(), + invalid_type: type_.to_string(), }) } } From 96d4fd54bbd26193a2ec09e2fc5d7560289e2439 Mon Sep 17 00:00:00 2001 From: walter Date: Wed, 8 Jun 2022 19:58:47 -0400 Subject: [PATCH 140/185] Change the index uid format check for better legibility --- meilisearch-lib/src/index_resolver/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 894eea9c5..12a5e007b 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -110,7 +110,8 @@ impl FromStr for IndexUid { if !uid .chars() .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') - || !(1..=400).contains(&uid.len()) + || uid.is_empty() + || uid.len() > 400 { Err(IndexUidFormatError { invalid_uid: uid.to_string(), From c9246145274634f3df1b17bd578dae7eb6f94ddd Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 9 Jun 2022 10:54:28 +0200 Subject: [PATCH 141/185] Bump milli to 0.29.2 --- Cargo.lock | 16 ++++++++-------- meilisearch-auth/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8e3ac34e1..b2e3d2c98 100644 
--- a/Cargo.lock +++ b/Cargo.lock @@ -1123,8 +1123,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.29.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" +version = "0.29.2" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" dependencies = [ "nom", "nom_locate", @@ -1148,8 +1148,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.29.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" +version = "0.29.2" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" dependencies = [ "serde_json", ] @@ -1661,8 +1661,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.29.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" +version = "0.29.2" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" dependencies = [ "serde_json", ] @@ -2189,8 +2189,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.29.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.1#7313d6c5331e7dc13e9ded70b60b1f56dd7e583c" +version = "0.29.2" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" dependencies = [ "bimap", "bincode", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index fe76561d8..f798314fe 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -8,7 +8,7 @@ base64 = "0.13.0" enum-iterator = "0.7.0" hmac = "0.12.1" meilisearch-error = { path = "../meilisearch-error" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.1" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.2" } rand = "0.8.4" serde = { 
version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 730061675..1311b66d8 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -30,7 +30,7 @@ lazy_static = "1.4.0" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-error = { path = "../meilisearch-error" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.1" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.2" } mime = "0.3.16" num_cpus = "1.13.1" obkv = "0.2.0" From 5450b5ced3cca3669b7c164f4a07becba3733c77 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 8 Jun 2022 17:08:10 +0200 Subject: [PATCH 142/185] Add the faceting.max_values_per_facet setting --- .../src/routes/indexes/settings.rs | 28 +++++++++++++++++++ meilisearch-lib/src/index/updates.rs | 28 +++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 9efa825f8..7cf11dc31 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -282,6 +282,34 @@ make_setting_route!( } ); +make_setting_route!( + "/faceting", + patch, + meilisearch_lib::index::updates::TypoSettings, + faceting, + "faceting", + analytics, + |setting: &Option, req: &HttpRequest| { + use serde_json::json; + + analytics.publish( + "Faceting Updated".to_string(), + json!({ + "faceting": { + "max_values_per_facet": setting + .as_ref() + .and_then(|s| s.max_values_per_facet + .as_ref() + .set() + .map(|s| s.one_typo.set())) + .flatten(), + }, + }), + Some(req), + ); + } +); + macro_rules! 
generate_configure { ($($mod:ident),*) => { pub fn configure(cfg: &mut web::ServiceConfig) { diff --git a/meilisearch-lib/src/index/updates.rs b/meilisearch-lib/src/index/updates.rs index 3aefa1f5e..6f1c9350b 100644 --- a/meilisearch-lib/src/index/updates.rs +++ b/meilisearch-lib/src/index/updates.rs @@ -68,6 +68,17 @@ pub struct TypoSettings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] pub disable_on_attributes: Setting>, } + +#[cfg_attr(test, derive(proptest_derive::Arbitrary))] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct FacetingSettings { + #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub max_values_per_facet: Setting, +} + /// Holds all the settings for an index. `T` can either be `Checked` if they represents settings /// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the later case, a /// call to `check` will return a `Settings` from a `Settings`. @@ -114,6 +125,9 @@ pub struct Settings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] pub typo_tolerance: Setting, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] + pub faceting: Setting, #[serde(skip)] pub _kind: PhantomData, @@ -131,6 +145,7 @@ impl Settings { synonyms: Setting::Reset, distinct_attribute: Setting::Reset, typo_tolerance: Setting::Reset, + faceting: Setting::Reset, _kind: PhantomData, } } @@ -146,6 +161,7 @@ impl Settings { synonyms, distinct_attribute, typo_tolerance, + faceting, .. 
} = self; @@ -159,6 +175,7 @@ impl Settings { synonyms, distinct_attribute, typo_tolerance, + faceting, _kind: PhantomData, } } @@ -198,6 +215,7 @@ impl Settings { synonyms: self.synonyms, distinct_attribute: self.distinct_attribute, typo_tolerance: self.typo_tolerance, + faceting: self.faceting, _kind: PhantomData, } } @@ -427,6 +445,16 @@ pub fn apply_settings_to_builder( } Setting::NotSet => (), } + + match settings.faceting { + Setting::Set(ref value) => match value.max_values_per_facet { + Setting::Set(val) => builder.set_max_values_per_facet(val), + Setting::Reset => builder.reset_max_values_per_facet(), + Setting::NotSet => (), + }, + Setting::Reset => builder.reset_max_values_per_facet(), + Setting::NotSet => (), + } } #[cfg(test)] From b96399d24bdbed3f9313ba790dc4d3c428289a03 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 8 Jun 2022 18:03:56 +0200 Subject: [PATCH 143/185] Plug the faceting.max_values_per_facet setting --- .../src/routes/indexes/settings.rs | 10 ++-------- meilisearch-http/tests/dumps/mod.rs | 18 +++++++++--------- .../tests/settings/get_settings.rs | 14 +++++++++++++- meilisearch-lib/src/index/index.rs | 14 ++++++++++++-- meilisearch-lib/src/index/search.rs | 7 +++++++ meilisearch-lib/src/index/updates.rs | 2 ++ 6 files changed, 45 insertions(+), 20 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 7cf11dc31..5620f9768 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -285,7 +285,7 @@ make_setting_route!( make_setting_route!( "/faceting", patch, - meilisearch_lib::index::updates::TypoSettings, + meilisearch_lib::index::updates::FacetingSettings, faceting, "faceting", analytics, @@ -296,13 +296,7 @@ make_setting_route!( "Faceting Updated".to_string(), json!({ "faceting": { - "max_values_per_facet": setting - .as_ref() - .and_then(|s| s.max_values_per_facet - .as_ref() - .set() - .map(|s| 
s.one_typo.set())) - .flatten(), + "max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()), }, }), Some(req), diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index c26b0e06e..8f1c88bd9 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -61,7 +61,7 @@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -125,7 +125,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({ "displayedAttributes": ["title", "genres", 
"overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -189,7 +189,7 @@ async fn import_dump_v2_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }}) ); let (tasks, code) = index.list_tasks().await; @@ -253,7 +253,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], 
"rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -317,7 +317,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -381,7 +381,7 @@ async fn 
import_dump_v3_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -445,7 +445,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": 
{"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -509,7 +509,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }}) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; @@ -573,7 +573,7 @@ async fn import_dump_v4_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": 
[], "disableOnAttributes": [] }}) + json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) ); let (tasks, code) = index.list_tasks().await; diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index d3ac47625..94cc674a6 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -24,6 +24,12 @@ static DEFAULT_SETTINGS_VALUES: Lazy> = Lazy::new(| ); map.insert("stop_words", json!([])); map.insert("synonyms", json!({})); + map.insert( + "faceting", + json!({ + "maxValuesByFacet": json!(100), + }), + ); map }); @@ -43,7 +49,7 @@ async fn get_settings() { let (response, code) = index.settings().await; assert_eq!(code, 200); let settings = response.as_object().unwrap(); - assert_eq!(settings.keys().len(), 9); + assert_eq!(settings.keys().len(), 10); assert_eq!(settings["displayedAttributes"], json!(["*"])); assert_eq!(settings["searchableAttributes"], json!(["*"])); assert_eq!(settings["filterableAttributes"], json!([])); @@ -61,6 +67,12 @@ async fn get_settings() { ]) ); assert_eq!(settings["stopWords"], json!([])); + assert_eq!( + settings["faceting"], + json!({ + "maxValuesPerFacet": 100 + }) + ); } #[actix_rt::test] diff --git a/meilisearch-lib/src/index/index.rs b/meilisearch-lib/src/index/index.rs index 1fe191c41..84b81a0ac 100644 --- a/meilisearch-lib/src/index/index.rs +++ b/meilisearch-lib/src/index/index.rs @@ -8,7 +8,7 @@ use std::sync::Arc; use 
fst::IntoStreamer; use milli::heed::{EnvOpenOptions, RoTxn}; use milli::update::{IndexerConfig, Setting}; -use milli::{obkv_to_json, FieldDistribution}; +use milli::{obkv_to_json, FieldDistribution, DEFAULT_VALUES_PER_FACET}; use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; use time::OffsetDateTime; @@ -18,7 +18,7 @@ use crate::EnvSizer; use super::error::IndexError; use super::error::Result; -use super::updates::{MinWordSizeTyposSetting, TypoSettings}; +use super::updates::{FacetingSettings, MinWordSizeTyposSetting, TypoSettings}; use super::{Checked, Settings}; pub type Document = Map; @@ -193,6 +193,15 @@ impl Index { disable_on_attributes: Setting::Set(disabled_attributes), }; + let faceting = FacetingSettings { + max_values_per_facet: Setting::Set( + self.max_values_per_facet(txn)? + .unwrap_or(DEFAULT_VALUES_PER_FACET), + ), + }; + + dbg!(&faceting); + Ok(Settings { displayed_attributes: match displayed_attributes { Some(attrs) => Setting::Set(attrs), @@ -212,6 +221,7 @@ impl Index { }, synonyms: Setting::Set(synonyms), typo_tolerance: Setting::Set(typo_tolerance), + faceting: Setting::Set(faceting), _kind: PhantomData, }) } diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 6a4a0a672..25d619457 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -7,6 +7,7 @@ use either::Either; use milli::tokenizer::TokenizerBuilder; use milli::{ AscDesc, FieldId, FieldsIdsMap, Filter, FormatOptions, MatchBounds, MatcherBuilder, SortError, + DEFAULT_VALUES_PER_FACET, }; use regex::Regex; use serde::{Deserialize, Serialize}; @@ -223,6 +224,12 @@ impl Index { let facet_distribution = match query.facets { Some(ref fields) => { let mut facet_distribution = self.facets_distribution(&rtxn); + + let max_values_by_facet = self + .max_values_per_facet(&rtxn)? 
+ .unwrap_or(DEFAULT_VALUES_PER_FACET); + facet_distribution.max_values_per_facet(max_values_by_facet); + if fields.iter().all(|f| f != "*") { facet_distribution.facets(fields); } diff --git a/meilisearch-lib/src/index/updates.rs b/meilisearch-lib/src/index/updates.rs index 6f1c9350b..ab7d08bb5 100644 --- a/meilisearch-lib/src/index/updates.rs +++ b/meilisearch-lib/src/index/updates.rs @@ -484,6 +484,7 @@ pub(crate) mod test { synonyms: Setting::NotSet, distinct_attribute: Setting::NotSet, typo_tolerance: Setting::NotSet, + faceting: Setting::NotSet, _kind: PhantomData::, }; @@ -506,6 +507,7 @@ pub(crate) mod test { synonyms: Setting::NotSet, distinct_attribute: Setting::NotSet, typo_tolerance: Setting::NotSet, + faceting: Setting::NotSet, _kind: PhantomData::, }; From 1e3dcbea3f32208d97f61fd33c40638ad73d6f13 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 9 Jun 2022 10:17:55 +0200 Subject: [PATCH 144/185] Plug the pagination.limited_to setting --- .../src/routes/indexes/settings.rs | 22 ++++++++++++++ meilisearch-http/tests/dumps/mod.rs | 18 ++++++------ .../tests/settings/get_settings.rs | 10 +++++-- meilisearch-lib/src/index/index.rs | 11 +++++-- meilisearch-lib/src/index/search.rs | 10 +++++-- meilisearch-lib/src/index/updates.rs | 29 +++++++++++++++++++ 6 files changed, 84 insertions(+), 16 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 5620f9768..2d0b47121 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -304,6 +304,28 @@ make_setting_route!( } ); +make_setting_route!( + "/pagination", + patch, + meilisearch_lib::index::updates::PaginationSettings, + pagination, + "pagination", + analytics, + |setting: &Option, req: &HttpRequest| { + use serde_json::json; + + analytics.publish( + "Pagination Updated".to_string(), + json!({ + "pagination": { + "limited_to": setting.as_ref().and_then(|s| 
s.limited_to.set()), + }, + }), + Some(req), + ); + } +); + macro_rules! generate_configure { ($($mod:ident),*) => { pub fn configure(cfg: &mut web::ServiceConfig) { diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 8f1c88bd9..75562a5a2 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -61,7 +61,7 @@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -125,7 +125,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, 
"disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -189,7 +189,7 @@ async fn import_dump_v2_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }}) + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 }}) ); let 
(tasks, code) = index.list_tasks().await; @@ -253,7 +253,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -317,7 +317,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", 
"proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -381,7 +381,7 @@ async fn import_dump_v3_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -445,7 +445,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", 
"exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -509,7 +509,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } 
}) ); let (tasks, code) = index.list_tasks().await; @@ -573,7 +573,7 @@ async fn import_dump_v4_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 } }) + json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) ); let (tasks, code) = index.list_tasks().await; diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index 94cc674a6..1cc60d652 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -49,7 +49,7 @@ async fn get_settings() { let (response, code) = index.settings().await; assert_eq!(code, 200); let settings = response.as_object().unwrap(); - assert_eq!(settings.keys().len(), 10); + assert_eq!(settings.keys().len(), 11); assert_eq!(settings["displayedAttributes"], json!(["*"])); assert_eq!(settings["searchableAttributes"], json!(["*"])); 
assert_eq!(settings["filterableAttributes"], json!([])); @@ -70,7 +70,13 @@ async fn get_settings() { assert_eq!( settings["faceting"], json!({ - "maxValuesPerFacet": 100 + "maxValuesPerFacet": 100, + }) + ); + assert_eq!( + settings["pagination"], + json!({ + "limitedTo": 1000, }) ); } diff --git a/meilisearch-lib/src/index/index.rs b/meilisearch-lib/src/index/index.rs index 84b81a0ac..d4772b73b 100644 --- a/meilisearch-lib/src/index/index.rs +++ b/meilisearch-lib/src/index/index.rs @@ -14,11 +14,12 @@ use serde_json::{Map, Value}; use time::OffsetDateTime; use uuid::Uuid; +use crate::index::search::DEFAULT_PAGINATION_LIMITED_TO; use crate::EnvSizer; use super::error::IndexError; use super::error::Result; -use super::updates::{FacetingSettings, MinWordSizeTyposSetting, TypoSettings}; +use super::updates::{FacetingSettings, MinWordSizeTyposSetting, PaginationSettings, TypoSettings}; use super::{Checked, Settings}; pub type Document = Map; @@ -200,7 +201,12 @@ impl Index { ), }; - dbg!(&faceting); + let pagination = PaginationSettings { + limited_to: Setting::Set( + self.pagination_limited_to(txn)? + .unwrap_or(DEFAULT_PAGINATION_LIMITED_TO), + ), + }; Ok(Settings { displayed_attributes: match displayed_attributes { @@ -222,6 +228,7 @@ impl Index { synonyms: Setting::Set(synonyms), typo_tolerance: Setting::Set(typo_tolerance), faceting: Setting::Set(faceting), + pagination: Setting::Set(pagination), _kind: PhantomData, }) } diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 25d619457..781a5bb66 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -29,7 +29,7 @@ pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "".to_string(); /// The maximimum number of results that the engine /// will be able to return in one search call. 
-pub const HARD_RESULT_LIMIT: usize = 1000; +pub const DEFAULT_PAGINATION_LIMITED_TO: usize = 1000; #[derive(Deserialize, Debug, Clone, PartialEq)] #[serde(rename_all = "camelCase", deny_unknown_fields)] @@ -91,10 +91,14 @@ impl Index { search.query(query); } + let pagination_limited_to = self + .pagination_limited_to(&rtxn)? + .unwrap_or(DEFAULT_PAGINATION_LIMITED_TO); + // Make sure that a user can't get more documents than the hard limit, // we align that on the offset too. - let offset = min(query.offset.unwrap_or(0), HARD_RESULT_LIMIT); - let limit = min(query.limit, HARD_RESULT_LIMIT.saturating_sub(offset)); + let offset = min(query.offset.unwrap_or(0), pagination_limited_to); + let limit = min(query.limit, pagination_limited_to.saturating_sub(offset)); search.offset(offset); search.limit(limit); diff --git a/meilisearch-lib/src/index/updates.rs b/meilisearch-lib/src/index/updates.rs index ab7d08bb5..95edbbf9d 100644 --- a/meilisearch-lib/src/index/updates.rs +++ b/meilisearch-lib/src/index/updates.rs @@ -79,6 +79,16 @@ pub struct FacetingSettings { pub max_values_per_facet: Setting, } +#[cfg_attr(test, derive(proptest_derive::Arbitrary))] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct PaginationSettings { + #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub limited_to: Setting, +} + /// Holds all the settings for an index. `T` can either be `Checked` if they represents settings /// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the later case, a /// call to `check` will return a `Settings` from a `Settings`. 
@@ -128,6 +138,9 @@ pub struct Settings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] pub faceting: Setting, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] + pub pagination: Setting, #[serde(skip)] pub _kind: PhantomData, @@ -146,6 +159,7 @@ impl Settings { distinct_attribute: Setting::Reset, typo_tolerance: Setting::Reset, faceting: Setting::Reset, + pagination: Setting::Reset, _kind: PhantomData, } } @@ -162,6 +176,7 @@ impl Settings { distinct_attribute, typo_tolerance, faceting, + pagination, .. } = self; @@ -176,6 +191,7 @@ impl Settings { distinct_attribute, typo_tolerance, faceting, + pagination, _kind: PhantomData, } } @@ -216,6 +232,7 @@ impl Settings { distinct_attribute: self.distinct_attribute, typo_tolerance: self.typo_tolerance, faceting: self.faceting, + pagination: self.pagination, _kind: PhantomData, } } @@ -455,6 +472,16 @@ pub fn apply_settings_to_builder( Setting::Reset => builder.reset_max_values_per_facet(), Setting::NotSet => (), } + + match settings.pagination { + Setting::Set(ref value) => match value.limited_to { + Setting::Set(val) => builder.set_pagination_limited_to(val), + Setting::Reset => builder.reset_pagination_limited_to(), + Setting::NotSet => (), + }, + Setting::Reset => builder.reset_pagination_limited_to(), + Setting::NotSet => (), + } } #[cfg(test)] @@ -485,6 +512,7 @@ pub(crate) mod test { distinct_attribute: Setting::NotSet, typo_tolerance: Setting::NotSet, faceting: Setting::NotSet, + pagination: Setting::NotSet, _kind: PhantomData::, }; @@ -508,6 +536,7 @@ pub(crate) mod test { distinct_attribute: Setting::NotSet, typo_tolerance: Setting::NotSet, faceting: Setting::NotSet, + pagination: Setting::NotSet, _kind: PhantomData::, }; From 5cd13cc303d6296fa9a301a244d6764924ed984c Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 9 Jun 2022 10:41:46 +0200 
Subject: [PATCH 145/185] Add a test to validate the faceting.max_values_per_facet setting --- meilisearch-http/tests/search/mod.rs | 49 ++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/meilisearch-http/tests/search/mod.rs b/meilisearch-http/tests/search/mod.rs index 98893dac5..f244aa423 100644 --- a/meilisearch-http/tests/search/mod.rs +++ b/meilisearch-http/tests/search/mod.rs @@ -605,3 +605,52 @@ async fn search_is_hard_limited() { ) .await; } + +#[actix_rt::test] +async fn faceting_max_values_per_facet() { + let server = Server::new().await; + let index = server.index("test"); + + index + .update_settings(json!({ "filterableAttributes": ["number"] })) + .await; + + let documents: Vec<_> = (0..10_000) + .map(|id| json!({ "id": id, "number": id * 10 })) + .collect(); + index.add_documents(json!(documents), None).await; + index.wait_task(1).await; + + index + .search( + json!({ + "facets": ["number"] + }), + |response, code| { + assert_eq!(code, 200, "{}", response); + let numbers = response["facetDistribution"]["number"].as_object().unwrap(); + assert_eq!(numbers.len(), 100); + }, + ) + .await; + + index + .update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })) + .await; + index.wait_task(2).await; + + index + .search( + json!({ + "facets": ["number"] + }), + |response, code| { + assert_eq!(code, 200, "{}", response); + let numbers = dbg!(&response)["facetDistribution"]["number"] + .as_object() + .unwrap(); + assert_eq!(numbers.len(), 10_000); + }, + ) + .await; +} From 6f0d3472b151dff6ddebdbc9f643963b2bccab62 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 9 Jun 2022 10:48:32 +0200 Subject: [PATCH 146/185] Change the test for the new pagination.limited_to setting --- meilisearch-http/tests/search/mod.rs | 62 ++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/meilisearch-http/tests/search/mod.rs b/meilisearch-http/tests/search/mod.rs index f244aa423..02cdc751f 100644 --- 
a/meilisearch-http/tests/search/mod.rs +++ b/meilisearch-http/tests/search/mod.rs @@ -565,6 +565,36 @@ async fn placeholder_search_is_hard_limited() { }, ) .await; + + index + .update_settings(json!({ "pagination": { "limitedTo": 10_000 } })) + .await; + index.wait_task(1).await; + + index + .search( + json!({ + "limit": 1500, + }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!(response["hits"].as_array().unwrap().len(), 1200); + }, + ) + .await; + + index + .search( + json!({ + "offset": 1000, + "limit": 400, + }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!(response["hits"].as_array().unwrap().len(), 200); + }, + ) + .await; } #[actix_rt::test] @@ -604,6 +634,38 @@ async fn search_is_hard_limited() { }, ) .await; + + index + .update_settings(json!({ "pagination": { "limitedTo": 10_000 } })) + .await; + index.wait_task(1).await; + + index + .search( + json!({ + "q": "unique", + "limit": 1500, + }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!(response["hits"].as_array().unwrap().len(), 1200); + }, + ) + .await; + + index + .search( + json!({ + "q": "unique", + "offset": 1000, + "limit": 400, + }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!(response["hits"].as_array().unwrap().len(), 200); + }, + ) + .await; } #[actix_rt::test] From 4a494ad2fa899c00ffafe1e889975a2a85a86ef6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Thu, 9 Jun 2022 11:50:20 +0200 Subject: [PATCH 147/185] Add schedule to the CI --- .../{publish-docker.yml => publish-docker-images.yml} | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) rename .github/workflows/{publish-docker.yml => publish-docker-images.yml} (73%) diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker-images.yml similarity index 73% rename from .github/workflows/publish-docker.yml rename to .github/workflows/publish-docker-images.yml index 
7ec91ab4d..b9ea50cb3 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker-images.yml @@ -1,12 +1,14 @@ --- on: + schedule: + - cron: '0 4 * * *' # Every day at 4:00am push: tags: - '*' release: types: [released] -name: Publish tagged image to Docker Hub +name: Publish tagged images to Docker Hub jobs: docker: @@ -19,6 +21,7 @@ jobs: uses: docker/setup-buildx-action@v1 - name: Login to DockerHub + if: github.event_name != 'schedule' uses: docker/login-action@v1 with: username: ${{ secrets.DOCKER_USERNAME }} @@ -29,7 +32,7 @@ jobs: uses: docker/metadata-action@v3 with: images: getmeili/meilisearch - # Output 'latest' tag only when 'release' happen, instead of when 'push tag' event happend (default behavior). + # The lastest tag is only pushed for the official Meilisearch release # See https://github.com/docker/metadata-action#latest-tag flavor: latest=false tags: | @@ -40,6 +43,7 @@ jobs: id: docker_build uses: docker/build-push-action@v2 with: - push: true + # We do not push tags for the cron jobs, this is only for test purposes + push: ${{ github.event_name != 'schedule' }} platforms: linux/amd64,linux/arm64 tags: ${{ steps.meta.outputs.tags }} From 02c5c193a2041df924708129daa03dbbde5b00dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Thu, 9 Jun 2022 14:48:50 +0200 Subject: [PATCH 148/185] Update CONTRIBUTING.md to add the release process --- CONTRIBUTING.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c2674c4d1..7d6531ad8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -78,6 +78,19 @@ Some notes on GitHub PRs: The draft PRs are recommended when you want to show that you are working on something and make your work visible. - The branch related to the PR must be **up-to-date with `main`** before merging. 
Fortunately, this project uses [Bors](https://github.com/bors-ng/bors-ng) to automatically enforce this requirement without the PR author having to rebase manually. +## Release Process (for internal team only) + +Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/). + +### Automation to rebase and Merge the PRs + +This project integrates a bot that helps us manage pull requests merging.
+_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._ + +### How to Publish a new Release + +The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release. +


Thank you again for reading this through, we can not wait to begin to work with you if you made your way through this contributing guide ❤️ From 232b2baaa30eb72348b7c8acac70517fd308eb1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Thu, 9 Jun 2022 14:49:49 +0200 Subject: [PATCH 149/185] Update TOC --- CONTRIBUTING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7d6531ad8..9b733665c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,6 +9,7 @@ Remember that there are many ways to contribute other than writing code: writing - [How to Contribute](#how-to-contribute) - [Development Workflow](#development-workflow) - [Git Guidelines](#git-guidelines) +- [Release Process (for internal team only)](#release-process-for-internal-team-only) ## Assumptions From fbba67fbe9d22850fabd6dd60ce0ab3d424ab263 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 11:00:07 +0200 Subject: [PATCH 150/185] add mocker to IndexResolver --- meilisearch-lib/src/index_resolver/mod.rs | 99 +++++++++++++++++++++++ 1 file changed, 99 insertions(+) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index ff03ed0f8..f608ded3d 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -571,6 +571,105 @@ mod test { } } + pub enum MockIndexResolver { + Real(super::real::IndexResolver), + Mock(Mocker), + } + + impl MockIndexResolver { + pub fn load_dump( + src: impl AsRef, + dst: impl AsRef, + index_db_size: usize, + env: Arc, + indexer_opts: &IndexerOpts, + ) -> anyhow::Result<()> { + super::real::IndexResolver::load_dump(src, dst, index_db_size, env, indexer_opts) + } + } + + impl MockIndexResolver + where + U: IndexMetaStore, + I: IndexStore, + { + pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self { + Self::Real(super::real::IndexResolver { + index_uuid_store, + index_store, + 
file_store, + }) + } + + pub fn mock(mocker: Mocker) -> Self { + Self::Mock(mocker) + } + + pub async fn process_document_addition_batch(&self, tasks: Vec) -> Vec { + match self { + IndexResolver::Real(r) => r.process_document_addition_batch(tasks).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn process_task(&self, task: &Task) -> Result { + match self { + IndexResolver::Real(r) => r.process_task(task).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn dump(&self, path: impl AsRef) -> Result<()> { + match self { + IndexResolver::Real(r) => r.dump(path).await, + IndexResolver::Mock(_) => todo!(), + } + } + + /// Get or create an index with name `uid`. + pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result { + match self { + IndexResolver::Real(r) => r.get_or_create_index(uid, task_id).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn list(&self) -> Result> { + match self { + IndexResolver::Real(r) => r.list().await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn delete_index(&self, uid: String) -> Result { + match self { + IndexResolver::Real(r) => r.delete_index(uid).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn get_index(&self, uid: String) -> Result { + match self { + IndexResolver::Real(r) => r.get_index(uid).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result { + match self { + IndexResolver::Real(r) => r.get_index_creation_task_id(index_uid).await, + IndexResolver::Mock(_) => todo!(), + } + } + + pub async fn delete_content_file(&self, content_uuid: Uuid) -> Result<()> { + match self { + IndexResolver::Real(r) => r.delete_content_file(content_uuid).await, + IndexResolver::Mock(_) => todo!(), + } + } + } + // TODO: ignoring this test, it has become too complex to maintain, and rather implement // handler logic test. // proptest! 
{ From b594d49def61cfb70d0c0c419ad66d2b17057a56 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 12:15:36 +0200 Subject: [PATCH 151/185] add IndexResolver BatchHandler tests --- meilisearch-lib/src/index_resolver/mod.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index f608ded3d..19ba051fe 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -608,14 +608,16 @@ mod test { pub async fn process_document_addition_batch(&self, tasks: Vec) -> Vec { match self { IndexResolver::Real(r) => r.process_document_addition_batch(tasks).await, - IndexResolver::Mock(_) => todo!(), + IndexResolver::Mock(m) => unsafe { + m.get("process_document_addition_batch").call(tasks) + }, } } pub async fn process_task(&self, task: &Task) -> Result { match self { IndexResolver::Real(r) => r.process_task(task).await, - IndexResolver::Mock(_) => todo!(), + IndexResolver::Mock(m) => unsafe { m.get("process_task").call(task) }, } } @@ -665,7 +667,9 @@ mod test { pub async fn delete_content_file(&self, content_uuid: Uuid) -> Result<()> { match self { IndexResolver::Real(r) => r.delete_content_file(content_uuid).await, - IndexResolver::Mock(_) => todo!(), + IndexResolver::Mock(m) => unsafe { + m.get("delete_content_file").call(content_uuid) + }, } } } From 64e3096790cfe9e5c06d00cc2add4aa07cc9e3ca Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 15:04:33 +0200 Subject: [PATCH 152/185] process_task updates task events --- meilisearch-lib/src/index_resolver/mod.rs | 114 ++---------------- .../src/tasks/handlers/dump_handler.rs | 2 +- .../tasks/handlers/index_resolver_handler.rs | 11 +- meilisearch-lib/src/tasks/task.rs | 4 +- 4 files changed, 16 insertions(+), 115 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 19ba051fe..97265e509 100644 --- 
a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -265,7 +265,7 @@ mod real { Ok(()) } - pub async fn process_task(&self, task: &Task) -> Result { + async fn process_task_inner(&self, task: &Task) -> Result { match &task.content { TaskContent::DocumentAddition { .. } => { panic!("updates should be handled by batch") @@ -354,6 +354,13 @@ mod real { } } + pub async fn process_task(&self, task: &mut Task) { + match self.process_task_inner(&task).await { + Ok(res) => task.events.push(TaskEvent::succeeded(res)), + Err(e) => task.events.push(TaskEvent::failed(e)), + } + } + pub async fn dump(&self, path: impl AsRef) -> Result<()> { for (_, index) in self.list().await? { index.dump(&path)?; @@ -511,110 +518,7 @@ mod test { } } - pub async fn process_task(&self, task: &Task) -> Result { - match self { - IndexResolver::Real(r) => r.process_task(task).await, - IndexResolver::Mock(m) => unsafe { m.get("process_task").call(task) }, - } - } - - pub async fn dump(&self, path: impl AsRef) -> Result<()> { - match self { - IndexResolver::Real(r) => r.dump(path).await, - IndexResolver::Mock(_) => todo!(), - } - } - - /// Get or create an index with name `uid`. 
- pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result { - match self { - IndexResolver::Real(r) => r.get_or_create_index(uid, task_id).await, - IndexResolver::Mock(_) => todo!(), - } - } - - pub async fn list(&self) -> Result> { - match self { - IndexResolver::Real(r) => r.list().await, - IndexResolver::Mock(_) => todo!(), - } - } - - pub async fn delete_index(&self, uid: String) -> Result { - match self { - IndexResolver::Real(r) => r.delete_index(uid).await, - IndexResolver::Mock(_) => todo!(), - } - } - - pub async fn get_index(&self, uid: String) -> Result { - match self { - IndexResolver::Real(r) => r.get_index(uid).await, - IndexResolver::Mock(_) => todo!(), - } - } - - pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result { - match self { - IndexResolver::Real(r) => r.get_index_creation_task_id(index_uid).await, - IndexResolver::Mock(_) => todo!(), - } - } - - pub async fn delete_content_file(&self, content_uuid: Uuid) -> Result<()> { - match self { - IndexResolver::Real(r) => r.delete_content_file(content_uuid).await, - IndexResolver::Mock(m) => unsafe { - m.get("delete_content_file").call(content_uuid) - }, - } - } - } - - pub enum MockIndexResolver { - Real(super::real::IndexResolver), - Mock(Mocker), - } - - impl MockIndexResolver { - pub fn load_dump( - src: impl AsRef, - dst: impl AsRef, - index_db_size: usize, - env: Arc, - indexer_opts: &IndexerOpts, - ) -> anyhow::Result<()> { - super::real::IndexResolver::load_dump(src, dst, index_db_size, env, indexer_opts) - } - } - - impl MockIndexResolver - where - U: IndexMetaStore, - I: IndexStore, - { - pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self { - Self::Real(super::real::IndexResolver { - index_uuid_store, - index_store, - file_store, - }) - } - - pub fn mock(mocker: Mocker) -> Self { - Self::Mock(mocker) - } - - pub async fn process_document_addition_batch(&self, tasks: Vec) -> Vec { - match self { - 
IndexResolver::Real(r) => r.process_document_addition_batch(tasks).await, - IndexResolver::Mock(m) => unsafe { - m.get("process_document_addition_batch").call(tasks) - }, - } - } - - pub async fn process_task(&self, task: &Task) -> Result { + pub async fn process_task(&self, task: &mut Task) { match self { IndexResolver::Real(r) => r.process_task(task).await, IndexResolver::Mock(m) => unsafe { m.get("process_task").call(task) }, diff --git a/meilisearch-lib/src/tasks/handlers/dump_handler.rs b/meilisearch-lib/src/tasks/handlers/dump_handler.rs index c708dadcc..c0833e4c7 100644 --- a/meilisearch-lib/src/tasks/handlers/dump_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/dump_handler.rs @@ -27,7 +27,7 @@ where .content .push_event(TaskEvent::succeeded(TaskResult::Other)); } - Err(e) => batch.content.push_event(TaskEvent::failed(e.into())), + Err(e) => batch.content.push_event(TaskEvent::failed(e)), } batch } diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index de624106c..21751cd1c 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -1,7 +1,6 @@ use crate::index_resolver::IndexResolver; use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore}; use crate::tasks::batch::{Batch, BatchContent}; -use crate::tasks::task::TaskEvent; use crate::tasks::BatchHandler; #[async_trait::async_trait] @@ -24,10 +23,9 @@ where .process_document_addition_batch(std::mem::take(tasks)) .await; } - BatchContent::IndexUpdate(ref mut task) => match self.process_task(task).await { - Ok(success) => task.events.push(TaskEvent::succeeded(success)), - Err(err) => task.events.push(TaskEvent::failed(err.into())), - }, + BatchContent::IndexUpdate(ref mut task) => { + self.process_task(task).await; + } _ => unreachable!(), } @@ -54,7 +52,6 @@ mod test { use crate::index_resolver::{ error::Result 
as IndexResult, index_store::MockIndexStore, meta_store::MockIndexMetaStore, }; - use crate::tasks::task::TaskResult; use crate::tasks::{ handlers::test::task_to_batch, task::{Task, TaskContent}, @@ -181,7 +178,7 @@ mod test { } TaskContent::Dump { .. } => (), _ => { - mocker.when::<&Task, IndexResult>("process_task").then(|_| Ok(TaskResult::Other)); + mocker.when::<&mut Task, ()>("process_task").then(|_| ()); } } let index_resolver: IndexResolver = IndexResolver::mock(mocker); diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index f19f6cbfe..cf9ab0520 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -68,9 +68,9 @@ impl TaskEvent { } } - pub fn failed(error: ResponseError) -> Self { + pub fn failed(error: impl Into) -> Self { Self::Failed { - error, + error: error.into(), timestamp: OffsetDateTime::now_utc(), } } From 8fc3b7d3b0d76e31d768ad74c8a3836cb621d4d8 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 15:11:50 +0200 Subject: [PATCH 153/185] refactor process_document_addition_batch --- meilisearch-lib/src/index_resolver/mod.rs | 11 +++++------ .../src/tasks/handlers/index_resolver_handler.rs | 6 ++---- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 97265e509..316528647 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -170,7 +170,7 @@ mod real { } } - pub async fn process_document_addition_batch(&self, mut tasks: Vec) -> Vec { + pub async fn process_document_addition_batch(&self, tasks: &mut [Task]) { fn get_content_uuid(task: &Task) -> Uuid { match task { Task { @@ -218,7 +218,8 @@ mod real { timestamp: now, }); } - return tasks; + + return; } }; @@ -253,8 +254,6 @@ mod real { for task in tasks.iter_mut() { task.events.push(event.clone()); } - - tasks } _ => panic!("invalid batch!"), } @@ -355,7 +354,7 @@ mod real 
{ } pub async fn process_task(&self, task: &mut Task) { - match self.process_task_inner(&task).await { + match self.process_task_inner(task).await { Ok(res) => task.events.push(TaskEvent::succeeded(res)), Err(e) => task.events.push(TaskEvent::failed(e)), } @@ -509,7 +508,7 @@ mod test { Self::Mock(mocker) } - pub async fn process_document_addition_batch(&self, tasks: Vec) -> Vec { + pub async fn process_document_addition_batch(&self, tasks: &mut [Task]) { match self { IndexResolver::Real(r) => r.process_document_addition_batch(tasks).await, IndexResolver::Mock(m) => unsafe { diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index 21751cd1c..0975ba912 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -19,9 +19,7 @@ where async fn process_batch(&self, mut batch: Batch) -> Batch { match batch.content { BatchContent::DocumentsAdditionBatch(ref mut tasks) => { - *tasks = self - .process_document_addition_batch(std::mem::take(tasks)) - .await; + self.process_document_addition_batch(tasks).await; } BatchContent::IndexUpdate(ref mut task) => { self.process_task(task).await; @@ -174,7 +172,7 @@ mod test { let mocker = Mocker::default(); match task.content { TaskContent::DocumentAddition { .. } => { - mocker.when::, Vec>("process_document_addition_batch").then(|tasks| tasks); + mocker.when::<&mut [Task], ()>("process_document_addition_batch").then(|_| ()); } TaskContent::Dump { .. 
} => (), _ => { From 36cb09eb251a9c2da4028a2ca9f44508aaeea2fb Mon Sep 17 00:00:00 2001 From: pierre-l Date: Mon, 6 Jun 2022 12:38:46 +0200 Subject: [PATCH 154/185] Add a new `meilisearch_types` crate Move `meilisearch_error` to `meilisearch_types::error` Move `meilisearch_lib::index_resolver::IndexUid` to `meilisearch_types::index_uid` Add a new `InvalidIndexUid` error in `meilisearch_types::index_uid` --- Cargo.lock | 28 +++--- Cargo.toml | 2 +- meilisearch-auth/Cargo.toml | 2 +- meilisearch-auth/src/error.rs | 4 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-http/src/error.rs | 2 +- .../src/extractors/authentication/error.rs | 2 +- .../src/extractors/authentication/mod.rs | 5 +- meilisearch-http/src/lib.rs | 2 +- meilisearch-http/src/routes/api_key.rs | 2 +- meilisearch-http/src/routes/dump.rs | 2 +- .../src/routes/indexes/documents.rs | 2 +- meilisearch-http/src/routes/indexes/mod.rs | 2 +- meilisearch-http/src/routes/indexes/search.rs | 2 +- .../src/routes/indexes/settings.rs | 4 +- meilisearch-http/src/routes/mod.rs | 2 +- meilisearch-http/src/routes/tasks.rs | 5 +- meilisearch-http/src/task.rs | 2 +- .../tests/documents/add_documents.rs | 4 +- meilisearch-http/tests/index/create_index.rs | 2 +- .../tests/settings/get_settings.rs | 2 +- meilisearch-lib/Cargo.toml | 4 +- meilisearch-lib/src/document_formats.rs | 3 +- meilisearch-lib/src/dump/compat/v2.rs | 2 +- meilisearch-lib/src/dump/compat/v3.rs | 4 +- meilisearch-lib/src/dump/compat/v4.rs | 4 +- meilisearch-lib/src/dump/error.rs | 3 +- meilisearch-lib/src/error.rs | 2 +- meilisearch-lib/src/index/error.rs | 3 +- meilisearch-lib/src/index_controller/error.rs | 11 ++- meilisearch-lib/src/index_controller/mod.rs | 2 +- .../src/index_controller/updates/error.rs | 2 +- meilisearch-lib/src/index_resolver/error.rs | 8 +- meilisearch-lib/src/index_resolver/mod.rs | 90 +------------------ meilisearch-lib/src/lib.rs | 2 +- meilisearch-lib/src/tasks/error.rs | 3 +- .../tasks/handlers/index_resolver_handler.rs | 2 
+- meilisearch-lib/src/tasks/scheduler.rs | 3 +- meilisearch-lib/src/tasks/task.rs | 4 +- meilisearch-lib/src/tasks/task_store/mod.rs | 6 +- meilisearch-lib/src/tasks/task_store/store.rs | 2 +- .../Cargo.toml | 2 +- .../lib.rs => meilisearch-types/src/error.rs | 0 meilisearch-types/src/index_uid.rs | 85 ++++++++++++++++++ meilisearch-types/src/lib.rs | 2 + 45 files changed, 172 insertions(+), 157 deletions(-) rename {meilisearch-error => meilisearch-types}/Cargo.toml (93%) rename meilisearch-error/src/lib.rs => meilisearch-types/src/error.rs (100%) create mode 100644 meilisearch-types/src/index_uid.rs create mode 100644 meilisearch-types/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 073411f95..b15b69a6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2006,7 +2006,7 @@ dependencies = [ "base64", "enum-iterator", "hmac", - "meilisearch-error", + "meilisearch-types", "milli", "rand", "serde", @@ -2017,17 +2017,6 @@ dependencies = [ "uuid", ] -[[package]] -name = "meilisearch-error" -version = "0.28.0" -dependencies = [ - "actix-web", - "proptest", - "proptest-derive", - "serde", - "serde_json", -] - [[package]] name = "meilisearch-http" version = "0.28.0" @@ -2061,8 +2050,8 @@ dependencies = [ "manifest-dir-macros", "maplit", "meilisearch-auth", - "meilisearch-error", "meilisearch-lib", + "meilisearch-types", "mime", "num_cpus", "obkv", @@ -2129,7 +2118,7 @@ dependencies = [ "lazy_static", "log", "meilisearch-auth", - "meilisearch-error", + "meilisearch-types", "milli", "mime", "mockall", @@ -2163,6 +2152,17 @@ dependencies = [ "whoami", ] +[[package]] +name = "meilisearch-types" +version = "0.28.0" +dependencies = [ + "actix-web", + "proptest", + "proptest-derive", + "serde", + "serde_json", +] + [[package]] name = "memchr" version = "2.5.0" diff --git a/Cargo.toml b/Cargo.toml index 33b961446..99ec43528 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,7 @@ resolver = "2" members = [ "meilisearch-http", - "meilisearch-error", + "meilisearch-types", 
"meilisearch-lib", "meilisearch-auth", "permissive-json-pointer", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 2174b607c..8f00f3c99 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" base64 = "0.13.0" enum-iterator = "0.7.0" hmac = "0.12.1" -meilisearch-error = { path = "../meilisearch-error" } +meilisearch-types = { path = "../meilisearch-types" } milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.2" } rand = "0.8.4" serde = { version = "1.0.136", features = ["derive"] } diff --git a/meilisearch-auth/src/error.rs b/meilisearch-auth/src/error.rs index 8be97daaf..bb96be789 100644 --- a/meilisearch-auth/src/error.rs +++ b/meilisearch-auth/src/error.rs @@ -1,7 +1,7 @@ use std::error::Error; -use meilisearch_error::ErrorCode; -use meilisearch_error::{internal_error, Code}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::internal_error; use serde_json::Value; pub type Result = std::result::Result; diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 53ff0b665..763337888 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -45,7 +45,7 @@ itertools = "0.10.3" jsonwebtoken = "8.0.1" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } -meilisearch-error = { path = "../meilisearch-error" } +meilisearch-types = { path = "../meilisearch-types" } meilisearch-lib = { path = "../meilisearch-lib" } mime = "0.3.16" num_cpus = "1.13.1" diff --git a/meilisearch-http/src/error.rs b/meilisearch-http/src/error.rs index b2b6c1b3c..86b7c1964 100644 --- a/meilisearch-http/src/error.rs +++ b/meilisearch-http/src/error.rs @@ -1,6 +1,6 @@ use actix_web as aweb; use aweb::error::{JsonPayloadError, QueryPayloadError}; -use meilisearch_error::{Code, ErrorCode, ResponseError}; +use meilisearch_types::error::{Code, ErrorCode, ResponseError}; #[derive(Debug, thiserror::Error)] pub enum 
MeilisearchHttpError { diff --git a/meilisearch-http/src/extractors/authentication/error.rs b/meilisearch-http/src/extractors/authentication/error.rs index 6d362dcbf..bb78c53d0 100644 --- a/meilisearch-http/src/extractors/authentication/error.rs +++ b/meilisearch-http/src/extractors/authentication/error.rs @@ -1,4 +1,4 @@ -use meilisearch_error::{Code, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; #[derive(Debug, thiserror::Error)] pub enum AuthenticationError { diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index 7732bd7fa..22f080a6f 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -5,12 +5,11 @@ use std::ops::Deref; use std::pin::Pin; use actix_web::FromRequest; +use error::AuthenticationError; use futures::future::err; use futures::Future; -use meilisearch_error::{Code, ResponseError}; - -use error::AuthenticationError; use meilisearch_auth::{AuthController, AuthFilter}; +use meilisearch_types::error::{Code, ResponseError}; pub struct GuardedData { data: D, diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs index 201013bc6..bfdb829d4 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -148,10 +148,10 @@ macro_rules! 
create_app { use actix_web::middleware::TrailingSlash; use actix_web::App; use actix_web::{middleware, web}; - use meilisearch_error::ResponseError; use meilisearch_http::error::MeilisearchHttpError; use meilisearch_http::routes; use meilisearch_http::{configure_data, dashboard}; + use meilisearch_types::error::ResponseError; App::new() .configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics)) diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index 87db3f890..3513d23ca 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -7,7 +7,7 @@ use time::OffsetDateTime; use uuid::Uuid; use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key}; -use meilisearch_error::{Code, ResponseError}; +use meilisearch_types::error::{Code, ResponseError}; use crate::extractors::{ authentication::{policies::*, GuardedData}, diff --git a/meilisearch-http/src/routes/dump.rs b/meilisearch-http/src/routes/dump.rs index 55469b0b4..4d9106ee0 100644 --- a/meilisearch-http/src/routes/dump.rs +++ b/meilisearch-http/src/routes/dump.rs @@ -1,7 +1,7 @@ use actix_web::{web, HttpRequest, HttpResponse}; use log::debug; -use meilisearch_error::ResponseError; use meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; use serde_json::json; use crate::analytics::Analytics; diff --git a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index b5f578a56..1d97e0736 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -6,10 +6,10 @@ use actix_web::{web, HttpRequest, HttpResponse}; use bstr::ByteSlice; use futures::{Stream, StreamExt}; use log::debug; -use meilisearch_error::ResponseError; use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update}; use meilisearch_lib::milli::update::IndexDocumentsMethod; use 
meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; use mime::Mime; use once_cell::sync::Lazy; use serde::Deserialize; diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index f5ebf83a9..ed6196ebd 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -1,8 +1,8 @@ use actix_web::{web, HttpRequest, HttpResponse}; use log::debug; -use meilisearch_error::ResponseError; use meilisearch_lib::index_controller::Update; use meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; use serde::{Deserialize, Serialize}; use serde_json::json; use time::OffsetDateTime; diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index 4eaa65b9d..3f8fecd5c 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -1,12 +1,12 @@ use actix_web::{web, HttpRequest, HttpResponse}; use log::debug; use meilisearch_auth::IndexSearchRules; -use meilisearch_error::ResponseError; use meilisearch_lib::index::{ SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, }; use meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; use serde::Deserialize; use serde_cs::vec::CS; use serde_json::Value; diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 2d0b47121..962fe7d82 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -1,10 +1,10 @@ use log::debug; use actix_web::{web, HttpRequest, HttpResponse}; -use meilisearch_error::ResponseError; use meilisearch_lib::index::{Settings, Unchecked}; use meilisearch_lib::index_controller::Update; use meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; use 
serde_json::json; use crate::analytics::Analytics; @@ -21,7 +21,7 @@ macro_rules! make_setting_route { use meilisearch_lib::milli::update::Setting; use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch}; - use meilisearch_error::ResponseError; + use meilisearch_types::error::ResponseError; use $crate::analytics::Analytics; use $crate::extractors::authentication::{policies::*, GuardedData}; use $crate::extractors::sequential_extractor::SeqHandler; diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index a438d12d7..97351b584 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -6,9 +6,9 @@ use serde::{Deserialize, Serialize}; use time::OffsetDateTime; -use meilisearch_error::ResponseError; use meilisearch_lib::index::{Settings, Unchecked}; use meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; use crate::extractors::authentication::{policies::*, GuardedData}; diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 14716ff6b..b8fc428a1 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -1,8 +1,9 @@ use actix_web::{web, HttpRequest, HttpResponse}; -use meilisearch_error::ResponseError; use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId}; use meilisearch_lib::tasks::TaskFilter; -use meilisearch_lib::{IndexUid, MeiliSearch}; +use meilisearch_lib::MeiliSearch; +use meilisearch_types::error::ResponseError; +use meilisearch_types::index_uid::IndexUid; use serde::Deserialize; use serde_cs::vec::CS; use serde_json::json; diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 1a1d4e4a8..06bba1f76 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -3,12 +3,12 @@ use std::fmt::{self, Write}; use std::str::FromStr; use std::write; -use meilisearch_error::ResponseError; use meilisearch_lib::index::{Settings, 
Unchecked}; use meilisearch_lib::tasks::batch::BatchId; use meilisearch_lib::tasks::task::{ DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult, }; +use meilisearch_types::error::ResponseError; use serde::{Deserialize, Serialize, Serializer}; use time::{Duration, OffsetDateTime}; diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index d6235c8b7..c3baf0cb0 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -638,7 +638,7 @@ async fn error_document_add_create_index_bad_uid() { let (response, code) = index.add_documents(json!([{"id": 1}]), None).await; let expected_response = json!({ - "message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid" @@ -655,7 +655,7 @@ async fn error_document_update_create_index_bad_uid() { let (response, code) = index.update_documents(json!([{"id": 1}]), None).await; let expected_response = json!({ - "message": "`883 fj!` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid" diff --git a/meilisearch-http/tests/index/create_index.rs b/meilisearch-http/tests/index/create_index.rs index 0e134600e..a1c508e1f 100644 --- a/meilisearch-http/tests/index/create_index.rs +++ b/meilisearch-http/tests/index/create_index.rs @@ -102,7 +102,7 @@ async fn error_create_with_invalid_index_uid() { let (response, code) = index.create(None).await; let expected_response = json!({ - "message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "message": "invalid index uid `test test#!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid" diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index 1cc60d652..0862b15c5 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -197,7 +197,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() { assert_eq!(code, 400); let expected = json!({ - "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "message": "invalid index uid `test##! 
`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid"}); diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 425f931b1..fa4e6587a 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -29,7 +29,7 @@ itertools = "0.10.3" lazy_static = "1.4.0" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } -meilisearch-error = { path = "../meilisearch-error" } +meilisearch-types = { path = "../meilisearch-types" } milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.2" } mime = "0.3.16" num_cpus = "1.13.1" @@ -59,7 +59,7 @@ whoami = { version = "1.2.1", optional = true } [dev-dependencies] actix-rt = "2.7.0" -meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] } +meilisearch-types = { path = "../meilisearch-types", features = ["test-traits"] } mockall = "0.11.0" nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"} paste = "1.0.6" diff --git a/meilisearch-lib/src/document_formats.rs b/meilisearch-lib/src/document_formats.rs index 93c47afe8..de3d7f5d5 100644 --- a/meilisearch-lib/src/document_formats.rs +++ b/meilisearch-lib/src/document_formats.rs @@ -2,7 +2,8 @@ use std::borrow::Borrow; use std::fmt::{self, Debug, Display}; use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Seek, Write}; -use meilisearch_error::{internal_error, Code, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::internal_error; use milli::documents::DocumentBatchBuilder; type Result = std::result::Result; diff --git a/meilisearch-lib/src/dump/compat/v2.rs b/meilisearch-lib/src/dump/compat/v2.rs index a30e24794..364d894c4 100644 --- a/meilisearch-lib/src/dump/compat/v2.rs +++ 
b/meilisearch-lib/src/dump/compat/v2.rs @@ -1,5 +1,5 @@ use anyhow::bail; -use meilisearch_error::Code; +use meilisearch_types::error::Code; use milli::update::IndexDocumentsMethod; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; diff --git a/meilisearch-lib/src/dump/compat/v3.rs b/meilisearch-lib/src/dump/compat/v3.rs index 2044e3b60..61e31eccd 100644 --- a/meilisearch-lib/src/dump/compat/v3.rs +++ b/meilisearch-lib/src/dump/compat/v3.rs @@ -1,4 +1,5 @@ -use meilisearch_error::{Code, ResponseError}; +use meilisearch_types::error::{Code, ResponseError}; +use meilisearch_types::index_uid::IndexUid; use milli::update::IndexDocumentsMethod; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; @@ -6,7 +7,6 @@ use uuid::Uuid; use super::v4::{Task, TaskContent, TaskEvent}; use crate::index::{Settings, Unchecked}; -use crate::index_resolver::IndexUid; use crate::tasks::task::{DocumentDeletion, TaskId, TaskResult}; use super::v2; diff --git a/meilisearch-lib/src/dump/compat/v4.rs b/meilisearch-lib/src/dump/compat/v4.rs index 867bc7b63..c412e7f17 100644 --- a/meilisearch-lib/src/dump/compat/v4.rs +++ b/meilisearch-lib/src/dump/compat/v4.rs @@ -1,4 +1,5 @@ -use meilisearch_error::ResponseError; +use meilisearch_types::error::ResponseError; +use meilisearch_types::index_uid::IndexUid; use milli::update::IndexDocumentsMethod; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; @@ -9,7 +10,6 @@ use crate::tasks::batch::BatchId; use crate::tasks::task::{ DocumentDeletion, TaskContent as NewTaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult, }; -use crate::IndexUid; #[derive(Debug, Serialize, Deserialize)] pub struct Task { diff --git a/meilisearch-lib/src/dump/error.rs b/meilisearch-lib/src/dump/error.rs index da9010347..3f6e2aae5 100644 --- a/meilisearch-lib/src/dump/error.rs +++ b/meilisearch-lib/src/dump/error.rs @@ -1,5 +1,6 @@ use meilisearch_auth::error::AuthControllerError; -use meilisearch_error::{internal_error, Code, 
ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::internal_error; use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError}; diff --git a/meilisearch-lib/src/error.rs b/meilisearch-lib/src/error.rs index c3e7b8313..83e9263b4 100644 --- a/meilisearch-lib/src/error.rs +++ b/meilisearch-lib/src/error.rs @@ -1,7 +1,7 @@ use std::error::Error; use std::fmt; -use meilisearch_error::{Code, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; use milli::UserError; #[derive(Debug)] diff --git a/meilisearch-lib/src/index/error.rs b/meilisearch-lib/src/index/error.rs index 89a12a41f..e31fcc4a0 100644 --- a/meilisearch-lib/src/index/error.rs +++ b/meilisearch-lib/src/index/error.rs @@ -1,6 +1,7 @@ use std::error::Error; -use meilisearch_error::{internal_error, Code, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::internal_error; use serde_json::Value; use crate::{error::MilliError, update_file_store}; diff --git a/meilisearch-lib/src/index_controller/error.rs b/meilisearch-lib/src/index_controller/error.rs index 529887b6a..ab2dd142d 100644 --- a/meilisearch-lib/src/index_controller/error.rs +++ b/meilisearch-lib/src/index_controller/error.rs @@ -1,7 +1,8 @@ use std::error::Error; -use meilisearch_error::Code; -use meilisearch_error::{internal_error, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::index_uid::IndexUidFormatError; +use meilisearch_types::internal_error; use tokio::task::JoinError; use super::DocumentAdditionFormat; @@ -63,3 +64,9 @@ impl ErrorCode for IndexControllerError { } } } + +impl From for IndexControllerError { + fn from(err: IndexUidFormatError) -> Self { + IndexResolverError::from(err).into() + } +} diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 7d8b51fa8..88782c5ea 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ 
b/meilisearch-lib/src/index_controller/mod.rs @@ -11,6 +11,7 @@ use actix_web::error::PayloadError; use bytes::Bytes; use futures::Stream; use futures::StreamExt; +use meilisearch_types::index_uid::IndexUid; use milli::update::IndexDocumentsMethod; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; @@ -37,7 +38,6 @@ use error::Result; use self::error::IndexControllerError; use crate::index_resolver::index_store::{IndexStore, MapIndexStore}; use crate::index_resolver::meta_store::{HeedMetaStore, IndexMetaStore}; -pub use crate::index_resolver::IndexUid; use crate::index_resolver::{create_index_resolver, IndexResolver}; use crate::update_file_store::UpdateFileStore; diff --git a/meilisearch-lib/src/index_controller/updates/error.rs b/meilisearch-lib/src/index_controller/updates/error.rs index 434783041..7ecaa45c5 100644 --- a/meilisearch-lib/src/index_controller/updates/error.rs +++ b/meilisearch-lib/src/index_controller/updates/error.rs @@ -1,7 +1,7 @@ use std::error::Error; use std::fmt; -use meilisearch_error::{internal_error, Code, ErrorCode}; +use meilisearch_types::{internal_error, Code, ErrorCode}; use crate::{ document_formats::DocumentFormatError, diff --git a/meilisearch-lib/src/index_resolver/error.rs b/meilisearch-lib/src/index_resolver/error.rs index 610ec6c7c..d973d2229 100644 --- a/meilisearch-lib/src/index_resolver/error.rs +++ b/meilisearch-lib/src/index_resolver/error.rs @@ -1,6 +1,8 @@ use std::fmt; -use meilisearch_error::{internal_error, Code, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::index_uid::IndexUidFormatError; +use meilisearch_types::internal_error; use tokio::sync::mpsc::error::SendError as MpscSendError; use tokio::sync::oneshot::error::RecvError as OneshotRecvError; use uuid::Uuid; @@ -25,8 +27,8 @@ pub enum IndexResolverError { UuidAlreadyExists(Uuid), #[error("{0}")] Milli(#[from] milli::Error), - #[error("`{0}` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).")] - BadlyFormatted(String), + #[error("{0}")] + BadlyFormatted(#[from] IndexUidFormatError), } impl From> for IndexResolverError diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index ff03ed0f8..343efa321 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -2,20 +2,17 @@ pub mod error; pub mod index_store; pub mod meta_store; -use std::convert::{TryFrom, TryInto}; -use std::error::Error; -use std::fmt; +use std::convert::TryFrom; use std::path::Path; -use std::str::FromStr; use std::sync::Arc; use error::{IndexResolverError, Result}; use index_store::{IndexStore, MapIndexStore}; -use meilisearch_error::ResponseError; +use meilisearch_types::error::ResponseError; +use meilisearch_types::index_uid::IndexUid; use meta_store::{HeedMetaStore, IndexMetaStore}; use milli::heed::Env; use milli::update::{DocumentDeletionResult, IndexerConfig}; -use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use tokio::task::spawn_blocking; use uuid::Uuid; @@ -35,12 +32,6 @@ pub use real::IndexResolver; #[cfg(test)] pub use test::MockIndexResolver as IndexResolver; -/// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 -/// bytes long -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] -#[cfg_attr(test, derive(proptest_derive::Arbitrary))] -pub struct IndexUid(#[cfg_attr(test, proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String); - pub fn create_index_resolver( path: impl AsRef, index_size: usize, @@ -53,81 +44,6 @@ pub fn create_index_resolver( Ok(IndexResolver::new(uuid_store, index_store, file_store)) } -impl IndexUid { - pub fn new_unchecked(s: impl AsRef) -> Self { - Self(s.as_ref().to_string()) - } - - pub fn into_inner(self) -> String { - self.0 - } - - /// Return a reference over the inner str. 
- pub fn as_str(&self) -> &str { - &self.0 - } -} - -impl std::ops::Deref for IndexUid { - type Target = str; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl TryInto for String { - type Error = IndexUidFormatError; - - fn try_into(self) -> std::result::Result { - IndexUid::from_str(&self) - } -} - -#[derive(Debug)] -pub struct IndexUidFormatError { - invalid_uid: String, -} - -impl fmt::Display for IndexUidFormatError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "invalid index uid `{}`, the uid must be an integer \ - or a string containing only alphanumeric characters \ - a-z A-Z 0-9, hyphens - and underscores _.", - self.invalid_uid, - ) - } -} - -impl Error for IndexUidFormatError {} - -impl From for IndexResolverError { - fn from(error: IndexUidFormatError) -> Self { - Self::BadlyFormatted(error.invalid_uid) - } -} - -impl FromStr for IndexUid { - type Err = IndexUidFormatError; - - fn from_str(uid: &str) -> std::result::Result { - if !uid - .chars() - .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') - || uid.is_empty() - || uid.len() > 400 - { - Err(IndexUidFormatError { - invalid_uid: uid.to_string(), - }) - } else { - Ok(IndexUid(uid.to_string())) - } - } -} - mod real { use super::*; diff --git a/meilisearch-lib/src/lib.rs b/meilisearch-lib/src/lib.rs index 52da63027..3d3d5e860 100644 --- a/meilisearch-lib/src/lib.rs +++ b/meilisearch-lib/src/lib.rs @@ -13,7 +13,7 @@ mod update_file_store; use std::path::Path; -pub use index_controller::{IndexUid, MeiliSearch}; +pub use index_controller::MeiliSearch; pub use milli; pub use milli::heed; diff --git a/meilisearch-lib/src/tasks/error.rs b/meilisearch-lib/src/tasks/error.rs index d849b4c10..75fd7a591 100644 --- a/meilisearch-lib/src/tasks/error.rs +++ b/meilisearch-lib/src/tasks/error.rs @@ -1,4 +1,5 @@ -use meilisearch_error::{internal_error, Code, ErrorCode}; +use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::internal_error; 
use tokio::task::JoinError; use crate::update_file_store::UpdateFileStoreError; diff --git a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs index de624106c..f5f3e5c01 100644 --- a/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs +++ b/meilisearch-lib/src/tasks/handlers/index_resolver_handler.rs @@ -60,9 +60,9 @@ mod test { task::{Task, TaskContent}, }; use crate::update_file_store::{Result as FileStoreResult, UpdateFileStore}; - use crate::IndexUid; use super::*; + use meilisearch_types::index_uid::IndexUid; use milli::update::IndexDocumentsMethod; use nelson::Mocker; use proptest::prelude::*; diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 76294b6e7..8ce14fe8c 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -534,10 +534,11 @@ fn make_batch(tasks: &mut TaskQueue, config: &SchedulerConfig) -> Processing { #[cfg(test)] mod test { + use meilisearch_types::index_uid::IndexUid; use milli::update::IndexDocumentsMethod; use uuid::Uuid; - use crate::{index_resolver::IndexUid, tasks::task::TaskContent}; + use crate::tasks::task::TaskContent; use super::*; diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index f19f6cbfe..e463a92f5 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -1,4 +1,5 @@ -use meilisearch_error::ResponseError; +use meilisearch_types::error::ResponseError; +use meilisearch_types::index_uid::IndexUid; use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; @@ -6,7 +7,6 @@ use uuid::Uuid; use super::batch::BatchId; use crate::index::{Settings, Unchecked}; -use crate::index_resolver::IndexUid; pub type TaskId = u32; diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index 
33f4bfb50..e2b01afb8 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -267,13 +267,11 @@ impl TaskStore { #[cfg(test)] pub mod test { - use crate::{ - tasks::{scheduler::Processing, task_store::store::test::tmp_env}, - IndexUid, - }; + use crate::tasks::{scheduler::Processing, task_store::store::test::tmp_env}; use super::*; + use meilisearch_types::index_uid::IndexUid; use nelson::Mocker; use proptest::{ strategy::Strategy, diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index f044bd077..9dfe61c55 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -179,11 +179,11 @@ impl Store { #[cfg(test)] pub mod test { use itertools::Itertools; + use meilisearch_types::index_uid::IndexUid; use milli::heed::EnvOpenOptions; use nelson::Mocker; use tempfile::TempDir; - use crate::index_resolver::IndexUid; use crate::tasks::task::TaskContent; use super::*; diff --git a/meilisearch-error/Cargo.toml b/meilisearch-types/Cargo.toml similarity index 93% rename from meilisearch-error/Cargo.toml rename to meilisearch-types/Cargo.toml index 8d2203144..6949722e7 100644 --- a/meilisearch-error/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "meilisearch-error" +name = "meilisearch-types" version = "0.28.0" authors = ["marin "] edition = "2021" diff --git a/meilisearch-error/src/lib.rs b/meilisearch-types/src/error.rs similarity index 100% rename from meilisearch-error/src/lib.rs rename to meilisearch-types/src/error.rs diff --git a/meilisearch-types/src/index_uid.rs b/meilisearch-types/src/index_uid.rs new file mode 100644 index 000000000..de453572b --- /dev/null +++ b/meilisearch-types/src/index_uid.rs @@ -0,0 +1,85 @@ +use serde::{Deserialize, Serialize}; +use std::error::Error; +use std::fmt; +use std::str::FromStr; + +/// An index uid is composed of only ascii alphanumeric 
characters, - and _, between 1 and 400 +/// bytes long +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] +pub struct IndexUid( + #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String, +); + +impl IndexUid { + pub fn new_unchecked(s: impl AsRef) -> Self { + Self(s.as_ref().to_string()) + } + + pub fn into_inner(self) -> String { + self.0 + } + + /// Return a reference over the inner str. + pub fn as_str(&self) -> &str { + &self.0 + } +} + +impl std::ops::Deref for IndexUid { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl TryFrom for IndexUid { + type Error = IndexUidFormatError; + + fn try_from(uid: String) -> Result { + if !uid + .chars() + .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') + || uid.is_empty() + || uid.len() > 400 + { + Err(IndexUidFormatError { invalid_uid: uid }) + } else { + Ok(IndexUid(uid)) + } + } +} + +impl FromStr for IndexUid { + type Err = IndexUidFormatError; + + fn from_str(uid: &str) -> Result { + uid.to_string().try_into() + } +} + +impl From for String { + fn from(uid: IndexUid) -> Self { + uid.into_inner() + } +} + +#[derive(Debug)] +pub struct IndexUidFormatError { + pub invalid_uid: String, +} + +impl fmt::Display for IndexUidFormatError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "invalid index uid `{}`, the uid must be an integer \ + or a string containing only alphanumeric characters \ + a-z A-Z 0-9, hyphens - and underscores _.", + self.invalid_uid, + ) + } +} + +impl Error for IndexUidFormatError {} diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs new file mode 100644 index 000000000..cfc66c899 --- /dev/null +++ b/meilisearch-types/src/lib.rs @@ -0,0 +1,2 @@ +pub mod error; +pub mod index_uid; From b8745420dac265973da0e34716f161d05c8e0766 Mon Sep 17 00:00:00 2001 From: pierre-l Date: Mon, 6 Jun 2022 12:45:52 
+0200 Subject: [PATCH 155/185] Use the `IndexUid` and `StarOr` in `meilisearch_auth::Key` Move `meilisearch_http::routes::StarOr` to `meilisearch_types::star_or` Fixes #2158 --- meilisearch-auth/src/key.rs | 8 +- meilisearch-auth/src/lib.rs | 14 +- meilisearch-auth/src/store.rs | 6 +- meilisearch-http/src/routes/api_key.rs | 2 +- .../src/routes/indexes/documents.rs | 3 +- meilisearch-http/src/routes/mod.rs | 23 +-- meilisearch-http/src/routes/tasks.rs | 3 +- meilisearch-http/tests/auth/api_keys.rs | 26 ++++ meilisearch-types/src/lib.rs | 1 + meilisearch-types/src/star_or.rs | 138 ++++++++++++++++++ permissive-json-pointer/src/lib.rs | 2 +- 11 files changed, 190 insertions(+), 36 deletions(-) create mode 100644 meilisearch-types/src/star_or.rs diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs index 0e336a7db..eb72aaa72 100644 --- a/meilisearch-auth/src/key.rs +++ b/meilisearch-auth/src/key.rs @@ -2,6 +2,8 @@ use crate::action::Action; use crate::error::{AuthControllerError, Result}; use crate::store::KeyId; +use meilisearch_types::index_uid::IndexUid; +use meilisearch_types::star_or::StarOr; use serde::{Deserialize, Serialize}; use serde_json::{from_value, Value}; use time::format_description::well_known::Rfc3339; @@ -17,7 +19,7 @@ pub struct Key { pub name: Option, pub uid: KeyId, pub actions: Vec, - pub indexes: Vec, + pub indexes: Vec>, #[serde(with = "time::serde::rfc3339::option")] pub expires_at: Option, #[serde(with = "time::serde::rfc3339")] @@ -136,7 +138,7 @@ impl Key { description: Some("Use it for anything that is not a search operation. Caution! 
Do not expose it on a public frontend".to_string()), uid, actions: vec![Action::All], - indexes: vec!["*".to_string()], + indexes: vec![StarOr::Star], expires_at: None, created_at: now, updated_at: now, @@ -151,7 +153,7 @@ impl Key { description: Some("Use it to search from the frontend".to_string()), uid, actions: vec![Action::Search], - indexes: vec!["*".to_string()], + indexes: vec![StarOr::Star], expires_at: None, created_at: now, updated_at: now, diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index e41fd92f4..81443348a 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -5,6 +5,7 @@ mod key; mod store; use std::collections::{HashMap, HashSet}; +use std::ops::Deref; use std::path::Path; use std::sync::Arc; @@ -16,6 +17,7 @@ use uuid::Uuid; pub use action::{actions, Action}; use error::{AuthControllerError, Result}; pub use key::Key; +use meilisearch_types::star_or::StarOr; use store::generate_key_as_base64; pub use store::open_auth_store_env; use store::HeedAuthStore; @@ -87,20 +89,22 @@ impl AuthController { .get_api_key(uid)? .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?; - if !key.indexes.iter().any(|i| i.as_str() == "*") { + if !key.indexes.iter().any(|i| i == &StarOr::Star) { filters.search_rules = match search_rules { // Intersect search_rules with parent key authorized indexes. 
Some(search_rules) => SearchRules::Map( key.indexes .into_iter() .filter_map(|index| { - search_rules - .get_index_search_rules(&index) - .map(|index_search_rules| (index, Some(index_search_rules))) + search_rules.get_index_search_rules(index.deref()).map( + |index_search_rules| { + (String::from(index), Some(index_search_rules)) + }, + ) }) .collect(), ), - None => SearchRules::Set(key.indexes.into_iter().collect()), + None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()), }; } else if let Some(search_rules) = search_rules { filters.search_rules = search_rules; diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index d1af1b4ab..0355c4579 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -3,12 +3,14 @@ use std::cmp::Reverse; use std::convert::TryFrom; use std::convert::TryInto; use std::fs::create_dir_all; +use std::ops::Deref; use std::path::Path; use std::str; use std::sync::Arc; use enum_iterator::IntoEnumIterator; use hmac::{Hmac, Mac}; +use meilisearch_types::star_or::StarOr; use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; use milli::heed::{Database, Env, EnvOpenOptions, RwTxn}; use sha2::{Digest, Sha256}; @@ -92,7 +94,7 @@ impl HeedAuthStore { key.actions.clone() }; - let no_index_restriction = key.indexes.contains(&"*".to_owned()); + let no_index_restriction = key.indexes.contains(&StarOr::Star); for action in actions { if no_index_restriction { // If there is no index restriction we put None. 
@@ -102,7 +104,7 @@ impl HeedAuthStore { for index in key.indexes.iter() { db.put( &mut wtxn, - &(&uid, &action, Some(index.as_bytes())), + &(&uid, &action, Some(index.deref().as_bytes())), &key.expires_at, )?; } diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index 3513d23ca..7605fa644 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -151,7 +151,7 @@ impl KeyView { key: generated_key, uid: key.uid, actions: key.actions, - indexes: key.indexes, + indexes: key.indexes.into_iter().map(String::from).collect(), expires_at: key.expires_at, created_at: key.created_at, updated_at: key.updated_at, diff --git a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index 1d97e0736..2becc6db1 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -10,6 +10,7 @@ use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update}; use meilisearch_lib::milli::update::IndexDocumentsMethod; use meilisearch_lib::MeiliSearch; use meilisearch_types::error::ResponseError; +use meilisearch_types::star_or::StarOr; use mime::Mime; use once_cell::sync::Lazy; use serde::Deserialize; @@ -22,7 +23,7 @@ use crate::error::MeilisearchHttpError; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::payload::Payload; use crate::extractors::sequential_extractor::SeqHandler; -use crate::routes::{fold_star_or, PaginationView, StarOr}; +use crate::routes::{fold_star_or, PaginationView}; use crate::task::SummarizedTaskView; static ACCEPTED_CONTENT_TYPE: Lazy> = Lazy::new(|| { diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index 97351b584..f61854c48 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -1,5 +1,3 @@ -use std::str::FromStr; - use actix_web::{web, HttpResponse}; use 
log::debug; use serde::{Deserialize, Serialize}; @@ -9,6 +7,7 @@ use time::OffsetDateTime; use meilisearch_lib::index::{Settings, Unchecked}; use meilisearch_lib::MeiliSearch; use meilisearch_types::error::ResponseError; +use meilisearch_types::star_or::StarOr; use crate::extractors::authentication::{policies::*, GuardedData}; @@ -27,26 +26,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service(web::scope("/indexes").configure(indexes::configure)); } -/// A type that tries to match either a star (*) or -/// any other thing that implements `FromStr`. -#[derive(Debug)] -pub enum StarOr { - Star, - Other(T), -} - -impl FromStr for StarOr { - type Err = T::Err; - - fn from_str(s: &str) -> Result { - if s.trim() == "*" { - Ok(StarOr::Star) - } else { - T::from_str(s).map(StarOr::Other) - } - } -} - /// Extracts the raw values from the `StarOr` types and /// return None if a `StarOr::Star` is encountered. pub fn fold_star_or(content: impl IntoIterator>) -> Option diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index b8fc428a1..fed7fa634 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -4,6 +4,7 @@ use meilisearch_lib::tasks::TaskFilter; use meilisearch_lib::MeiliSearch; use meilisearch_types::error::ResponseError; use meilisearch_types::index_uid::IndexUid; +use meilisearch_types::star_or::StarOr; use serde::Deserialize; use serde_cs::vec::CS; use serde_json::json; @@ -13,7 +14,7 @@ use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; use crate::task::{TaskListView, TaskStatus, TaskType, TaskView}; -use super::{fold_star_or, StarOr}; +use super::fold_star_or; const DEFAULT_LIMIT: fn() -> usize = || 20; diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 28be81c91..9dcbd9b55 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ 
b/meilisearch-http/tests/auth/api_keys.rs @@ -358,6 +358,32 @@ async fn error_add_api_key_invalid_parameters_indexes() { assert_eq!(response, expected_response); } +#[actix_rt::test] +async fn error_add_api_key_invalid_index_uids() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "description": Value::Null, + "indexes": ["invalid index # / \\name with spaces"], + "actions": [ + "documents.add" + ], + "expiresAt": "2050-11-13T00:00:00" + }); + let (response, code) = server.add_api_key(content).await; + + let expected_response = json!({ + "message": r#"`indexes` field value `["invalid index # / \\name with spaces"]` is invalid. It should be an array of string representing index names."#, + "code": "invalid_api_key_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" + }); + + assert_eq!(response, expected_response); + assert_eq!(code, 400); +} + #[actix_rt::test] async fn error_add_api_key_invalid_parameters_actions() { let mut server = Server::new_auth().await; diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs index cfc66c899..2d685c2dc 100644 --- a/meilisearch-types/src/lib.rs +++ b/meilisearch-types/src/lib.rs @@ -1,2 +1,3 @@ pub mod error; pub mod index_uid; +pub mod star_or; diff --git a/meilisearch-types/src/star_or.rs b/meilisearch-types/src/star_or.rs new file mode 100644 index 000000000..02c9c3524 --- /dev/null +++ b/meilisearch-types/src/star_or.rs @@ -0,0 +1,138 @@ +use serde::de::Visitor; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::fmt::{Display, Formatter}; +use std::marker::PhantomData; +use std::ops::Deref; +use std::str::FromStr; + +/// A type that tries to match either a star (*) or +/// any other thing that implements `FromStr`. 
+#[derive(Debug)] +pub enum StarOr { + Star, + Other(T), +} + +impl FromStr for StarOr { + type Err = T::Err; + + fn from_str(s: &str) -> Result { + if s.trim() == "*" { + Ok(StarOr::Star) + } else { + T::from_str(s).map(StarOr::Other) + } + } +} + +impl> Deref for StarOr { + type Target = str; + + fn deref(&self) -> &Self::Target { + match self { + Self::Star => "*", + Self::Other(t) => t.deref(), + } + } +} + +impl> From> for String { + fn from(s: StarOr) -> Self { + match s { + StarOr::Star => "*".to_string(), + StarOr::Other(t) => t.into(), + } + } +} + +impl PartialEq for StarOr { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Star, Self::Star) => true, + (Self::Other(left), Self::Other(right)) if left.eq(right) => true, + _ => false, + } + } +} + +impl Eq for StarOr {} + +impl<'de, T, E> Deserialize<'de> for StarOr +where + T: FromStr, + E: Display, +{ + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + /// Serde can't differentiate between `StarOr::Star` and `StarOr::Other` without a tag. + /// Simply using `#[serde(untagged)]` + `#[serde(rename="*")]` will lead to attempting to + /// deserialize everything as a `StarOr::Other`, including "*". + /// [`#[serde(other)]`](https://serde.rs/variant-attrs.html#other) might have helped but is + /// not supported on untagged enums. 
+ struct StarOrVisitor(PhantomData); + + impl<'de, T, FE> Visitor<'de> for StarOrVisitor + where + T: FromStr, + FE: Display, + { + type Value = StarOr; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("a string") + } + + fn visit_str(self, v: &str) -> Result + where + SE: serde::de::Error, + { + match v { + "*" => Ok(StarOr::Star), + v => { + let other = FromStr::from_str(v).map_err(|e: T::Err| { + SE::custom(format!("Invalid `other` value: {}", e)) + })?; + Ok(StarOr::Other(other)) + } + } + } + } + + deserializer.deserialize_str(StarOrVisitor(PhantomData)) + } +} + +impl Serialize for StarOr +where + T: Deref, +{ + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + StarOr::Star => serializer.serialize_str("*"), + StarOr::Other(other) => serializer.serialize_str(other.deref()), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::{json, Value}; + + #[test] + fn star_or_serde_roundtrip() { + fn roundtrip(content: Value, expected: StarOr) { + let deserialized: StarOr = serde_json::from_value(content.clone()).unwrap(); + assert_eq!(deserialized, expected); + assert_eq!(content, serde_json::to_value(deserialized).unwrap()); + } + + roundtrip(json!("products"), StarOr::Other("products".to_string())); + roundtrip(json!("*"), StarOr::Star); + } +} diff --git a/permissive-json-pointer/src/lib.rs b/permissive-json-pointer/src/lib.rs index 56382beae..8f97ab2de 100644 --- a/permissive-json-pointer/src/lib.rs +++ b/permissive-json-pointer/src/lib.rs @@ -206,7 +206,7 @@ fn create_value(value: &Document, mut selectors: HashSet<&str>) -> Document { new_value } -fn create_array(array: &Vec, selectors: &HashSet<&str>) -> Vec { +fn create_array(array: &[Value], selectors: &HashSet<&str>) -> Vec { let mut res = Vec::new(); for value in array { From 0e416b4bcd08e71f411dc98fde3b0777549fa952 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 17:15:31 +0200 Subject: 
[PATCH 156/185] delete index test --- meilisearch-lib/src/index_resolver/mod.rs | 79 ++++++++++++++++++++++- 1 file changed, 78 insertions(+), 1 deletion(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 316528647..7d06f9f0d 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -470,8 +470,12 @@ mod real { #[cfg(test)] mod test { - use super::*; + use crate::index::IndexStats; + use super::{index_store::MockIndexStore, meta_store::MockIndexMetaStore, *}; + + use futures::future::ok; + use milli::FieldDistribution; use nelson::Mocker; pub enum MockIndexResolver { @@ -577,6 +581,79 @@ mod test { } } + #[actix_rt::test] + async fn test_remove_unknown_index() { + let mut meta_store = MockIndexMetaStore::new(); + meta_store + .expect_delete() + .once() + .returning(|_| Box::pin(ok(None))); + + let index_store = MockIndexStore::new(); + + let mocker = Mocker::default(); + let file_store = UpdateFileStore::mock(mocker); + + let index_resolver = IndexResolver::new(meta_store, index_store, file_store); + + let mut task = Task { + id: 1, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test"), + }, + events: Vec::new(), + }; + + index_resolver.process_task(&mut task).await; + + assert!(matches!(task.events[0], TaskEvent::Failed { .. 
})); + } + + #[actix_rt::test] + async fn test_remove_index() { + let mut meta_store = MockIndexMetaStore::new(); + meta_store.expect_delete().once().returning(|_| { + Box::pin(ok(Some(IndexMeta { + uuid: Uuid::new_v4(), + creation_task_id: 1, + }))) + }); + + let mut index_store = MockIndexStore::new(); + index_store.expect_delete().once().returning(|_| { + let mocker = Mocker::default(); + mocker.when::<(), ()>("close").then(|_| ()); + mocker + .when::<(), IndexResult>("stats") + .then(|_| { + Ok(IndexStats { + size: 10, + number_of_documents: 10, + is_indexing: None, + field_distribution: FieldDistribution::default(), + }) + }); + Box::pin(ok(Some(Index::mock(mocker)))) + }); + + let mocker = Mocker::default(); + let file_store = UpdateFileStore::mock(mocker); + + let index_resolver = IndexResolver::new(meta_store, index_store, file_store); + + let mut task = Task { + id: 1, + content: TaskContent::IndexDeletion { + index_uid: IndexUid::new_unchecked("test"), + }, + events: Vec::new(), + }; + + index_resolver.process_task(&mut task).await; + + assert!(matches!(task.events[0], TaskEvent::Succeeded { .. })); + } + // TODO: ignoring this test, it has become too complex to maintain, and rather implement // handler logic test. // proptest! { From 0333bad0573ceea53b6c702399e6a17cf7544b07 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 2 Jun 2022 17:31:51 +0200 Subject: [PATCH 157/185] delete documents test --- meilisearch-lib/src/index_resolver/mod.rs | 53 +++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 7d06f9f0d..c4c51700a 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -654,6 +654,59 @@ mod test { assert!(matches!(task.events[0], TaskEvent::Succeeded { .. 
})); } + #[actix_rt::test] + async fn test_delete_documents() { + let mut meta_store = MockIndexMetaStore::new(); + meta_store.expect_get().once().returning(|_| { + Box::pin(ok(( + "test".to_string(), + Some(IndexMeta { + uuid: Uuid::new_v4(), + creation_task_id: 1, + }), + ))) + }); + + let mut index_store = MockIndexStore::new(); + index_store.expect_get().once().returning(|_| { + let mocker = Mocker::default(); + mocker + .when::<(), IndexResult<()>>("clear_documents") + .once() + .then(|_| Ok(())); + mocker + .when::<(), IndexResult>("stats") + .once() + .then(|_| { + Ok(IndexStats { + size: 10, + number_of_documents: 10, + is_indexing: None, + field_distribution: FieldDistribution::default(), + }) + }); + Box::pin(ok(Some(Index::mock(mocker)))) + }); + + let mocker = Mocker::default(); + let file_store = UpdateFileStore::mock(mocker); + + let index_resolver = IndexResolver::new(meta_store, index_store, file_store); + + let mut task = Task { + id: 1, + content: TaskContent::DocumentDeletion { + deletion: DocumentDeletion::Clear, + index_uid: IndexUid::new_unchecked("test"), + }, + events: Vec::new(), + }; + + index_resolver.process_task(&mut task).await; + + assert!(matches!(task.events[0], TaskEvent::Succeeded { .. })); + } + // TODO: ignoring this test, it has become too complex to maintain, and rather implement // handler logic test. // proptest! 
{ From 354f7fb2bf007b7ab31c5deec9e196d31a80d880 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 6 Jun 2022 10:49:58 +0200 Subject: [PATCH 158/185] test index_update --- meilisearch-lib/src/index_resolver/mod.rs | 53 ++++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index c4c51700a..352ebe80e 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -472,7 +472,9 @@ mod real { mod test { use crate::index::IndexStats; - use super::{index_store::MockIndexStore, meta_store::MockIndexMetaStore, *}; + use super::index_store::MockIndexStore; + use super::meta_store::MockIndexMetaStore; + use super::*; use futures::future::ok; use milli::FieldDistribution; @@ -707,6 +709,55 @@ mod test { assert!(matches!(task.events[0], TaskEvent::Succeeded { .. })); } + #[actix_rt::test] + async fn test_index_update() { + let mut meta_store = MockIndexMetaStore::new(); + meta_store.expect_get().once().returning(|_| { + Box::pin(ok(( + "test".to_string(), + Some(IndexMeta { + uuid: Uuid::new_v4(), + creation_task_id: 1, + }), + ))) + }); + + let mut index_store = MockIndexStore::new(); + index_store.expect_get().once().returning(|_| { + let mocker = Mocker::default(); + + mocker + .when::>("update_primary_key") + .once() + .then(|_| { + Ok(crate::index::IndexMeta { + created_at: OffsetDateTime::now_utc(), + updated_at: OffsetDateTime::now_utc(), + primary_key: Some("key".to_string()), + }) + }); + Box::pin(ok(Some(Index::mock(mocker)))) + }); + + let mocker = Mocker::default(); + let file_store = UpdateFileStore::mock(mocker); + + let index_resolver = IndexResolver::new(meta_store, index_store, file_store); + + let mut task = Task { + id: 1, + content: TaskContent::IndexUpdate { + primary_key: Some("key".to_string()), + index_uid: IndexUid::new_unchecked("test"), + }, + events: Vec::new(), + }; + + 
index_resolver.process_task(&mut task).await; + + assert!(matches!(task.events[0], TaskEvent::Succeeded { .. })); + } + // TODO: ignoring this test, it has become too complex to maintain, and rather implement // handler logic test. // proptest! { From 9a6841c7ceab6ce82e4ddc8b49bff54210aa8263 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 6 Jun 2022 12:45:32 +0200 Subject: [PATCH 159/185] remove unused test --- meilisearch-lib/src/index_resolver/mod.rs | 171 ---------------------- 1 file changed, 171 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 352ebe80e..8ad85dc7b 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -757,175 +757,4 @@ mod test { assert!(matches!(task.events[0], TaskEvent::Succeeded { .. })); } - - // TODO: ignoring this test, it has become too complex to maintain, and rather implement - // handler logic test. - // proptest! { - // #[test] - // #[ignore] - // fn test_process_task( - // task in any::().prop_filter("IndexUid should be Some", |s| s.index_uid.is_some()), - // index_exists in any::(), - // index_op_fails in any::(), - // any_int in any::(), - // ) { - // actix_rt::System::new().block_on(async move { - // let uuid = Uuid::new_v4(); - // let mut index_store = MockIndexStore::new(); - // - // let mocker = Mocker::default(); - // - // // Return arbitrary data from index call. - // match &task.content { - // TaskContent::DocumentAddition{primary_key, ..} => { - // let result = move || if !index_op_fails { - // Ok(DocumentAdditionResult { indexed_documents: any_int, number_of_documents: any_int }) - // } else { - // // return this error because it's easy to generate... 
- // Err(IndexError::DocumentNotFound("a doc".into())) - // }; - // if primary_key.is_some() { - // mocker.when::>("update_primary_key") - // .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })); - // } - // mocker.when::<(IndexDocumentsMethod, Option, UpdateFileStore, IntoIter), IndexResult>("update_documents") - // .then(move |(_, _, _, _)| result()); - // } - // TaskContent::SettingsUpdate{..} => { - // let result = move || if !index_op_fails { - // Ok(()) - // } else { - // // return this error because it's easy to generate... - // Err(IndexError::DocumentNotFound("a doc".into())) - // }; - // mocker.when::<&Settings, IndexResult<()>>("update_settings") - // .then(move |_| result()); - // } - // TaskContent::DocumentDeletion(DocumentDeletion::Ids(_ids)) => { - // let result = move || if !index_op_fails { - // Ok(DocumentDeletionResult { deleted_documents: any_int as u64, remaining_documents: any_int as u64 }) - // } else { - // // return this error because it's easy to generate... - // Err(IndexError::DocumentNotFound("a doc".into())) - // }; - // - // mocker.when::<&[String], IndexResult>("delete_documents") - // .then(move |_| result()); - // }, - // TaskContent::DocumentDeletion(DocumentDeletion::Clear) => { - // let result = move || if !index_op_fails { - // Ok(()) - // } else { - // // return this error because it's easy to generate... 
- // Err(IndexError::DocumentNotFound("a doc".into())) - // }; - // mocker.when::<(), IndexResult<()>>("clear_documents") - // .then(move |_| result()); - // }, - // TaskContent::IndexDeletion => { - // mocker.when::<(), ()>("close") - // .times(index_exists as usize) - // .then(move |_| ()); - // } - // TaskContent::IndexUpdate { primary_key } - // | TaskContent::IndexCreation { primary_key } => { - // if primary_key.is_some() { - // let result = move || if !index_op_fails { - // Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }) - // } else { - // // return this error because it's easy to generate... - // Err(IndexError::DocumentNotFound("a doc".into())) - // }; - // mocker.when::>("update_primary_key") - // .then(move |_| result()); - // } - // } - // TaskContent::Dump { .. } => { } - // } - // - // mocker.when::<(), IndexResult>("stats") - // .then(|()| Ok(IndexStats { size: 0, number_of_documents: 0, is_indexing: Some(false), field_distribution: BTreeMap::new() })); - // - // let index = Index::mock(mocker); - // - // match &task.content { - // // an unexisting index should trigger an index creation in the folllowing cases: - // TaskContent::DocumentAddition { allow_index_creation: true, .. } - // | TaskContent::SettingsUpdate { allow_index_creation: true, is_deletion: false, .. } - // | TaskContent::IndexCreation { .. } if !index_exists => { - // index_store - // .expect_create() - // .once() - // .withf(move |&found| !index_exists || found == uuid) - // .returning(move |_| Box::pin(ok(index.clone()))); - // }, - // TaskContent::IndexDeletion => { - // index_store - // .expect_delete() - // // this is called only if the index.exists - // .times(index_exists as usize) - // .withf(move |&found| !index_exists || found == uuid) - // .returning(move |_| Box::pin(ok(Some(index.clone())))); - // } - // // if index already exists, create index will return an error - // TaskContent::IndexCreation { .. 
} if index_exists => (), - // TaskContent::Dump { .. } => (), - // // The index exists and get should be called - // _ if index_exists => { - // index_store - // .expect_get() - // .once() - // .withf(move |&found| found == uuid) - // .returning(move |_| Box::pin(ok(Some(index.clone())))); - // }, - // // the index doesn't exist and shouldn't be created, the uuidstore will return an error, and get_index will never be called. - // _ => (), - // } - // - // let mut uuid_store = MockIndexMetaStore::new(); - // uuid_store - // .expect_get() - // .returning(move |uid| { - // Box::pin(ok((uid, index_exists.then(|| crate::index_resolver::meta_store::IndexMeta {uuid, creation_task_id: 0 })))) - // }); - // - // // we sould only be creating an index if the index doesn't alredy exist - // uuid_store - // .expect_insert() - // .withf(move |_, _| !index_exists) - // .returning(|_, _| Box::pin(ok(()))); - // - // uuid_store - // .expect_delete() - // .times(matches!(task.content, TaskContent::IndexDeletion) as usize) - // .returning(move |_| Box::pin(ok(index_exists.then(|| crate::index_resolver::meta_store::IndexMeta { uuid, creation_task_id: 0})))); - // - // let mocker = Mocker::default(); - // let update_file_store = UpdateFileStore::mock(mocker); - // let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store); - // - // let batch = Batch { id: Some(1), created_at: OffsetDateTime::now_utc(), content: crate::tasks::batch::BatchContent::IndexUpdate(task.clone()) }; - // if index_resolver.accept(&batch) { - // let result = index_resolver.process_batch(batch).await; - // - // // Test for some expected output scenarios: - // // Index creation and deletion cannot fail because of a failed index op, since they - // // don't perform index ops. - // if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. 
}) - // || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. })) - // || (!index_exists && matches!(task.content, TaskContent::IndexDeletion - // | TaskContent::DocumentDeletion(_) - // | TaskContent::SettingsUpdate { is_deletion: true, ..} - // | TaskContent::SettingsUpdate { allow_index_creation: false, ..} - // | TaskContent::DocumentAddition { allow_index_creation: false, ..} - // | TaskContent::IndexUpdate { .. } )) - // { - // assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Failed { .. }), "{:?}", result); - // } else { - // assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeeded { .. }), "{:?}", result); - // } - // } - // }); - // } - // } } From 2063fbd9851bb366912a1393a709861e5b5a880e Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 9 Jun 2022 17:44:15 +0200 Subject: [PATCH 160/185] chore: bump milli --- Cargo.lock | 146 +++++++++++++++--------------------- meilisearch-auth/Cargo.toml | 2 +- meilisearch-lib/Cargo.toml | 2 +- 3 files changed, 64 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b15b69a6c..1bd47e355 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -78,7 +78,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" dependencies = [ "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -213,7 +213,7 @@ dependencies = [ "actix-router", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -320,18 +320,18 @@ checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] name = "async-trait" -version = "0.1.53" +version = "0.1.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600" +checksum = 
"96cf8829f67d2eab0b2dfa42c5d0ef737e0724e4a82b01b3e292456202b19716" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -509,9 +509,9 @@ checksum = "b4ae4235e6dac0694637c763029ecea1a2ec9e4e06ec2729bd21ba4d9c863eb7" [[package]] name = "bumpalo" -version = "3.9.1" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" +checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" [[package]] name = "byte-unit" @@ -525,9 +525,9 @@ dependencies = [ [[package]] name = "bytecount" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e" +checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" [[package]] name = "bytemuck" @@ -546,7 +546,7 @@ checksum = "562e382481975bc61d11275ac5e62a19abd00b0547d99516a415336f183dcd0e" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -705,7 +705,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -725,7 +725,7 @@ checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -912,7 +912,7 @@ checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -925,7 +925,7 @@ dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", "rustc_version 0.4.0", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -1084,7 +1084,7 @@ checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", 
+ "syn 1.0.96", ] [[package]] @@ -1123,8 +1123,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.29.2" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" +version = "0.29.3" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" dependencies = [ "nom", "nom_locate", @@ -1148,8 +1148,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.29.2" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" +version = "0.29.3" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" dependencies = [ "serde_json", ] @@ -1181,9 +1181,9 @@ dependencies = [ [[package]] name = "fragile" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9d758e60b45e8d749c89c1b389ad8aee550f86aa12e2b9298b546dda7a82ab1" +checksum = "85dcb89d2b10c5f6133de2efd8c11959ce9dbb46a2f7a4cab208c4eeda6ce1ab" [[package]] name = "fs_extra" @@ -1253,7 +1253,7 @@ checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -1331,7 +1331,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -1493,9 +1493,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff8670570af52249509a86f5e3e18a08c60b177071826898fde8997cf5f6bfbb" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ "bytes", "fnv", @@ -1661,8 +1661,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.29.2" -source = 
"git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" +version = "0.29.3" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" dependencies = [ "serde_json", ] @@ -1962,7 +1962,7 @@ dependencies = [ "log", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -1984,7 +1984,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -2171,9 +2171,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f" +checksum = "d5172b50c23043ff43dd53e51392f36519d9b35a8f3a410d30ece5d1aedd58ae" dependencies = [ "libc", ] @@ -2189,8 +2189,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.29.2" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.2#19d44142a170d63d076e7d327b542dfa1f3f8b96" +version = "0.29.3" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" dependencies = [ "bimap", "bincode", @@ -2299,7 +2299,7 @@ dependencies = [ "cfg-if 1.0.0", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -2631,7 +2631,7 @@ dependencies = [ "proc-macro-error-attr", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", "version_check", ] @@ -2707,15 +2707,6 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" -[[package]] -name = "quickcheck" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" -dependencies = 
[ - "rand", -] - [[package]] name = "quote" version = "0.6.13" @@ -3107,7 +3098,7 @@ checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -3177,9 +3168,9 @@ dependencies = [ [[package]] name = "simple_asn1" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a762b1c38b9b990c694b9c2f8abe3372ce6a9ceaae6bca39cfc46e054f45745" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint", "num-traits", @@ -3305,9 +3296,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942" +checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", @@ -3331,7 +3322,7 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", "unicode-xid 0.2.3", ] @@ -3413,7 +3404,7 @@ checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -3456,7 +3447,6 @@ dependencies = [ "itoa 1.0.2", "libc", "num_threads", - "quickcheck", "serde", "time-macros", ] @@ -3484,9 +3474,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.18.2" +version = "1.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4903bf0427cf68dddd5aa6a93220756f8be0c34fcfa9f5e6191e103e15a31395" +checksum = "c51a52ed6686dd62c320f9b89299e9dfb46f730c7a48e635c19f21d116cb1439" dependencies = [ "bytes", "libc", @@ -3504,13 +3494,13 
@@ dependencies = [ [[package]] name = "tokio-macros" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" +checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", ] [[package]] @@ -3526,9 +3516,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3" +checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9" dependencies = [ "futures-core", "pin-project-lite", @@ -3537,9 +3527,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f988a1a1adc2fb21f9c12aa96441da33a1728193ae0b95d2be22dbd17fcb4e5c" +checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ "bytes", "futures-core", @@ -3566,35 +3556,23 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" -version = "0.1.34" +version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" +checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" dependencies = [ "cfg-if 1.0.0", "log", "pin-project-lite", - "tracing-attributes", "tracing-core", ] -[[package]] -name = "tracing-attributes" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" -dependencies = [ - "proc-macro2 1.0.39", - "quote 1.0.18", - "syn 1.0.95", -] - [[package]] name = "tracing-core" 
-version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f54c8ca710e81886d498c2fd3331b56c93aa248d49de2222ad2742247c60072f" +checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921" dependencies = [ - "lazy_static", + "once_cell", ] [[package]] @@ -3727,9 +3705,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "7.2.0" +version = "7.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "626fd028e124b3ee607632d92ba99b5a5a086cfd404ede4af6c19ecd9b75a02d" +checksum = "b1f44ef1afcf5979e34748c12595f9589f3dc4e34abf156fb6d95f9b835568dc" dependencies = [ "anyhow", "cfg-if 1.0.0", @@ -3825,7 +3803,7 @@ dependencies = [ "log", "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", "wasm-bindgen-shared", ] @@ -3859,7 +3837,7 @@ checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" dependencies = [ "proc-macro2 1.0.39", "quote 1.0.18", - "syn 1.0.95", + "syn 1.0.96", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4033,7 +4011,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb" dependencies = [ "proc-macro2 1.0.39", - "syn 1.0.95", + "syn 1.0.96", "synstructure", ] diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 8f00f3c99..ed3a589e2 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -8,7 +8,7 @@ base64 = "0.13.0" enum-iterator = "0.7.0" hmac = "0.12.1" meilisearch-types = { path = "../meilisearch-types" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.2" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.3" } rand = "0.8.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } diff --git 
a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index fa4e6587a..094c79901 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -30,7 +30,7 @@ lazy_static = "1.4.0" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.2" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.3" } mime = "0.3.16" num_cpus = "1.13.1" obkv = "0.2.0" From 1425d62a31b3601775089cb5951a72d6dc079ec1 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 9 Jun 2022 18:08:26 +0200 Subject: [PATCH 161/185] test dump v5 --- .../tests/assets/v5_v0.28.0_test_dump.dump | Bin 0 -> 5429 bytes meilisearch-http/tests/common/server.rs | 11 ++- meilisearch-http/tests/dumps/data.rs | 5 ++ meilisearch-http/tests/dumps/mod.rs | 71 +++++++++++++++++- 4 files changed, 82 insertions(+), 5 deletions(-) create mode 100644 meilisearch-http/tests/assets/v5_v0.28.0_test_dump.dump diff --git a/meilisearch-http/tests/assets/v5_v0.28.0_test_dump.dump b/meilisearch-http/tests/assets/v5_v0.28.0_test_dump.dump new file mode 100644 index 0000000000000000000000000000000000000000..bc30ea7b91a7f2d0015e3b83f90b03daf0e9843a GIT binary patch literal 5429 zcmbuC!PhB@g(w8s1qkFqGQ?gPOkg57*1IpWN>14xFYgg~f4X_Kpc!Y-8 zA|x|qR3Gsjp_bbjnVc=Zz;dQiraVatfr?FRLXECG00m}(xs6k>uk~s9_g{^$Y0ANd zxbpp-3R7OPN7ccmn}SEM<$Ah#CMHQe|J1Nm1U&W9}34?Vf`%=mkJ9U{apE%MkSi~JmD7CkPyMnw5UFbtJK)>R6S+!23FNhOn z8vg+?uWi}Q4;j*+ik*5R4~-gE!Xy)JVUR~38@d7<@gd>!vLpq9uezihv$vPN! 
zhLZ{*EW+hMLhSp+0}e;_b42?mANW=-_(?8zk)hwh4QjXZV>B%*XkI5GYOMqEE#UtD zEQ=!NWkMt_SNX)8;1++EKRvf8$0*zWCCLx)dRDb0m4LIz8(bB5HxJ5P5ol`C30PLN>ld@ALA*_b)mWc-pt|RkUEgE*1V`)ztb!&7!vIJ4sC;}AU9btm1 z6?8t7ahqUf?O>|{l;x%z+SWIwVrcQQ=}!_yIC~K;8wObk7SGkd@~H?zQZ;d=DS|{0 zx+zUw)rt3@Ro>c0Lc~;Q=?n($M2h-aCTo$2sQ92HnlHrGAHx&yg=0e2a?OKP-Z6tq zcQ~vb?GJGx+TePK=%sBYsCW!MDt|H*#3Qto$r4eUytJCb`YDTe5xv^Vt*HlPP{6q* z)Cmu{?WAChOPPitWK0-2O)Tu%CH*WRaW-1kh1o8#cr@j<6;yewua_k6%C?1hJ1l(J z+hk5eDP%Pe{DcZZp5102P*84BSg>rm*+UI1xrLHZJ8z=V1RdSeUkF`N3Is ztj~Or)R~BeDjV{Y+pzI)sVcxM{1r7d>XXui$eO7--mYp9edlD14wLXoF3*#ycJUq#Dl8I*lSjChDNGR->GM3wLx5J|qr7#Ah(t>1g-zu91u^ikQe>`d(mLJa#9oH`tjujJR0pi!!((*Gtb$!fZWNWS+J?lOTY|aQKKme9%Tc{FF<47L?>^J6*|9{kjX&$N z(eKEaKa!43;5Id9puC0sIK6a)2=d*2AK6Q;&n1wWh{vQ_&^)7|cO6+|yDrykkLu4P zbX0NJZu(x!FXXDCbdXa4bw7*;Ylw9TZ$>>3)jEUQ@FZa>PUvNsiB zojLSP>_zIvC+Bo=Uhh=3`@@jioNfA-Yq5I9EDKNNNA%1ep! zE3FH|cFj|BzH&`f7I8N|uMQp~{9(KM`7GjS5B4~IvMQ`&(Cv+Ys>fGhx~08VzC`tL zRc#F!0qmA*N}P1$-&gycHu8WF(t?pfFXPZd>)AsF zqJ5DmCvee6b$1xKGttvV9G1qrAMImGj0;pMyELYex=5tSF{%2xC z-{Q*}v^OS@k6QD-XpP{UXd+KY*y(7WVPya7L&5Q)7>?HNcLM2fgE=?yiPD}sG2E)I zLPHjzWzl|RZmWnDrHgUaKeT+zshQMEYQ|!-K+z6PPHq9WgdmC&Z#h>W?!3OmJ9ArF zq-4Y08tU|>qhi(*s5|E>gW4j^O>%ZDGzta* zCK^?Hbk@kBvW&4~3dalgTt~et_9Db)Uh{q{`{K6;9KJ0W=nQXb9OB*N9*~}Y7h(up zaoJm)0K4WK?1dlOF3}7a>wOa~mV`QY?NWCLa@6x!!Cx&-L6J@5BnY!5*v>-I;jR=> zO7!D&-w>j`h`(oB;#;z^14sKIKa1WRkt3EHz&&!M_AiYBm&Xp0ol;`#fg*xqVd{jd z6;)5`nIDHi2VZuH@q<$^WxBCJ*Wa{`wByQk^$eJ1rD8PRnV;HoB^pu z>?oGA-1GSLlS|}Mb&e>C9VMaDHhJU^8MiKs>@DKLK%hjXQb(hQUYiz8_3?nKWTi|0 zU>ceb1xC~8<^!%!$0*I+#6ZZrbw_BKH3e~YYYd)cw9q{;!ox1Z8|CO8mCTA_;LqDA zoF)m0>5!NkcMI8rgqmH@P@M-6rn-4?-QSlgAUO=RN!x0Wu%rBazf@Qw|?;K5~_t;=+}d9s~`loItM)PyP+ZB-*QKEsG zkilH@uRm9tY>@NDR#=pT)^Ze{ft=o>4|7O$affGRevJFOzY0PvN5e|BbDP4WDkBhg zTO;U~pwjGm(f+a_kVTZ^k(fuFfDmj4sV+$-=8Zfu)aFCED=-(vwD#}2KO~7t?V|3K-s;9cD#}>8tQ}ciC4?Fu zn2D(195hA%0b^> zFLh_C?5*f;)>Mnt$4pAx}PiWKR4F}DrSbjKPrQ-9rA z);nUs?%t%2c-G*}_o(xl2b9I~SGpA&D4YuU`!`fS*T}@Qf7iYoAvLJVF`p#VcW(V5 
zlux2zfJ!Fr`IcH8eF%H*NJ#qZW*7HFX#( z5)j)K&5T8R{Umox^33r|-eHl%eH3pX_WSHhp=1*3P^U&W^3PhQ(Uza>$c+Uv+qyxTJB zGGDs2T!bYZy~=KukWD>DJUsulw+}x_N`)PzOyF_(O5g}G8*H<5EQ+NpGRJ3lt`s|a zM<+*L*B{J^;PTQ2piY?iJ++Q9Wiu+;eGbMJ#HfmFJx-;1*LJy2%YOLy?cBKxjN~JYuCti^ZBZ_|lT|7!$F@Sr3|bkiRguHyUFIf5 zE$W>g*D6YG7sM6WAwVidNLW*gP!vUOO&MP@_AhFAsb-s zM84=w;*A~WUcI%=x&@(Q;HaN_PiQh8gKV6!XLaQpeyQHSHg99S8e?03FUkd~7 zLeaJ#{3%k_PiBgw>LYcj+L9pEvW<1;g`J?wga9a{i}Sv`9YksbBE>eTBCoQ0wJ zzr6(b!3U39wcD~)mTX91RgOz}Rl@d{XH&2(pDhpNmCLr?>&NKlfOx~U$>)ICOA*;O z$oX4=-^H78&yTeGfc#yi3%DQjRs8r4)^|sFa|a=W!XMA!Br0H|_5VNphp&CtZ^qv6 z`fj-PHk{=Go^}tPcn@~#d!zMdy9f{&yy=xfL(L1l z7-!)oL6notOux`!>#}Qi?6MQU>N?YlH-zcMl?&PHTUE=xB>?CVFvl+Alav5A+;4=a8(RdXQHTJ?oBE>dBY>g6BJ zc~&uIHNq8`}ZqH5(P4Y?K}MHw5T-E2Y(`;Wqa%kJin>f`scfOQ}zsr)I6W)J71jq zZ+k}aTX-p?)kg0;R`tvScSE@X$lRcPOwKCau5ambq?BX3o2({&c44ENV*bvkH-m68 z;G19m=Kibb+v~kt6nyrQ=o)UdsyYj}?ArzjTueahQt#mfhu-ynr3J;V-J0_W81KQ( zl>@rwJYfSwFs*N60*)_M+9!Wmo~$^+wk}wDMEZL(qO*?TinS zw$~cP4cV@Gr-Xy#U4|8fn`M%uP$(|%dqob4nP$+_{diIHw(5cJZ3Zqyn{bKa9Nc54 zv5RM2SPOa7&uY>^+|={0sn&*_<9Xd%+13_Tt^Q;5!qkm3u%JGw27QfqOONeb-7hK9 z3b{;i%59L@K16e2uiBI&!&~>%Z?{K4DtrS%*>#^+#Q?{5H|tg2eqyvfBss&9-$t@E z#v96Tx;b1im3aGFI~qN?$pzH|V|%Zu;;;DVRR3^_o_s29{}JBmU)CO-<@|eT6ECBq za&348{Mp~15KbNR?VT2uw?2xoR*&fuPpZXP+**yDfVEu9mWS8UPtv-Ls-No3>mP|R z_yA6)Xx7X}w`na={X$I)%hgAKA}7BJw%OkLKAu+ggygY+GTx*(>7b34R?F4tQ8LIh z=OUo?t8I2c#?i>15$n=N5~jjB=V>f6WJg#`X=~0`L#Otnjw_2S2%uwBbe2CTJB1Sv5*`4>2^1#ofg=t zj_|%$sm>tazHmE~C5(^sZ`0|ZBd?#aJZ?es(mFljrq*Ms>VFV9z6nSSCW9ilAt3x8 DhaK7& literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/common/server.rs b/meilisearch-http/tests/common/server.rs index 2dd235e8f..88cacd0ee 100644 --- a/meilisearch-http/tests/common/server.rs +++ b/meilisearch-http/tests/common/server.rs @@ -52,16 +52,13 @@ impl Server { } } - pub async fn new_auth() -> Self { - let dir = TempDir::new().unwrap(); - + pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self { if cfg!(windows) { std::env::set_var("TMP", 
TEST_TEMP_DIR.path()); } else { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()); } - let mut options = default_settings(dir.path()); options.master_key = Some("MASTER_KEY".to_string()); let meilisearch = setup_meilisearch(&options).unwrap(); @@ -79,6 +76,12 @@ impl Server { } } + pub async fn new_auth() -> Self { + let dir = TempDir::new().unwrap(); + let options = default_settings(dir.path()); + Self::new_auth_with_options(options, dir).await + } + pub async fn new_with_options(options: Opt) -> Result { let meilisearch = setup_meilisearch(&options)?; let auth = AuthController::new(&options.db_path, &options.master_key)?; diff --git a/meilisearch-http/tests/dumps/data.rs b/meilisearch-http/tests/dumps/data.rs index a5d7f3426..5df09bfd1 100644 --- a/meilisearch-http/tests/dumps/data.rs +++ b/meilisearch-http/tests/dumps/data.rs @@ -18,6 +18,8 @@ pub enum GetDump { MoviesRawV4, MoviesWithSettingsV4, RubyGemsWithSettingsV4, + + TestV5, } impl GetDump { @@ -63,6 +65,9 @@ impl GetDump { GetDump::RubyGemsWithSettingsV4 => { exist_relative_path!("tests/assets/v4_v0.25.2_rubygems_with_settings.dump").into() } + GetDump::TestV5 => { + exist_relative_path!("tests/assets/v5_v0.28.0_test_dump.dump").into() + } } } } diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index c26b0e06e..b171330e0 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -1,6 +1,6 @@ mod data; -use crate::common::{default_settings, Server}; +use crate::common::{default_settings, GetAllDocumentsOptions, Server}; use meilisearch_http::Opt; use serde_json::json; @@ -605,3 +605,72 @@ async fn import_dump_v4_rubygems_with_settings() { json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. 
It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"}) ); } + +#[actix_rt::test] +async fn import_dump_v5() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::TestV5.path()), + ..default_settings(temp.path()) + }; + let mut server = Server::new_auth_with_options(options, temp).await; + server.use_api_key("MASTER_KEY"); + + let (indexes, code) = server.list_indexes(None, None).await; + assert_eq!(code, 200, "{indexes}"); + + assert_eq!(indexes["results"].as_array().unwrap().len(), 2); + assert_eq!(indexes["results"][0]["uid"], json!("test")); + assert_eq!(indexes["results"][1]["uid"], json!("test2")); + assert_eq!(indexes["results"][0]["primaryKey"], json!("id")); + + let expected_stats = json!({ + "numberOfDocuments": 10, + "isIndexing": false, + "fieldDistribution": { + "cast": 10, + "director": 10, + "genres": 10, + "id": 10, + "overview": 10, + "popularity": 10, + "poster_path": 10, + "producer": 10, + "production_companies": 10, + "release_date": 10, + "tagline": 10, + "title": 10, + "vote_average": 10, + "vote_count": 10 + } + }); + + let index1 = server.index("test"); + let index2 = server.index("test2"); + + let (stats, code) = index1.stats().await; + assert_eq!(code, 200); + assert_eq!(stats, expected_stats); + + let (docs, code) = index2 + .get_all_documents(GetAllDocumentsOptions::default()) + .await; + assert_eq!(code, 200); + assert_eq!(docs["results"].as_array().unwrap().len(), 10); + let (docs, code) = index1 + .get_all_documents(GetAllDocumentsOptions::default()) + .await; + assert_eq!(code, 200); + assert_eq!(docs["results"].as_array().unwrap().len(), 10); + + let (stats, code) = index2.stats().await; + assert_eq!(code, 200); + assert_eq!(stats, expected_stats); + + let (keys, code) = server.list_api_keys().await; + assert_eq!(code, 200); + let key = &keys["results"][0]; + + assert_eq!(key["name"], "my key"); +} From 
07a35c644582b701b60c250a3b31f594b4a9228c Mon Sep 17 00:00:00 2001 From: Ryan Russell Date: Sat, 11 Jun 2022 10:20:28 -0500 Subject: [PATCH 162/185] docs: Bash comment readability fixes Signed-off-by: Ryan Russell --- download-latest.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/download-latest.sh b/download-latest.sh index 6fa714c55..d1cfdd127 100644 --- a/download-latest.sh +++ b/download-latest.sh @@ -67,8 +67,8 @@ semverLT() { return 1 } -# Get a token from https://github.com/settings/tokens to increasae rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo' -# Create GITHUB_PAT enviroment variable once you aquired the token to start using it +# Get a token from https://github.com/settings/tokens to increase rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo' +# Create GITHUB_PAT environment variable once you acquired the token to start using it # Returns the tag of the latest stable release (in terms of semver and not of release date) get_latest() { temp_file='temp_file' # temp_file needed because the grep would start before the download is over @@ -89,7 +89,7 @@ get_latest() { latest='' current_tag='' for release_info in $releases; do - if [ $i -eq 0 ]; then # Cheking tag_name + if [ $i -eq 0 ]; then # Checking tag_name if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release current_tag=$release_info else From 8990b1260902cc753c0990ecbc9a5ca4decae0c6 Mon Sep 17 00:00:00 2001 From: Ryan Russell Date: Sat, 11 Jun 2022 10:21:05 -0500 Subject: [PATCH 163/185] docs: Readability fixes in `src/analytics/.rs` files Signed-off-by: Ryan Russell --- meilisearch-http/src/analytics/mod.rs | 2 +- meilisearch-http/src/analytics/segment_analytics.rs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/meilisearch-http/src/analytics/mod.rs b/meilisearch-http/src/analytics/mod.rs index 1d37a053d..b51f306a9 100644 --- 
a/meilisearch-http/src/analytics/mod.rs +++ b/meilisearch-http/src/analytics/mod.rs @@ -61,7 +61,7 @@ pub trait Analytics: Sync + Send { /// The method used to publish most analytics that do not need to be batched every hours fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>); - /// This method should be called to aggergate a get search + /// This method should be called to aggregate a get search fn get_search(&self, aggregate: SearchAggregator); /// This method should be called to aggregate a post search diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 562b99c16..1007a242a 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -495,7 +495,7 @@ impl SearchAggregator { let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.; // we get all the values in a sorted manner let time_spent = self.time_spent.into_sorted_vec(); - // We are only intersted by the slowest value of the 99th fastest results + // We are only interested by the slowest value of the 99th fastest results let time_spent = time_spent.get(percentile_99th as usize); let properties = json!({ @@ -574,8 +574,8 @@ impl DocumentsAggregator { let content_type = request .headers() .get(CONTENT_TYPE) - .map(|s| s.to_str().unwrap_or("unkown")) - .unwrap_or("unkown") + .map(|s| s.to_str().unwrap_or("unknown")) + .unwrap_or("unknown") .to_string(); ret.content_types.insert(content_type); ret.index_creation = index_creation; From 97daea5a66dd7429a3a42f35f0234924e41830ec Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 13 Jun 2022 09:16:36 +0100 Subject: [PATCH 164/185] ignore dump test on windows --- meilisearch-http/tests/dumps/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index b171330e0..3241a7b45 100644 --- 
a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -607,6 +607,7 @@ async fn import_dump_v4_rubygems_with_settings() { } #[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] async fn import_dump_v5() { let temp = tempfile::tempdir().unwrap(); From 44e004d89572ee8c9dd9a8302bf1e7fa92f9b1c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Wed, 15 Jun 2022 10:33:03 +0200 Subject: [PATCH 165/185] Use nightly for cargo fmt in CI --- .github/workflows/rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 92ac4722f..748c5d690 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -83,7 +83,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: nightly override: true components: rustfmt - name: Cache dependencies From 22e1ac969a5fbf5feb370ee59fa1a11b6fb76203 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 15 Jun 2022 15:27:06 +0200 Subject: [PATCH 166/185] Add specific routes for the pagination and faceting settings --- meilisearch-http/src/routes/indexes/settings.rs | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 962fe7d82..5b659a323 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -349,7 +349,9 @@ generate_configure!( stop_words, synonyms, ranking_rules, - typo_tolerance + typo_tolerance, + pagination, + faceting ); pub async fn update_all( @@ -409,6 +411,18 @@ pub async fn update_all( .map(|s| s.two_typos.set())) .flatten(), }, + "faceting": { + "max_values_per_facet": settings.faceting + .as_ref() + .set() + .and_then(|s| s.max_values_per_facet.as_ref().set()), + }, + "pagination": { + "limited_to": settings.pagination + .as_ref() + .set()
.and_then(|s| s.limited_to.as_ref().set()), + }, }), Some(&req), ); From 9d692ba1c687dee0f8da35d484b7b665d55d53a8 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 15 Jun 2022 15:53:43 +0200 Subject: [PATCH 167/185] Add more tests for the pagination and faceting subsettings routes --- .../tests/settings/get_settings.rs | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index 0862b15c5..f0c28385d 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -27,7 +27,13 @@ static DEFAULT_SETTINGS_VALUES: Lazy> = Lazy::new(| map.insert( "faceting", json!({ - "maxValuesByFacet": json!(100), + "maxValuesPerFacet": json!(100), + }), + ); + map.insert( + "pagination", + json!({ + "limitedTo": json!(1000), }), ); map @@ -206,7 +212,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() { } macro_rules! test_setting_routes { - ($($setting:ident), *) => { + ($($setting:ident $write_method:ident), *) => { $( mod $setting { use crate::common::Server; @@ -232,7 +238,7 @@ macro_rules! test_setting_routes { .chars() .map(|c| if c == '_' { '-' } else { c }) .collect::()); - let (response, code) = server.service.put(url, serde_json::Value::Null).await; + let (response, code) = server.service.$write_method(url, serde_json::Value::Null).await; assert_eq!(code, 202, "{}", response); server.index("").wait_task(0).await; let (response, code) = server.index("test").get().await; @@ -276,13 +282,15 @@ macro_rules! 
test_setting_routes { } test_setting_routes!( - filterable_attributes, - displayed_attributes, - searchable_attributes, - distinct_attribute, - stop_words, - ranking_rules, - synonyms + filterable_attributes put, + displayed_attributes put, + searchable_attributes put, + distinct_attribute put, + stop_words put, + ranking_rules put, + synonyms put, + pagination patch, + faceting patch ); #[actix_rt::test] From 10f315015068daeefea3fde91e01730a1b3ab1b5 Mon Sep 17 00:00:00 2001 From: Janith Petangoda <22471198+janithpet@users.noreply.github.com> Date: Wed, 15 Jun 2022 09:45:18 +0100 Subject: [PATCH 168/185] Improve docker CI: push `vX.Y` tag (without patch) to DockerHub (#2507) * Create a docker tag without patch version if git tag has 0 patch version. * Create Docker tag without patch number if git tag follows v.. Add minor changes on CI --- .github/workflows/publish-docker-images.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index b9ea50cb3..8d24c1123 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -27,6 +27,20 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} + - name: Check tag format + id: check-tag-format + run: | + # Escape submitted tag name + escaped_tag=$(printf "%q" ${{ github.ref_name }}) + + # Check if tag has format v.. 
and set output.match + # to create a vX.Y (without patch version) Docker tag + if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo ::set-output name=match::true + else + echo ::set-output name=match::false + fi + - name: Docker meta id: meta uses: docker/metadata-action@v3 @@ -37,6 +51,7 @@ jobs: flavor: latest=false tags: | type=ref,event=tag + type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.match }} type=raw,value=latest,enable=${{ github.event_name == 'release' }} - name: Build and push From 2dde6fadb4f20610c07e47ecae0f9c397b7e83c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Thu, 16 Jun 2022 19:27:27 +0200 Subject: [PATCH 169/185] Check the version in Cargo.toml before publishing --- .github/scripts/check-release.sh | 28 ++++++++++++ .github/workflows/publish-binaries.yml | 10 +++++ .github/workflows/publish-deb-brew-pkg.yml | 10 +++++ .github/workflows/publish-docker-images.yml | 47 ++++++++++++--------- 4 files changed, 75 insertions(+), 20 deletions(-) create mode 100644 .github/scripts/check-release.sh diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh new file mode 100644 index 000000000..230c3234f --- /dev/null +++ b/.github/scripts/check-release.sh @@ -0,0 +1,28 @@ +#!/bin/sh + +# Checking if current tag matches the package version +current_tag=$(echo $GITHUB_REF | tr -d 'refs/tags/v') +file1='meilisearch-auth/Cargo.toml' +file2='meilisearch-http/Cargo.toml' +file3='meilisearch-lib/Cargo.toml' +file4='meilisearch-types/Cargo.toml' +file5='Cargo.lock' + +file_tag1=$(grep '^version = ' $file1 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +file_tag2=$(grep '^version = ' $file2 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +file_tag3=$(grep '^version = ' $file3 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +file_tag4=$(grep '^version = ' $file4 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +file_tag5=$(grep -A 1 'name = "meilisearch-auth"' $file5 | grep version | 
cut -d '=' -f 2 | tr -d '"' | tr -d ' ') + +if [ "$current_tag" != "$file_tag1" ] || [ "$current_tag" != "$file_tag2" ] || [ "$current_tag" != "$file_tag3" ] || [ "$current_tag" != "$file_tag4" ] || [ "$current_tag" != "$file_tag5" ]; then + echo "Error: the current tag does not match the version in package file(s)." + echo "$file1: found $file_tag1 - expected $current_tag" + echo "$file2: found $file_tag2 - expected $current_tag" + echo "$file3: found $file_tag3 - expected $current_tag" + echo "$file4: found $file_tag4 - expected $current_tag" + echo "$file5: found $file_tag5 - expected $current_tag" + exit 1 +fi + +echo 'OK' +exit 0 diff --git a/.github/workflows/publish-binaries.yml b/.github/workflows/publish-binaries.yml index 304798d75..eee7449a8 100644 --- a/.github/workflows/publish-binaries.yml +++ b/.github/workflows/publish-binaries.yml @@ -5,9 +5,18 @@ on: name: Publish binaries to release jobs: + check-version: + name: Check the version validity + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Check release validity + run: sh .github/scripts/check-release.sh + publish: name: Publish binary for ${{ matrix.os }} runs-on: ${{ matrix.os }} + needs: check-version strategy: fail-fast: false matrix: @@ -41,6 +50,7 @@ jobs: publish-aarch64: name: Publish binary for aarch64 runs-on: ${{ matrix.os }} + needs: check-version continue-on-error: false strategy: fail-fast: false diff --git a/.github/workflows/publish-deb-brew-pkg.yml b/.github/workflows/publish-deb-brew-pkg.yml index 6a5a21287..7618496e9 100644 --- a/.github/workflows/publish-deb-brew-pkg.yml +++ b/.github/workflows/publish-deb-brew-pkg.yml @@ -5,9 +5,18 @@ on: types: [released] jobs: + check-version: + name: Check the version validity + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Check release validity + run: sh .github/scripts/check-release.sh + debian: name: Publish debian packagge runs-on: ubuntu-18.04 + needs: check-version steps: - uses: 
hecrj/setup-rust-action@master with: @@ -30,6 +39,7 @@ jobs: homebrew: name: Bump Homebrew formula runs-on: ubuntu-18.04 + needs: check-version steps: - name: Create PR to Homebrew uses: mislav/bump-homebrew-formula-action@v1 diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index 8d24c1123..223ef41b3 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -5,8 +5,6 @@ on: push: tags: - '*' - release: - types: [released] name: Publish tagged images to Docker Hub @@ -14,45 +12,54 @@ jobs: docker: runs-on: docker steps: + - uses: actions/checkout@v2 + + # Check if the tag has the v.. format. If yes, it means we are publishing an official release. + # In this situation, we need to set `output.stable` to create/update the following tags (additionally to the `vX.Y.Z` Docker tag): + # - a `vX.Y` (without patch version) Docker tag + # - a `latest` Docker tag + - name: Check tag format + if: github.event_name != 'schedule' + id: check-tag-format + run: | + escaped_tag=$(printf "%q" ${{ github.ref_name }}) + + if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo ::set-output name=stable::true + else + echo ::set-output name=stable::false + fi + + # Check only the validity of the tag for official releases (not for pre-releases or other tags) + - name: Check release validity + if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable + run: sh .github/scripts/check-release.sh + - name: Set up QEMU uses: docker/setup-qemu-action@v1 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub + - name: Login to Docker Hub if: github.event_name != 'schedule' uses: docker/login-action@v1 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Check tag format - id: check-tag-format - run: | - # Escape submitted tag name - escaped_tag=$(printf "%q" ${{ github.ref_name }}) 
- - # Check if tag has format v.. and set output.match - # to create a vX.Y (without patch version) Docker tag - if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - echo ::set-output name=match::true - else - echo ::set-output name=match::false - fi - - name: Docker meta id: meta uses: docker/metadata-action@v3 with: images: getmeili/meilisearch - # The lastest tag is only pushed for the official Meilisearch release + # The lastest and `vX.Y` tags are only pushed for the official Meilisearch releases # See https://github.com/docker/metadata-action#latest-tag flavor: latest=false tags: | type=ref,event=tag - type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.match }} - type=raw,value=latest,enable=${{ github.event_name == 'release' }} + type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable }} + type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable }} - name: Build and push id: docker_build From 5318e53248eccd0efdc36bfcd2d0c3a4b061a541 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Tue, 21 Jun 2022 10:08:07 +0200 Subject: [PATCH 170/185] Move is-latest-release.sh script into the scripts folder --- .github/{ => scripts}/is-latest-release.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) rename .github/{ => scripts}/is-latest-release.sh (90%) diff --git a/.github/is-latest-release.sh b/.github/scripts/is-latest-release.sh similarity index 90% rename from .github/is-latest-release.sh rename to .github/scripts/is-latest-release.sh index 0c1db61c2..af0ff45b3 100644 --- a/.github/is-latest-release.sh +++ b/.github/scripts/is-latest-release.sh @@ -1,14 +1,14 @@ #!/bin/sh -# Checks if the current tag should be the latest (in terms of semver and not of release date). 
-# Ex: previous tag -> v0.10.1 -# new tag -> v0.8.12 -# The new tag should not be the latest -# So it returns "false", the CI should not run for the release v0.8.2 - -# Used in GHA in publish-docker-latest.yml +# Was used in our CIs to publish the latest docker image. Not used anymore, will be used again when v1 and v2 will be out and we will want to maintain multiple stable versions. # Returns "true" or "false" (as a string) to be used in the `if` in GHA +# Checks if the current tag should be the latest (in terms of semver and not of release date). +# Ex: previous tag -> v2.1.1 +# new tag -> v1.20.3 +# The new tag (v1.20.3) should NOT be the latest +# So it returns "false", the `latest tag` should not be updated for the release v1.20.3 and still need to correspond to v2.1.1 + # GLOBAL GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$' # i.e. v[number].[number].[number] From c484d2864617234807541433e75567f9ac9d0949 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Tue, 21 Jun 2022 10:14:17 +0200 Subject: [PATCH 171/185] Update .github/scripts/check-release.sh Co-authored-by: Tamo --- .github/scripts/check-release.sh | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh index 230c3234f..ea138c4d7 100644 --- a/.github/scripts/check-release.sh +++ b/.github/scripts/check-release.sh @@ -8,21 +8,18 @@ file3='meilisearch-lib/Cargo.toml' file4='meilisearch-types/Cargo.toml' file5='Cargo.lock' -file_tag1=$(grep '^version = ' $file1 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') -file_tag2=$(grep '^version = ' $file2 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') -file_tag3=$(grep '^version = ' $file3 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') -file_tag4=$(grep '^version = ' $file4 | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') -file_tag5=$(grep -A 1 'name = "meilisearch-auth"' $file5 | grep version | cut -d '=' -f 2 | tr -d '"' | 
tr -d ' ') -if [ "$current_tag" != "$file_tag1" ] || [ "$current_tag" != "$file_tag2" ] || [ "$current_tag" != "$file_tag3" ] || [ "$current_tag" != "$file_tag4" ] || [ "$current_tag" != "$file_tag5" ]; then - echo "Error: the current tag does not match the version in package file(s)." - echo "$file1: found $file_tag1 - expected $current_tag" - echo "$file2: found $file_tag2 - expected $current_tag" - echo "$file3: found $file_tag3 - expected $current_tag" - echo "$file4: found $file_tag4 - expected $current_tag" - echo "$file5: found $file_tag5 - expected $current_tag" - exit 1 -fi +file5=$(grep -A 1 'name = "meilisearch-auth"' $file5 | grep version) + +for file in $file1 $file2 $file3 $file4 $file5; +do + file_tag=$(grep '^version = ' $file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') + if [ "$current_tag" != "$file_tag" ]; then + echo "Error: the current tag does not match the version in package file(s)." + echo "$file: found $file_tag - expected $current_tag" + exit 1 + fi +done echo 'OK' exit 0 From de16de20f46390efe9619bf3788dd802a7a091f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Tue, 21 Jun 2022 10:14:24 +0200 Subject: [PATCH 172/185] Update .github/scripts/check-release.sh Co-authored-by: Tamo --- .github/scripts/check-release.sh | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh index ea138c4d7..189019ce1 100644 --- a/.github/scripts/check-release.sh +++ b/.github/scripts/check-release.sh @@ -2,11 +2,8 @@ # Checking if current tag matches the package version current_tag=$(echo $GITHUB_REF | tr -d 'refs/tags/v') -file1='meilisearch-auth/Cargo.toml' -file2='meilisearch-http/Cargo.toml' -file3='meilisearch-lib/Cargo.toml' -file4='meilisearch-types/Cargo.toml' -file5='Cargo.lock' +files='*/Cargo.toml' +lock_file='Cargo.lock' file5=$(grep -A 1 'name = "meilisearch-auth"' $file5 | grep version) From 
c6ed756dbc3573c385bb6ed14e9da49f20e37c8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Tue, 21 Jun 2022 10:46:32 +0200 Subject: [PATCH 173/185] Update script after review --- .github/scripts/check-release.sh | 34 ++++++++++++++++------------ .github/scripts/is-latest-release.sh | 2 +- Cargo.lock | 2 +- permissive-json-pointer/Cargo.toml | 2 +- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh index 189019ce1..bad957cff 100644 --- a/.github/scripts/check-release.sh +++ b/.github/scripts/check-release.sh @@ -1,22 +1,26 @@ #!/bin/sh -# Checking if current tag matches the package version -current_tag=$(echo $GITHUB_REF | tr -d 'refs/tags/v') -files='*/Cargo.toml' -lock_file='Cargo.lock' - - -file5=$(grep -A 1 'name = "meilisearch-auth"' $file5 | grep version) - -for file in $file1 $file2 $file3 $file4 $file5; -do - file_tag=$(grep '^version = ' $file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') - if [ "$current_tag" != "$file_tag" ]; then - echo "Error: the current tag does not match the version in package file(s)." 
- echo "$file: found $file_tag - expected $current_tag" +# check_tag $current_tag $file_tag $file_name +function check_tag { + if [ "$1" != "$2" ]; then + echo "Error: the current tag does not match the version in $3:" + echo "Found $1 - expected $2" exit 1 - fi + fi +} + +current_tag=$(echo $GITHUB_REF | tr -d 'refs/tags/v') + +files='*/Cargo.toml' +for file in $files; +do + file_tag="$(grep '^version = ' $file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')" + check_tag $current_tag $file_tag $file done +lock_file='Cargo.lock' +lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') +check_tag $current_tag $lock_tag $lock_file + echo 'OK' exit 0 diff --git a/.github/scripts/is-latest-release.sh b/.github/scripts/is-latest-release.sh index af0ff45b3..81534a2f7 100644 --- a/.github/scripts/is-latest-release.sh +++ b/.github/scripts/is-latest-release.sh @@ -7,7 +7,7 @@ # Ex: previous tag -> v2.1.1 # new tag -> v1.20.3 # The new tag (v1.20.3) should NOT be the latest -# So it returns "false", the `latest tag` should not be updated for the release v1.20.3 and still need to correspond to v2.1.1 +# So it returns "false", the `latest` tag should not be updated for the release v1.20.3 and still need to correspond to v2.1.1 # GLOBAL GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$' # i.e. 
v[number].[number].[number] diff --git a/Cargo.lock b/Cargo.lock index 1bd47e355..ff2325361 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2515,7 +2515,7 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "permissive-json-pointer" -version = "0.2.0" +version = "0.28.0" dependencies = [ "big_s", "serde_json", diff --git a/permissive-json-pointer/Cargo.toml b/permissive-json-pointer/Cargo.toml index b50f30f19..9e01b81ab 100644 --- a/permissive-json-pointer/Cargo.toml +++ b/permissive-json-pointer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "permissive-json-pointer" -version = "0.2.0" +version = "0.28.0" edition = "2021" description = "A permissive json pointer" readme = "README.md" From 7490383d4f71d282a075cf3e9421d596c59b51c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Tue, 21 Jun 2022 19:17:33 +0200 Subject: [PATCH 174/185] Update the not-released version in Cargo.toml files --- Cargo.lock | 6 +++--- meilisearch-lib/Cargo.toml | 2 +- meilisearch-types/Cargo.toml | 2 +- permissive-json-pointer/Cargo.toml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ff2325361..f736ebecb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2092,7 +2092,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "0.28.0" +version = "0.0.0" dependencies = [ "actix-rt", "actix-web", @@ -2154,7 +2154,7 @@ dependencies = [ [[package]] name = "meilisearch-types" -version = "0.28.0" +version = "0.0.0" dependencies = [ "actix-web", "proptest", @@ -2515,7 +2515,7 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "permissive-json-pointer" -version = "0.28.0" +version = "0.0.0" dependencies = [ "big_s", "serde_json", diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 094c79901..d9603e1d5 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name 
= "meilisearch-lib" -version = "0.28.0" +version = "0.0.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 6949722e7..1614f5b34 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-types" -version = "0.28.0" +version = "0.0.0" authors = ["marin "] edition = "2021" diff --git a/permissive-json-pointer/Cargo.toml b/permissive-json-pointer/Cargo.toml index 9e01b81ab..9e01f8807 100644 --- a/permissive-json-pointer/Cargo.toml +++ b/permissive-json-pointer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "permissive-json-pointer" -version = "0.28.0" +version = "0.0.0" edition = "2021" description = "A permissive json pointer" readme = "README.md" From 32c8846514dba7ea03b169c3002bf8365dd714ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Wed, 22 Jun 2022 12:20:12 +0200 Subject: [PATCH 175/185] Rollback 0.0.0 versionning --- .github/scripts/check-release.sh | 20 +++++++++++--------- Cargo.lock | 6 +++--- meilisearch-lib/Cargo.toml | 2 +- meilisearch-types/Cargo.toml | 2 +- permissive-json-pointer/Cargo.toml | 2 +- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh index bad957cff..3c8dd64a7 100644 --- a/.github/scripts/check-release.sh +++ b/.github/scripts/check-release.sh @@ -3,24 +3,26 @@ # check_tag $current_tag $file_tag $file_name function check_tag { if [ "$1" != "$2" ]; then - echo "Error: the current tag does not match the version in $3:" - echo "Found $1 - expected $2" - exit 1 + echo "Error: the current tag does not match the version in $3: found $1 - expected $2" + ret=1 fi } +ret=0 current_tag=$(echo $GITHUB_REF | tr -d 'refs/tags/v') -files='*/Cargo.toml' -for file in $files; +toml_files='*/Cargo.toml' +for toml_file in $toml_files; do - 
file_tag="$(grep '^version = ' $file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')" - check_tag $current_tag $file_tag $file + file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')" + check_tag $current_tag $file_tag $toml_file done lock_file='Cargo.lock' lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') check_tag $current_tag $lock_tag $lock_file -echo 'OK' -exit 0 +if [ "$ret" -eq 0 ] ; then + echo 'OK' +fi +exit $ret diff --git a/Cargo.lock b/Cargo.lock index f736ebecb..ff2325361 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2092,7 +2092,7 @@ dependencies = [ [[package]] name = "meilisearch-lib" -version = "0.0.0" +version = "0.28.0" dependencies = [ "actix-rt", "actix-web", @@ -2154,7 +2154,7 @@ dependencies = [ [[package]] name = "meilisearch-types" -version = "0.0.0" +version = "0.28.0" dependencies = [ "actix-web", "proptest", @@ -2515,7 +2515,7 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "permissive-json-pointer" -version = "0.0.0" +version = "0.28.0" dependencies = [ "big_s", "serde_json", diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index d9603e1d5..094c79901 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-lib" -version = "0.0.0" +version = "0.28.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 1614f5b34..6949722e7 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "meilisearch-types" -version = "0.0.0" +version = "0.28.0" authors = ["marin "] edition = "2021" diff --git a/permissive-json-pointer/Cargo.toml b/permissive-json-pointer/Cargo.toml index 9e01f8807..9e01b81ab 100644 --- 
a/permissive-json-pointer/Cargo.toml +++ b/permissive-json-pointer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "permissive-json-pointer" -version = "0.0.0" +version = "0.28.0" edition = "2021" description = "A permissive json pointer" readme = "README.md" From c47369b502137ae5c754e666580c10d67819d13c Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 22 Jun 2022 12:33:15 +0200 Subject: [PATCH 176/185] fix all the array on the search get route and improve the tests --- Cargo.lock | 22 ++++++------- meilisearch-http/Cargo.toml | 2 +- meilisearch-http/src/routes/indexes/search.rs | 21 +++++++------ meilisearch-http/tests/common/index.rs | 2 +- meilisearch-http/tests/search/errors.rs | 24 +++++--------- meilisearch-http/tests/search/formatted.rs | 31 ++++++++++--------- 6 files changed, 50 insertions(+), 52 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1bd47e355..1a861c5fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2069,7 +2069,6 @@ dependencies = [ "serde", "serde-cs", "serde_json", - "serde_url_params", "sha-1", "sha2", "siphasher", @@ -2087,6 +2086,7 @@ dependencies = [ "uuid", "vergen", "walkdir", + "yaup", "zip", ] @@ -3113,16 +3113,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_url_params" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c43307d0640738af32fe8d01e47119bc0fc8a686be470a44a586caff76dfb34" -dependencies = [ - "serde", - "url", -] - [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -3994,6 +3984,16 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6d12cb7a57bbf2ab670ed9545bae3648048547f9039279a89ce000208e585c1" +[[package]] +name = "yaup" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f8e1d3d18db742c8b9ad2f5f3c5bf5b63aa67b9933617c8f8350d39a3c173c6" +dependencies = [ + "serde", + "url", +] + [[package]] name = "zerocopy" version = "0.3.0" diff --git 
a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 763337888..0012ea9b0 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -83,8 +83,8 @@ actix-rt = "2.7.0" assert-json-diff = "2.0.1" manifest-dir-macros = "0.1.14" maplit = "1.0.2" -serde_url_params = "0.2.1" urlencoding = "2.1.0" +yaup = "0.1.0" [features] default = ["analytics", "mini-dashboard"] diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index 3f8fecd5c..9e3a64881 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -14,7 +14,6 @@ use serde_json::Value; use crate::analytics::{Analytics, SearchAggregator}; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; -use crate::routes::{fold_star_or, StarOr}; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service( @@ -30,16 +29,16 @@ pub struct SearchQueryGet { q: Option, offset: Option, limit: Option, - attributes_to_retrieve: Option>>, - attributes_to_crop: Option>>, + attributes_to_retrieve: Option>, + attributes_to_crop: Option>, #[serde(default = "DEFAULT_CROP_LENGTH")] crop_length: usize, - attributes_to_highlight: Option>>, + attributes_to_highlight: Option>, filter: Option, sort: Option, #[serde(default = "Default::default")] show_matches_position: bool, - facets: Option>>, + facets: Option>, #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")] highlight_pre_tag: String, #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")] @@ -62,14 +61,18 @@ impl From for SearchQuery { q: other.q, offset: other.offset, limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT), - attributes_to_retrieve: other.attributes_to_retrieve.and_then(fold_star_or), - attributes_to_crop: other.attributes_to_crop.and_then(fold_star_or), + attributes_to_retrieve: other + .attributes_to_retrieve + .map(|o| o.into_iter().collect()), + attributes_to_crop: 
other.attributes_to_crop.map(|o| o.into_iter().collect()), crop_length: other.crop_length, - attributes_to_highlight: other.attributes_to_highlight.and_then(fold_star_or), + attributes_to_highlight: other + .attributes_to_highlight + .map(|o| o.into_iter().collect()), filter, sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)), show_matches_position: other.show_matches_position, - facets: other.facets.and_then(fold_star_or), + facets: other.facets.map(|o| o.into_iter().collect()), highlight_pre_tag: other.highlight_pre_tag, highlight_post_tag: other.highlight_post_tag, crop_marker: other.crop_marker, diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 010535e21..c92c58560 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -221,7 +221,7 @@ impl Index<'_> { } pub async fn search_get(&self, query: Value) -> (Value, StatusCode) { - let params = serde_url_params::to_string(&query).unwrap(); + let params = yaup::to_string(&query).unwrap(); let url = format!("/indexes/{}/search?{}", encode(self.uid.as_ref()), params); self.service.get(url).await } diff --git a/meilisearch-http/tests/search/errors.rs b/meilisearch-http/tests/search/errors.rs index 500825364..c2523597d 100644 --- a/meilisearch-http/tests/search/errors.rs +++ b/meilisearch-http/tests/search/errors.rs @@ -45,26 +45,18 @@ async fn search_invalid_highlight_and_crop_tags() { for field in fields { // object - index - .search( - json!({field.to_string(): {"marker": ""}}), - |response, code| { - assert_eq!(code, 400, "field {} passing object: {}", &field, response); - assert_eq!(response["code"], "bad_request"); - }, - ) + let (response, code) = index + .search_post(json!({field.to_string(): {"marker": ""}})) .await; + assert_eq!(code, 400, "field {} passing object: {}", &field, response); + assert_eq!(response["code"], "bad_request"); // array - index - .search( - json!({field.to_string(): ["marker", ""]}), - 
|response, code| { - assert_eq!(code, 400, "field {} passing array: {}", &field, response); - assert_eq!(response["code"], "bad_request"); - }, - ) + let (response, code) = index + .search_post(json!({field.to_string(): ["marker", ""]})) .await; + assert_eq!(code, 400, "field {} passing array: {}", &field, response); + assert_eq!(response["code"], "bad_request"); } } diff --git a/meilisearch-http/tests/search/formatted.rs b/meilisearch-http/tests/search/formatted.rs index 9876bac3a..556b0bf35 100644 --- a/meilisearch-http/tests/search/formatted.rs +++ b/meilisearch-http/tests/search/formatted.rs @@ -15,20 +15,23 @@ async fn formatted_contain_wildcard() { index.add_documents(documents, None).await; index.wait_task(1).await; - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "_formatted": { - "id": "852", - "cattos": "pesti", - }, - "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]}, - }) - ); + index.search(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }), + |response, code| + { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "_formatted": { + "id": "852", + "cattos": "pesti", + }, + "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]}, + }) + ); + } + ) + .await; let (response, code) = index .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["*"] })) From 7feb15df2806a7f50c245420a33770fed6f6fe4b Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Thu, 23 Jun 2022 10:47:48 +0200 Subject: [PATCH 177/185] Bump milli to 0.31.1 --- Cargo.lock | 16 ++++++++-------- meilisearch-auth/Cargo.toml | 2 +- 
meilisearch-lib/Cargo.toml | 4 +--- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ff2325361..d8bc4fe64 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1123,8 +1123,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.29.3" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" +version = "0.31.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.31.1#83ad1aaf0552db9f63fc21ae9fe3976e61577dc8" dependencies = [ "nom", "nom_locate", @@ -1148,8 +1148,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.29.3" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" +version = "0.31.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.31.1#83ad1aaf0552db9f63fc21ae9fe3976e61577dc8" dependencies = [ "serde_json", ] @@ -1661,8 +1661,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.29.3" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" +version = "0.31.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.31.1#83ad1aaf0552db9f63fc21ae9fe3976e61577dc8" dependencies = [ "serde_json", ] @@ -2189,8 +2189,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.29.3" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.29.3#f1d848bb9add86b9414d110a083dfa0462d5d636" +version = "0.31.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.31.1#83ad1aaf0552db9f63fc21ae9fe3976e61577dc8" dependencies = [ "bimap", "bincode", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index ed3a589e2..b4921655e 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -8,7 +8,7 @@ base64 = "0.13.0" enum-iterator = "0.7.0" hmac = "0.12.1" meilisearch-types = { path = "../meilisearch-types" } -milli = { git = 
"https://github.com/meilisearch/milli.git", tag = "v0.29.3" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.31.1" } rand = "0.8.4" serde = { version = "1.0.136", features = ["derive"] } serde_json = { version = "1.0.79", features = ["preserve_order"] } diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 094c79901..3629f51a8 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -3,8 +3,6 @@ name = "meilisearch-lib" version = "0.28.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] actix-web = { version = "4.0.1", default-features = false } anyhow = { version = "1.0.56", features = ["backtrace"] } @@ -30,7 +28,7 @@ lazy_static = "1.4.0" log = "0.4.14" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.29.3" } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.31.1" } mime = "0.3.16" num_cpus = "1.13.1" obkv = "0.2.0" From dad86fc3d6f87cc37ddb0546a355bb7a1e60a55a Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 22 Jun 2022 17:24:25 +0200 Subject: [PATCH 178/185] Make the changes necessary to use milli 0.31.1 --- .../src/routes/indexes/settings.rs | 6 ++--- meilisearch-http/tests/dumps/mod.rs | 18 ++++++------- meilisearch-http/tests/search/mod.rs | 4 +-- .../tests/settings/get_settings.rs | 4 +-- meilisearch-lib/src/index/dump.rs | 2 +- meilisearch-lib/src/index/index.rs | 25 +++++++++++-------- meilisearch-lib/src/index/search.rs | 12 ++++----- meilisearch-lib/src/index/updates.rs | 10 ++++---- .../src/index_resolver/meta_store.rs | 9 +++++-- meilisearch-lib/src/lib.rs | 17 ------------- meilisearch-lib/src/snapshot.rs | 5 ++-- 11 files changed, 51 insertions(+), 61 deletions(-) diff --git a/meilisearch-http/src/routes/indexes/settings.rs 
b/meilisearch-http/src/routes/indexes/settings.rs index 5b659a323..bc8642def 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -318,7 +318,7 @@ make_setting_route!( "Pagination Updated".to_string(), json!({ "pagination": { - "limited_to": setting.as_ref().and_then(|s| s.limited_to.set()), + "max_total_hits": setting.as_ref().and_then(|s| s.max_total_hits.set()), }, }), Some(req), @@ -418,10 +418,10 @@ pub async fn update_all( .and_then(|s| s.max_values_per_facet.as_ref().set()), }, "pagination": { - "limited_to": settings.pagination + "max_total_hits": settings.pagination .as_ref() .set() - .and_then(|s| s.limited_to.as_ref().set()), + .and_then(|s| s.max_total_hits.as_ref().set()), }, }), Some(&req), diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 851f502a9..389f6b480 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -61,7 +61,7 @@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 
100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -125,7 +125,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -189,7 +189,7 @@ async fn import_dump_v2_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, 
"disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 }}) + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 }}) ); let (tasks, code) = index.list_tasks().await; @@ -253,7 +253,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -317,7 +317,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( 
settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -381,7 +381,7 @@ async fn import_dump_v3_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({"displayedAttributes": ["name", "summary", "description", "version", 
"total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -445,7 +445,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -509,7 +509,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": 
[], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; @@ -573,7 +573,7 @@ async fn import_dump_v4_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "limitedTo": 1000 } }) + json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", 
"exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; diff --git a/meilisearch-http/tests/search/mod.rs b/meilisearch-http/tests/search/mod.rs index 02cdc751f..b9c187783 100644 --- a/meilisearch-http/tests/search/mod.rs +++ b/meilisearch-http/tests/search/mod.rs @@ -567,7 +567,7 @@ async fn placeholder_search_is_hard_limited() { .await; index - .update_settings(json!({ "pagination": { "limitedTo": 10_000 } })) + .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })) .await; index.wait_task(1).await; @@ -636,7 +636,7 @@ async fn search_is_hard_limited() { .await; index - .update_settings(json!({ "pagination": { "limitedTo": 10_000 } })) + .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })) .await; index.wait_task(1).await; diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index f0c28385d..9d10b7820 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -33,7 +33,7 @@ static DEFAULT_SETTINGS_VALUES: Lazy> = Lazy::new(| map.insert( "pagination", json!({ - "limitedTo": json!(1000), + "maxTotalHits": json!(1000), }), ); map @@ -82,7 +82,7 @@ async fn get_settings() { assert_eq!( settings["pagination"], json!({ - "limitedTo": 1000, + "maxTotalHits": 1000, }) ); } diff --git a/meilisearch-lib/src/index/dump.rs b/meilisearch-lib/src/index/dump.rs index e201e738b..c6feb187f 100644 --- a/meilisearch-lib/src/index/dump.rs +++ b/meilisearch-lib/src/index/dump.rs @@ -27,7 +27,7 @@ const DATA_FILE_NAME: &str = "documents.jsonl"; impl Index { pub fn dump(&self, path: impl AsRef) -> Result<()> { // acquire write txn 
make sure any ongoing write is finished before we start. - let txn = self.env.write_txn()?; + let txn = self.write_txn()?; let path = path.as_ref().join(format!("indexes/{}", self.uuid)); create_dir_all(&path)?; diff --git a/meilisearch-lib/src/index/index.rs b/meilisearch-lib/src/index/index.rs index d4772b73b..518e9ce3e 100644 --- a/meilisearch-lib/src/index/index.rs +++ b/meilisearch-lib/src/index/index.rs @@ -4,9 +4,10 @@ use std::marker::PhantomData; use std::ops::Deref; use std::path::Path; use std::sync::Arc; +use walkdir::WalkDir; use fst::IntoStreamer; -use milli::heed::{EnvOpenOptions, RoTxn}; +use milli::heed::{CompactionOption, EnvOpenOptions, RoTxn}; use milli::update::{IndexerConfig, Setting}; use milli::{obkv_to_json, FieldDistribution, DEFAULT_VALUES_PER_FACET}; use serde::{Deserialize, Serialize}; @@ -14,8 +15,7 @@ use serde_json::{Map, Value}; use time::OffsetDateTime; use uuid::Uuid; -use crate::index::search::DEFAULT_PAGINATION_LIMITED_TO; -use crate::EnvSizer; +use crate::index::search::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use super::error::IndexError; use super::error::Result; @@ -202,9 +202,9 @@ impl Index { }; let pagination = PaginationSettings { - limited_to: Setting::Set( - self.pagination_limited_to(txn)? - .unwrap_or(DEFAULT_PAGINATION_LIMITED_TO), + max_total_hits: Setting::Set( + self.pagination_max_total_hits(txn)? 
+ .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS), ), }; @@ -245,7 +245,7 @@ impl Index { let fields_ids_map = self.fields_ids_map(&txn)?; let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect(); - let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit); + let iter = self.all_documents(&txn)?.skip(offset).take(limit); let mut documents = Vec::new(); @@ -302,7 +302,12 @@ impl Index { } pub fn size(&self) -> u64 { - self.env.size() + WalkDir::new(self.inner.path()) + .into_iter() + .filter_map(|entry| entry.ok()) + .filter_map(|entry| entry.metadata().ok()) + .filter(|metadata| metadata.is_file()) + .fold(0, |acc, m| acc + m.len()) } pub fn snapshot(&self, path: impl AsRef) -> Result<()> { @@ -310,9 +315,7 @@ impl Index { create_dir_all(&dst)?; dst.push("data.mdb"); let _txn = self.write_txn()?; - self.inner - .env - .copy_to_path(dst, milli::heed::CompactionOption::Enabled)?; + self.inner.copy_to_path(dst, CompactionOption::Enabled)?; Ok(()) } } diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs index 781a5bb66..58bcf7ef4 100644 --- a/meilisearch-lib/src/index/search.rs +++ b/meilisearch-lib/src/index/search.rs @@ -29,7 +29,7 @@ pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "".to_string(); /// The maximimum number of results that the engine /// will be able to return in one search call. -pub const DEFAULT_PAGINATION_LIMITED_TO: usize = 1000; +pub const DEFAULT_PAGINATION_MAX_TOTAL_HITS: usize = 1000; #[derive(Deserialize, Debug, Clone, PartialEq)] #[serde(rename_all = "camelCase", deny_unknown_fields)] @@ -91,14 +91,14 @@ impl Index { search.query(query); } - let pagination_limited_to = self - .pagination_limited_to(&rtxn)? - .unwrap_or(DEFAULT_PAGINATION_LIMITED_TO); + let max_total_hits = self + .pagination_max_total_hits(&rtxn)? + .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS); // Make sure that a user can't get more documents than the hard limit, // we align that on the offset too. 
- let offset = min(query.offset.unwrap_or(0), pagination_limited_to); - let limit = min(query.limit, pagination_limited_to.saturating_sub(offset)); + let offset = min(query.offset.unwrap_or(0), max_total_hits); + let limit = min(query.limit, max_total_hits.saturating_sub(offset)); search.offset(offset); search.limit(limit); diff --git a/meilisearch-lib/src/index/updates.rs b/meilisearch-lib/src/index/updates.rs index 95edbbf9d..07695af05 100644 --- a/meilisearch-lib/src/index/updates.rs +++ b/meilisearch-lib/src/index/updates.rs @@ -86,7 +86,7 @@ pub struct FacetingSettings { pub struct PaginationSettings { #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))] #[serde(default, skip_serializing_if = "Setting::is_not_set")] - pub limited_to: Setting, + pub max_total_hits: Setting, } /// Holds all the settings for an index. `T` can either be `Checked` if they represents settings @@ -474,12 +474,12 @@ pub fn apply_settings_to_builder( } match settings.pagination { - Setting::Set(ref value) => match value.limited_to { - Setting::Set(val) => builder.set_pagination_limited_to(val), - Setting::Reset => builder.reset_pagination_limited_to(), + Setting::Set(ref value) => match value.max_total_hits { + Setting::Set(val) => builder.set_pagination_max_total_hits(val), + Setting::Reset => builder.reset_pagination_max_total_hits(), Setting::NotSet => (), }, - Setting::Reset => builder.reset_pagination_limited_to(), + Setting::Reset => builder.reset_pagination_max_total_hits(), Setting::NotSet => (), } } diff --git a/meilisearch-lib/src/index_resolver/meta_store.rs b/meilisearch-lib/src/index_resolver/meta_store.rs index f53f9cae9..f335d9923 100644 --- a/meilisearch-lib/src/index_resolver/meta_store.rs +++ b/meilisearch-lib/src/index_resolver/meta_store.rs @@ -3,6 +3,7 @@ use std::fs::{create_dir_all, File}; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; use std::sync::Arc; +use walkdir::WalkDir; use milli::heed::types::{SerdeBincode, Str}; 
use milli::heed::{CompactionOption, Database, Env}; @@ -11,7 +12,6 @@ use uuid::Uuid; use super::error::{IndexResolverError, Result}; use crate::tasks::task::TaskId; -use crate::EnvSizer; #[derive(Serialize, Deserialize)] pub struct DumpEntry { @@ -131,7 +131,12 @@ impl HeedMetaStore { } fn get_size(&self) -> Result { - Ok(self.env.size()) + Ok(WalkDir::new(self.env.path()) + .into_iter() + .filter_map(|entry| entry.ok()) + .filter_map(|entry| entry.metadata().ok()) + .filter(|metadata| metadata.is_file()) + .fold(0, |acc, m| acc + m.len())) } pub fn dump(&self, path: PathBuf) -> Result<()> { diff --git a/meilisearch-lib/src/lib.rs b/meilisearch-lib/src/lib.rs index 3d3d5e860..70fd2ba51 100644 --- a/meilisearch-lib/src/lib.rs +++ b/meilisearch-lib/src/lib.rs @@ -20,23 +20,6 @@ pub use milli::heed; mod compression; pub mod document_formats; -use walkdir::WalkDir; - -pub trait EnvSizer { - fn size(&self) -> u64; -} - -impl EnvSizer for milli::heed::Env { - fn size(&self) -> u64 { - WalkDir::new(self.path()) - .into_iter() - .filter_map(|entry| entry.ok()) - .filter_map(|entry| entry.metadata().ok()) - .filter(|metadata| metadata.is_file()) - .fold(0, |acc, m| acc + m.len()) - } -} - /// Check if a db is empty. It does not provide any information on the /// validity of the data in it. /// We consider a database as non empty when it's a non empty directory. 
diff --git a/meilisearch-lib/src/snapshot.rs b/meilisearch-lib/src/snapshot.rs index 527195729..da4907939 100644 --- a/meilisearch-lib/src/snapshot.rs +++ b/meilisearch-lib/src/snapshot.rs @@ -7,6 +7,7 @@ use anyhow::bail; use fs_extra::dir::{self, CopyOptions}; use log::{info, trace}; use meilisearch_auth::open_auth_store_env; +use milli::heed::CompactionOption; use tokio::sync::RwLock; use tokio::time::sleep; use walkdir::WalkDir; @@ -181,9 +182,7 @@ impl SnapshotJob { let mut options = milli::heed::EnvOpenOptions::new(); options.map_size(self.index_size); let index = milli::Index::new(options, entry.path())?; - index - .env - .copy_to_path(dst, milli::heed::CompactionOption::Enabled)?; + index.copy_to_path(dst, CompactionOption::Enabled)?; } Ok(()) From f83188fd606c9a0d576a2dfeafcaac67b7a0d760 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Thu, 23 Jun 2022 19:14:39 +0200 Subject: [PATCH 179/185] Fix CI with check-release.sh script --- .github/scripts/check-release.sh | 10 +++++----- .github/workflows/publish-binaries.yml | 2 +- .github/workflows/publish-deb-brew-pkg.yml | 2 +- .github/workflows/publish-docker-images.yml | 8 ++++---- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh index 3c8dd64a7..2ce171459 100644 --- a/.github/scripts/check-release.sh +++ b/.github/scripts/check-release.sh @@ -1,15 +1,15 @@ -#!/bin/sh +#!/bin/bash # check_tag $current_tag $file_tag $file_name function check_tag { - if [ "$1" != "$2" ]; then - echo "Error: the current tag does not match the version in $3: found $1 - expected $2" + if [[ "$1" != "$2" ]]; then + echo "Error: the current tag does not match the version in $3: found $2 - expected $1" ret=1 fi } ret=0 -current_tag=$(echo $GITHUB_REF | tr -d 'refs/tags/v') +current_tag=${GITHUB_REF#'refs/tags/v'} toml_files='*/Cargo.toml' for toml_file in $toml_files; @@ -22,7 +22,7 @@ lock_file='Cargo.lock' lock_tag=$(grep 
-A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') check_tag $current_tag $lock_tag $lock_file -if [ "$ret" -eq 0 ] ; then +if [[ "$ret" -eq 0 ]] ; then echo 'OK' fi exit $ret diff --git a/.github/workflows/publish-binaries.yml b/.github/workflows/publish-binaries.yml index eee7449a8..215357f02 100644 --- a/.github/workflows/publish-binaries.yml +++ b/.github/workflows/publish-binaries.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Check release validity - run: sh .github/scripts/check-release.sh + run: bash .github/scripts/check-release.sh publish: name: Publish binary for ${{ matrix.os }} diff --git a/.github/workflows/publish-deb-brew-pkg.yml b/.github/workflows/publish-deb-brew-pkg.yml index 7618496e9..dbdbdda7e 100644 --- a/.github/workflows/publish-deb-brew-pkg.yml +++ b/.github/workflows/publish-deb-brew-pkg.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Check release validity - run: sh .github/scripts/check-release.sh + run: bash .github/scripts/check-release.sh debian: name: Publish debian packagge diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index 223ef41b3..0d2e2b60e 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -32,8 +32,8 @@ jobs: # Check only the validity of the tag for official releases (not for pre-releases or other tags) - name: Check release validity - if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable - run: sh .github/scripts/check-release.sh + if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable == 'true' + run: bash .github/scripts/check-release.sh - name: Set up QEMU uses: docker/setup-qemu-action@v1 @@ -58,8 +58,8 @@ jobs: flavor: latest=false tags: | type=ref,event=tag - type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable }} - 
type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable }} + type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }} + type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' }} - name: Build and push id: docker_build From 7f4fab876d5e52ce39a7de74919e45da41655123 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Mon, 27 Jun 2022 13:11:58 +0200 Subject: [PATCH 180/185] Add fix to publish-binaries.yml --- .github/workflows/publish-binaries.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/workflows/publish-binaries.yml b/.github/workflows/publish-binaries.yml index 215357f02..298082816 100644 --- a/.github/workflows/publish-binaries.yml +++ b/.github/workflows/publish-binaries.yml @@ -10,7 +10,22 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + # Check if the tag has the v.. format. + # If yes, it means we are publishing an official release. + # If no, we are releasing a RC, so no need to check the version. 
+ - name: Check tag format + if: github.event_name != 'schedule' + id: check-tag-format + run: | + escaped_tag=$(printf "%q" ${{ github.ref_name }}) + + if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo ::set-output name=stable::true + else + echo ::set-output name=stable::false + fi - name: Check release validity + if: steps.check-tag-format.outputs.stable == 'true' run: bash .github/scripts/check-release.sh publish: From 05ee2eff01ab0d7b429ef0330c3798d0d981ceb0 Mon Sep 17 00:00:00 2001 From: Irevoire Date: Tue, 28 Jun 2022 13:01:18 +0200 Subject: [PATCH 181/185] add more tests on the formatted route --- Cargo.lock | 4 +- meilisearch-http/Cargo.toml | 2 +- meilisearch-http/tests/search/errors.rs | 8 +- meilisearch-http/tests/search/formatted.rs | 599 ++++++++++++--------- 4 files changed, 341 insertions(+), 272 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3af8ab7ad..bc9a4f992 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3986,9 +3986,9 @@ checksum = "b6d12cb7a57bbf2ab670ed9545bae3648048547f9039279a89ce000208e585c1" [[package]] name = "yaup" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f8e1d3d18db742c8b9ad2f5f3c5bf5b63aa67b9933617c8f8350d39a3c173c6" +checksum = "3bc9ef6963f7e857050aabf31ebc44184f278bcfec4c3671552c1a916b152b45" dependencies = [ "serde", "url", diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index 0012ea9b0..b5466d13c 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -84,7 +84,7 @@ assert-json-diff = "2.0.1" manifest-dir-macros = "0.1.14" maplit = "1.0.2" urlencoding = "2.1.0" -yaup = "0.1.0" +yaup = "0.2.0" [features] default = ["analytics", "mini-dashboard"] diff --git a/meilisearch-http/tests/search/errors.rs b/meilisearch-http/tests/search/errors.rs index c2523597d..98da0495a 100644 --- a/meilisearch-http/tests/search/errors.rs +++ b/meilisearch-http/tests/search/errors.rs @@ -107,7 +107,7 @@ async fn 
filter_invalid_syntax_array() { "link": "https://docs.meilisearch.com/errors#invalid_filter" }); index - .search(json!({"filter": [["title & Glass"]]}), |response, code| { + .search(json!({"filter": ["title & Glass"]}), |response, code| { assert_eq!(response, expected_response); assert_eq!(code, 400); }) @@ -164,7 +164,7 @@ async fn filter_invalid_attribute_array() { "link": "https://docs.meilisearch.com/errors#invalid_filter" }); index - .search(json!({"filter": [["many = Glass"]]}), |response, code| { + .search(json!({"filter": ["many = Glass"]}), |response, code| { assert_eq!(response, expected_response); assert_eq!(code, 400); }) @@ -218,7 +218,7 @@ async fn filter_reserved_geo_attribute_array() { "link": "https://docs.meilisearch.com/errors#invalid_filter" }); index - .search(json!({"filter": [["_geo = Glass"]]}), |response, code| { + .search(json!({"filter": ["_geo = Glass"]}), |response, code| { assert_eq!(response, expected_response); assert_eq!(code, 400); }) @@ -273,7 +273,7 @@ async fn filter_reserved_attribute_array() { }); index .search( - json!({"filter": [["_geoDistance = Glass"]]}), + json!({"filter": ["_geoDistance = Glass"]}), |response, code| { assert_eq!(response, expected_response); assert_eq!(code, 400); diff --git a/meilisearch-http/tests/search/formatted.rs b/meilisearch-http/tests/search/formatted.rs index 556b0bf35..7303a7154 100644 --- a/meilisearch-http/tests/search/formatted.rs +++ b/meilisearch-http/tests/search/formatted.rs @@ -33,70 +33,82 @@ async fn formatted_contain_wildcard() { ) .await; - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["*"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "cattos": "pesti", - }) - ); - - let (response, code) = index - .search_post( - json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }), - ) - .await; - assert_eq!(code, 200, 
"{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "cattos": "pesti", - "_formatted": { - "id": "852", - "cattos": "pesti", + index + .search( + json!({ "q": "pesti", "attributesToRetrieve": ["*"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + "cattos": "pesti", + }) + ); }, - "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]}, - }) - ); - - let (response, code) = index - .search_post( - json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToCrop": ["*"] }), ) .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "cattos": "pesti", - "_formatted": { - "id": "852", - "cattos": "pesti", - } - }) - ); - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToCrop": ["*"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "cattos": "pesti", - "_formatted": { - "id": "852", - "cattos": "pesti", + index + .search( + json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + "cattos": "pesti", + "_formatted": { + "id": "852", + "cattos": "pesti", + }, + "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]}, + }) + ); } - }) - ); + ) + .await; + + index + .search( + json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToCrop": ["*"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + "cattos": "pesti", + "_formatted": { + "id": "852", + "cattos": "pesti", + } + }) + ); + }, + ) + .await; + + index + .search( + json!({ "q": "pesti", "attributesToCrop": ["*"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + 
assert_eq!( + response["hits"][0], + json!({ + "id": 852, + "cattos": "pesti", + "_formatted": { + "id": "852", + "cattos": "pesti", + } + }) + ); + }, + ) + .await; } #[actix_rt::test] @@ -108,108 +120,122 @@ async fn format_nested() { index.add_documents(documents, None).await; index.wait_task(0).await; - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "doggos": [ - { - "name": "bobby", - "age": 2, - }, - { - "name": "buddy", - "age": 4, - }, - ], - }) - ); - - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "doggos": [ - { - "name": "bobby", - }, - { - "name": "buddy", - }, - ], - }) - ); - - let (response, code) = index - .search_post( - json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }), + index + .search( + json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "doggos": [ + { + "name": "bobby", + "age": 2, + }, + { + "name": "buddy", + "age": 4, + }, + ], + }) + ); + }, ) .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "doggos": [ - { - "name": "bobby", - }, - { - "name": "buddy", - }, - ], - "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]}, - }) - ); - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "_formatted": { - "doggos": [ - { - "name": "bobby", - }, - { - "name": "buddy", - }, - ], + index + .search( + json!({ "q": "pesti", 
"attributesToRetrieve": ["doggos.name"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "doggos": [ + { + "name": "bobby", + }, + { + "name": "buddy", + }, + ], + }) + ); }, - }) - ); - - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] })) + ) .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "_formatted": { - "doggos": [ - { - "name": "bobby", - }, - { - "name": "buddy", - }, - ], - }, - }) - ); - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] })) + index + .search( + json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "doggos": [ + { + "name": "bobby", + }, + { + "name": "buddy", + }, + ], + "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]}, + }) + ); + } + ) .await; + + index + .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "_formatted": { + "doggos": [ + { + "name": "bobby", + }, + { + "name": "buddy", + }, + ], + }, + }) + ); + }) + .await; + + index + .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "_formatted": { + "doggos": [ + { + "name": "bobby", + }, + { + "name": "buddy", + }, + ], + }, + }) + ); + }) + .await; + + index + .search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] }), + |response, code| { assert_eq!(code, 
200, "{}", response); assert_eq!( response["hits"][0], @@ -236,11 +262,13 @@ async fn format_nested() { }, }) ); - - let (response, code) = index - .search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] })) + }) .await; - assert_eq!(code, 200, "{}", response); + + index + .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); assert_eq!( response["hits"][0], json!({ @@ -258,6 +286,9 @@ async fn format_nested() { }, }) ); + } + ) + .await; } #[actix_rt::test] @@ -274,9 +305,9 @@ async fn displayedattr_2_smol() { index.add_documents(documents, None).await; index.wait_task(1).await; - let (response, code) = index - .search_post(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] })) - .await; + index + .search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }), + |response, code| { assert_eq!(code, 200, "{}", response); assert_eq!( response["hits"][0], @@ -284,119 +315,157 @@ async fn displayedattr_2_smol() { "id": 852, }) ); - - let (response, code) = index - .search_post(json!({ "attributesToRetrieve": ["id"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, }) - ); - - let (response, code) = index - .search_post(json!({ "attributesToHighlight": ["id"] })) .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "_formatted": { - "id": "852", - } - }) - ); - let (response, code) = index - .search_post(json!({ "attributesToCrop": ["id"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "_formatted": { - "id": "852", - } - }) 
- ); - - let (response, code) = index - .search_post(json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - "_formatted": { - "id": "852", - } - }) - ); - - let (response, code) = index - .search_post(json!({ "attributesToHighlight": ["cattos"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - }) - ); - - let (response, code) = index - .search_post(json!({ "attributesToCrop": ["cattos"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "id": 852, - }) - ); - - let (response, code) = index - .search_post(json!({ "attributesToRetrieve": ["cattos"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!(response["hits"][0], json!({})); - - let (response, code) = index - .search_post( - json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }), + index + .search( + json!({ "attributesToRetrieve": ["id"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + }) + ); + }, ) .await; + + index + .search( + json!({ "attributesToHighlight": ["id"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + "_formatted": { + "id": "852", + } + }) + ); + }, + ) + .await; + + index + .search(json!({ "attributesToCrop": ["id"] }), |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + "_formatted": { + "id": "852", + } + }) + ); + }) + .await; + + index + .search( + json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + 
"_formatted": { + "id": "852", + } + }) + ); + }, + ) + .await; + + index + .search( + json!({ "attributesToHighlight": ["cattos"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + }) + ); + }, + ) + .await; + + index + .search( + json!({ "attributesToCrop": ["cattos"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "id": 852, + }) + ); + }, + ) + .await; + + index + .search( + json!({ "attributesToRetrieve": ["cattos"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!(response["hits"][0], json!({})); + }, + ) + .await; + + index + .search( + json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }), + |response, code| { assert_eq!(code, 200, "{}", response); assert_eq!(response["hits"][0], json!({})); - let (response, code) = index - .search_post(json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] })) - .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "_formatted": { - "id": "852", } - }) - ); + ) + .await; - let (response, code) = index - .search_post(json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] })) + index + .search( + json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "_formatted": { + "id": "852", + } + }) + ); + }, + ) + .await; + + index + .search( + json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] }), + |response, code| { + assert_eq!(code, 200, "{}", response); + assert_eq!( + response["hits"][0], + json!({ + "_formatted": { + "id": "852", + } + }) + ); + }, + ) .await; - assert_eq!(code, 200, "{}", response); - assert_eq!( - response["hits"][0], - json!({ - "_formatted": { 
- "id": "852", - } - }) - ); } From a146fd45b91f7cea2e452d2f5e1525ccaa634761 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Tue, 5 Jul 2022 14:13:20 +0200 Subject: [PATCH 182/185] Format API keys in hexa instead of base64 --- Cargo.lock | 18 ++++++++++----- meilisearch-auth/Cargo.toml | 3 +-- meilisearch-auth/src/lib.rs | 4 ++-- meilisearch-auth/src/store.rs | 30 ++++++++++++++++--------- meilisearch-http/Cargo.toml | 2 +- meilisearch-http/tests/auth/api_keys.rs | 1 + meilisearch-lib/Cargo.toml | 2 +- 7 files changed, 38 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bc9a4f992..82f83375e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2003,7 +2003,6 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" name = "meilisearch-auth" version = "0.28.0" dependencies = [ - "base64", "enum-iterator", "hmac", "meilisearch-types", @@ -2014,7 +2013,7 @@ dependencies = [ "sha2", "thiserror", "time 0.3.9", - "uuid", + "uuid 1.1.2", ] [[package]] @@ -2083,7 +2082,7 @@ dependencies = [ "tokio", "tokio-stream", "urlencoding", - "uuid", + "uuid 1.1.2", "vergen", "walkdir", "yaup", @@ -2147,7 +2146,7 @@ dependencies = [ "thiserror", "time 0.3.9", "tokio", - "uuid", + "uuid 1.1.2", "walkdir", "whoami", ] @@ -2229,7 +2228,7 @@ dependencies = [ "tempfile", "thiserror", "time 0.3.9", - "uuid", + "uuid 0.8.2", ] [[package]] @@ -3676,6 +3675,15 @@ name = "uuid" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "uuid" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f" dependencies = [ "getrandom", "serde", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index b4921655e..3ba5408e8 100644 --- a/meilisearch-auth/Cargo.toml +++ 
b/meilisearch-auth/Cargo.toml @@ -4,7 +4,6 @@ version = "0.28.0" edition = "2021" [dependencies] -base64 = "0.13.0" enum-iterator = "0.7.0" hmac = "0.12.1" meilisearch-types = { path = "../meilisearch-types" } @@ -15,4 +14,4 @@ serde_json = { version = "1.0.79", features = ["preserve_order"] } sha2 = "0.10.2" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } -uuid = { version = "0.8.2", features = ["serde", "v4"] } +uuid = { version = "1.1.2", features = ["serde", "v4"] } diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 81443348a..17f1a3567 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -18,7 +18,7 @@ pub use action::{actions, Action}; use error::{AuthControllerError, Result}; pub use key::Key; use meilisearch_types::star_or::StarOr; -use store::generate_key_as_base64; +use store::generate_key_as_hexa; pub use store::open_auth_store_env; use store::HeedAuthStore; @@ -139,7 +139,7 @@ impl AuthController { pub fn generate_key(&self, uid: Uuid) -> Option { self.master_key .as_ref() - .map(|master_key| generate_key_as_base64(uid.as_bytes(), master_key.as_bytes())) + .map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes())) } /// Check if the provided key is authorized to make a specific action diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 0355c4579..49bbf356e 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -13,8 +13,9 @@ use hmac::{Hmac, Mac}; use meilisearch_types::star_or::StarOr; use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; use milli::heed::{Database, Env, EnvOpenOptions, RwTxn}; -use sha2::{Digest, Sha256}; +use sha2::Sha256; use time::OffsetDateTime; +use uuid::fmt::Hyphenated; use uuid::Uuid; use super::error::Result; @@ -132,13 +133,16 @@ impl HeedAuthStore { .remap_data_type::() .iter(&rtxn)? 
.filter_map(|res| match res { - Ok((uid, _)) - if generate_key_as_base64(uid, master_key).as_bytes() == encoded_key => - { + Ok((uid, _)) => { let (uid, _) = try_split_array_at(uid)?; - Some(Uuid::from_bytes(*uid)) + let uid = Uuid::from_bytes(*uid); + if generate_key_as_hexa(uid, master_key).as_bytes() == encoded_key { + Some(uid) + } else { + None + } } - _ => None, + Err(_) => None, }) .next(); @@ -244,13 +248,17 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec { } } -pub fn generate_key_as_base64(uid: &[u8], master_key: &[u8]) -> String { - let master_key_sha = Sha256::digest(master_key); - let mut mac = Hmac::::new_from_slice(master_key_sha.as_slice()).unwrap(); - mac.update(uid); +pub fn generate_key_as_hexa(uid: Uuid, master_key: &[u8]) -> String { + // format uid as hyphenated allowing user to generate their own keys. + let mut uid_buffer = [0; Hyphenated::LENGTH]; + let uid = uid.hyphenated().encode_lower(&mut uid_buffer); + + // new_from_slice function never fail. + let mut mac = Hmac::::new_from_slice(master_key).unwrap(); + mac.update(uid.as_bytes()); let result = mac.finalize(); - base64::encode_config(result.into_bytes(), base64::URL_SAFE_NO_PAD) + format!("{:x}", result.into_bytes()) } /// Divides one slice into two at an index, returns `None` if mid is out of bounds. 
diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index b5466d13c..5cea82d6f 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -75,7 +75,7 @@ thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } tokio = { version = "1.17.0", features = ["full"] } tokio-stream = "0.1.8" -uuid = { version = "0.8.2", features = ["serde", "v4"] } +uuid = { version = "1.1.2", features = ["serde", "v4"] } walkdir = "2.3.2" [dev-dependencies] diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 9dcbd9b55..7fdf2f129 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -42,6 +42,7 @@ async fn add_valid_api_key() { "name": "indexing-key", "description": "Indexing API key", "uid": "4bc0887a-0e41-4f3b-935d-0c451dcee9c8", + "key": "d9e776b8412f1db6974c9a5556b961c3559440b6588216f4ea5d9ed49f7c8f3c", "indexes": ["products"], "actions": [ "search", diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 3629f51a8..3d5505e5f 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -51,7 +51,7 @@ tempfile = "3.3.0" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } tokio = { version = "1.17.0", features = ["full"] } -uuid = { version = "0.8.2", features = ["serde", "v4"] } +uuid = { version = "1.1.2", features = ["serde", "v4"] } walkdir = "2.3.2" whoami = { version = "1.2.1", optional = true } From d56bf660221f480b46a8e868361d8f9e38dfbbe2 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 4 Jul 2022 12:00:03 +0200 Subject: [PATCH 183/185] Make clippy happy --- meilisearch-http/src/lib.rs | 2 +- meilisearch-http/src/routes/indexes/search.rs | 7 +++---- meilisearch-http/tests/common/index.rs | 13 +++++++------ 3 files changed, 11 insertions(+), 11 deletions(-) diff --git 
a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs index bfdb829d4..6485784fc 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -31,7 +31,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result { let mut meilisearch = MeiliSearch::builder(); // enable autobatching? - let _ = AUTOBATCHING_ENABLED.store( + AUTOBATCHING_ENABLED.store( opt.scheduler_options.enable_auto_batching, std::sync::atomic::Ordering::Relaxed, ); diff --git a/meilisearch-http/src/routes/indexes/search.rs b/meilisearch-http/src/routes/indexes/search.rs index 9e3a64881..62bd65e14 100644 --- a/meilisearch-http/src/routes/indexes/search.rs +++ b/meilisearch-http/src/routes/indexes/search.rs @@ -111,10 +111,9 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec { sort_parameters.push(current_sort.to_string()); merge = true; } else if merge && !sort_parameters.is_empty() { - sort_parameters - .last_mut() - .unwrap() - .push_str(&format!(",{}", current_sort)); + let s = sort_parameters.last_mut().unwrap(); + s.push(','); + s.push_str(current_sort); if current_sort.ends_with("):desc") || current_sort.ends_with("):asc") { merge = false; } diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index c92c58560..90d138ced 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -1,4 +1,5 @@ use std::{ + fmt::Write, panic::{catch_unwind, resume_unwind, UnwindSafe}, time::Duration, }; @@ -118,10 +119,10 @@ impl Index<'_> { pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) { let mut url = format!("/tasks?indexUid={}", self.uid); if !type_.is_empty() { - url += &format!("&type={}", type_.join(",")); + let _ = write!(url, "&type={}", type_.join(",")); } if !status.is_empty() { - url += &format!("&status={}", status.join(",")); + let _ = write!(url, "&status={}", status.join(",")); } self.service.get(url).await } @@ -133,7 +134,7 @@ impl Index<'_> { ) 
-> (Value, StatusCode) { let mut url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id); if let Some(fields) = options.and_then(|o| o.fields) { - url.push_str(&format!("?fields={}", fields.join(","))); + let _ = write!(url, "?fields={}", fields.join(",")); } self.service.get(url).await } @@ -141,15 +142,15 @@ impl Index<'_> { pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) { let mut url = format!("/indexes/{}/documents?", encode(self.uid.as_ref())); if let Some(limit) = options.limit { - url.push_str(&format!("limit={}&", limit)); + let _ = write!(url, "limit={}&", limit); } if let Some(offset) = options.offset { - url.push_str(&format!("offset={}&", offset)); + let _ = write!(url, "offset={}&", offset); } if let Some(attributes_to_retrieve) = options.attributes_to_retrieve { - url.push_str(&format!("fields={}&", attributes_to_retrieve.join(","))); + let _ = write!(url, "fields={}&", attributes_to_retrieve.join(",")); } self.service.get(url).await From 719879d4d28ff9cb93b4c2f0567ab28bef4bbb4f Mon Sep 17 00:00:00 2001 From: Morgane Dubus <30866152+mdubus@users.noreply.github.com> Date: Wed, 6 Jul 2022 08:12:17 +0200 Subject: [PATCH 184/185] Update mini-dashboard to v0.2.0 --- meilisearch-http/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml index b5466d13c..566c3e2f3 100644 --- a/meilisearch-http/Cargo.toml +++ b/meilisearch-http/Cargo.toml @@ -105,5 +105,5 @@ mini-dashboard = [ tikv-jemallocator = "0.4.3" [package.metadata.mini-dashboard] -assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.10/build.zip" -sha1 = "1adf96592c267425c110bfefc36b7fc6bfb0f93d" +assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.0/build.zip" +sha1 = "25d1615c608541375a08bd722c3fd3315f926be6" From a9fb5a4d504febf5f6e75924a4db339505f8c398 Mon Sep 17 00:00:00 2001 From: 
Kerollmops Date: Thu, 7 Jul 2022 10:56:02 +0200 Subject: [PATCH 185/185] Introduce the Tasks Seen event when filtering --- .../src/analytics/segment_analytics.rs | 8 +++---- meilisearch-http/src/routes/tasks.rs | 22 +++++++++++-------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 1007a242a..b04d814aa 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -574,7 +574,7 @@ impl DocumentsAggregator { let content_type = request .headers() .get(CONTENT_TYPE) - .map(|s| s.to_str().unwrap_or("unknown")) + .and_then(|s| s.to_str().ok()) .unwrap_or("unknown") .to_string(); ret.content_types.insert(content_type); @@ -591,13 +591,13 @@ impl DocumentsAggregator { self.updated |= other.updated; // we can't create a union because there is no `into_union` method - for user_agent in other.user_agents.into_iter() { + for user_agent in other.user_agents { self.user_agents.insert(user_agent); } - for primary_key in other.primary_keys.into_iter() { + for primary_key in other.primary_keys { self.primary_keys.insert(primary_key); } - for content_type in other.content_types.into_iter() { + for content_type in other.content_types { self.content_types.insert(content_type); } self.index_creation |= other.index_creation; diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index fed7fa634..016aadfb8 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -25,7 +25,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct TaskFilterQuery { +pub struct TasksFilterQuery { #[serde(rename = "type")] type_: Option>>, status: Option>>, @@ -61,17 +61,11 @@ fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> 
bool async fn get_tasks( meilisearch: GuardedData, MeiliSearch>, - params: web::Query, + params: web::Query, req: HttpRequest, analytics: web::Data, ) -> Result { - analytics.publish( - "Tasks Seen".to_string(), - json!({ "per_task_uid": false }), - Some(&req), - ); - - let TaskFilterQuery { + let TasksFilterQuery { type_, status, index_uid, @@ -87,6 +81,16 @@ async fn get_tasks( let status: Option> = status.and_then(fold_star_or); let index_uid: Option> = index_uid.and_then(fold_star_or); + analytics.publish( + "Tasks Seen".to_string(), + json!({ + "filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()), + "filtered_by_type": type_.as_ref().map_or(false, |v| !v.is_empty()), + "filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()), + }), + Some(&req), + ); + // Then we filter on potential indexes and make sure that the search filter // restrictions are also applied. let indexes_filters = match index_uid {

aHYFYt>b=a!4^MG70@G2v`XVwA>^7Cat}g;%cN-zq;Epfr}Abon_UrCjAEi za6g0X5k7~mK!G3|M3GpdE&YIh=LgoJ&SEV>3LWnzoDj55<{@;1GlGg-cWU*rG2DHg z8Ph&3*E+;rI<(s-3`=|qJ@L1{{7*0~gsb?snd>3vI285-npN$#n#61l`pXnJ<5>U_ zcn4bL-x-sV_p;r5w}f;?6p7G}rrOsMu0aGX{W&Knq#;$pq*@aJvsv*@uhM*IX5#r% z`|8VJrVcL8uN%G8v4wjgU*6EAYPH*L6hLXMkY%~?VitU}p?wj1L&87dN9M|YM({EJ zZqEmuZioPD)si!O-X~8%LQ>+ahm?d7G`OgCQnzfg@h5x7Pb*o-rqEj^xgCqyd+2XT z#Wd!PNOKTNkF6Wz=5CQtHIR~1b9jSLA&gr%f0D(C=Y+rp6jPg5Dh80Q^dtRi@^SP_ zHl~T-$RvJ;%X(K7(NW=~eR1Et3}&s@^VfzuXhpBD(_reql@)2XE1g2HT`L6J=P+q| z^pB6$y&`&4*GwO&N0JDnrMaZwsub`ChA7Tbm!(k#k+0@5F7Y}#Go)=~AT=V^9QWeI zS~`Tbz1Sv5_P4+McMb56z$ELL=%NTc32PA5#u7$tjhwl!e?MHFwg=*|GkLsxOq%nf zqvP8~b)4NDT-o8?8w6}MS>TrhEJW33B6g*4)cEfD+==w zH>cjx2=__yTrzq-mwY%$Vl(>VBnd2*7>$HyAk77ZVC+NZI_yU(lH4p=amR`kG22$i zJES_m!dvKgnnEZddfB>AA!@$~`FQUoB`)@C;mBZa)`SSrFR_|1w&WL0ikEeg?z!kB3KwC=(v_nnl|-Pu8DB{2AVKZ zKF5b#=9Y1!JgpKT)Djk`obNGFZ-(iY*z|Wou5R_Z{aTr-_N90A;51(jYJ19?&W4|o z&1t=ArPFAwL~^x4B-e`+@pB&0Zm-mA&3>o47A_NYItYMeEYio(!Lm z(I6B|elm7+1zEdJK`xQ3Fc(GwPm>psTS?b5JYeIJ^`{+25q;5iAYWK$$(`!pj%*J& zs6XHR(~flltw-O3r$ICAfONXd+D?u>fQ#V&rw}_~MbEg#BJrXziRC_NMo8E!wIqB?q5!(wb$9}(aG@c?6E%SxzpZ8?$;~zX02GztQTV!G1e)zTaxxw z-k+-{&$?7G5Ts)7BDfxdOt}fZMmqR{Ux&0~kH%BE1a!DSsPlGt07@2>4LA5i^vYNi zSVEErr9#+de59+=!~#Ey*XdTQ3jIhFIPPaB2l7D%y(-zfcs9Dc-FL%&aCv|AMvppf!2lI?(v{0BBwy|6q;*Puh5#XqM0o*&CY*BMngYs2q&hwV zYe&sY-qTfM6ncQ%$<}G?m}F4|(sMbT<49OQUe9(t-=PSadv=6v%P7^Mkq?^-YoE@` zE?cmrQo>!Ei~!MuHDO3^NKAW$#YIe?(3Z25;-oXhg(bi_k2B~9zmT8_%?k^%koIJb z(w7WLLiDSooN#BJ+bWK;h>=@N@%}2CAGlKUK9=O>S+ z_RVv5a~+ACY@=uw>&?|rwC@E`0(HJbptxom8Bd+4V7(I~QY6`kVmPU#Bh{HmIeQyL zFm-)Y%f}-RjREMv2~ox~ImfX#CU}peH|h!jU&-fP{wetrYKbei>*Byqjqat^^+vYY)Z zM1#U2rt3#4W#PRfDO`iZimde>L0_&WQ`DCZV>OZ5k*uRYLNYR*!TYet65z}GwVp9VL2$o!n@HzdZSS=97^CQUNk;1Gpb=~UySn>7cC_BC1tBRXptvh|L z99JH@hs~&ck-3}gV71cjwwlEVzS%5RUlqM2LIW}t8}nc$Pbp%{CT^gxXI+wGx0SWe zxl75_4kQO{7s!H$tIOVbNTY*178$ZE#w#Zu7swaYc$BhrOIMtQyxgMTns^{g*$d0Q z-~<6_rHljRNE3Vt?tEZ5q@@^m61vq-h0YPy5K-o*tg-Y2AVSpcr4C86fLIplejeKD 
zSeTqYig3TO95-QPa3jT5Xf<2F+0NvW35W4s{%Lh z_J_VA-0sL>x(xfqXrLmx9f;|iTu-PqRZNvfh|ic4M+7=G@H?eW{CXr$n~h-9`qDkz z>reZ2H@pq5_l8G8+h%^lSHm{d7PN452Kl6H40F} zCm7dZ7JFP738l!p-?C8X^lIL%?9wvhi=qh< z&%&9)BskjEfG-gtg!o%#ec3Irl;L-!s${!%|8n*GbbQ(B-c9C>hk>dM&s*_mbIGsV zY}P8>BHarRcQBJa1^bsOoXGjtRVeIvPJY>{QM^$?m_Ma`m>TUqvIq36%qG(aHJ&OT z(9_hrhbFI-r@oPhUn#O;aZPt)Aj#HX6^~RQF9JqHUYL*&Dd9OM^o@}MT5yRKks`CL z0}OCCPemwr#36=JGAExVQq=jZ1BwC8Np9nL zXx$3W*9m?Hy~G|~<4lf5=;1vEMqjPx378Tnf+ikk3{xHS0*kmvZY;{H+QJmHCsq{e zS|ClNSM6~qtm#~><+L+pkyeoHHUuns=j z{}P_{d$(#ZypW@~J8)+Yd;Q5-TWu~_b!wIBO1-L8$bS!nKfxYP)(cewL}OBSO2U{} z%5h9r7wtPDdZY846H&Iwi*f`vQ+eQpPlpuGK|Ih|1W6RXVc|~R9NvzH&xfbgdQRa^T^x32GgbXr}na$nNyk}75g=HsN(6IneD3eI3dq5zQ5J1y%&6X>O3D-k1n!{!KnTCH0mbK>B*}SZ^%Zy z)n37jtwO6=ZPg`}^=HTWS&2?X|hzTX~tyb1}K{eg_hLdF+mZSsD(!%ID65JveHgCFAai6SX!& z`cAXiX?F@CTdPn^xDl~{>HRwW3nRvPBo~0(ST8nA6rjc#Q&#u3<&$MY6)?AmrUMGt zd*oGDH~$ro{2uXqe)ux^EIRj&eeh+{$zE^wYBwV{y6m1_ya6aHoqD}nwBd!2W-k`6 z-dYj)Le0uPns)&drN}5$O|3rZGJQcBpHHu`cT}W)DqK{f95FAas!adVy@q)UAu@j? zZ`=VuUoK2TZezF$mYj0R#W<33wd{mi&RI~5Lw92$Zz^?ksqa7rNz0+H<&`lg29WS8 z4Lo#uoPi`qoM!NdJCj3&syN5a1?+-e6D)^lo`J^+tu(g=89EkAOK_qAFMhn}?ajBS zfeAoeJU1aZ17^f0+=gj$xm<&wnH+M5r9o8|YY$_u5=I<#`@(`FwoWAb-g?$+k?H!8 z{IFBG@rF0A`{#|(y*GJM2i2SB6Ez)9C+S8ZYqZ<#N->$(Dg=8Ej1!1T7v6gJbdOYw z_~fCMjS(v|JgKW z^9hj{iViP@u2~LY`WazJX{*>tbS@@{Tr|4Ra{L+dmq4v>wyOi4(W$lUKT0nWg_*MHAQ(P2Y2+cxEIf`4{+FKBQ-2zfV4-k*$@+6@r zt%EE?D+H!nqgX~dcS|O8s9tznXFT>GuIcL-6-(A+$vOJGC0z$0`bIRHNZpdBm41v? 
zJEc*C(!L1d&kxN@`L=S+a9OU0KRRacWRDJ?&z*;(>Z_bj5BG1Lqk1rTxUkdOTjsc4 zX%$;a+JzP{bb-uC!iXblIix=7$r!Orw0}joXri<{0_k=dNFGyM`GI_X%yXO7Y8ioP z8IetKaYB1p!0p;5^P~$_JpUQ%kqufnPqPdOkf4p-R~YU*L5>AEJ`!CtdJ+6I&6JqS zirmDci!rfaT2hD`&zWJQ&gOhTi4^Tx2$(#yUgD;aPsXa04^Qh!0;T6nQuS6l!ouQd zrFvJzqqqgJg!op(GPCpNc@KH{u7C7f&P&L`Y%!^=ai{taLxX zfq?PMA4*gY$%}CwfUKOz1Bfu2vl~IJwe9WlsADz%9#Ex=Y5wt>$+?875wlhx7zZhJj*Bj zfO>EaLO#Oa#46$pBdfQRA#%NWSE&wT7wv=^X~%%yGbty)1>}W2?)foZNGE|1Wx+Rb zR+y*sp^Jb38P~4BvbR&@zft#lYtNgvZ@U+xEcl$bSzP~o?nY0|)@AfYn_RWp?N*ER z^>(w^r$7SROG#cQ$$wsD*oOyhDd)Z*9ytDxhr}~|$^rU#xCYG$85Uh$4x?f^o5yJw z3Jg)d!%#dL;U(zG+Qj4A6755wZccTx03L!&5sFP17$-mD z06*EL+bP8Zki8_T202z21Y<*PvlwNX{bNp@=F{>vTX>YFAR_3WTWi8VpWvb{^PySM zulJ!Oz-=R>*loI8--sEhOM(e}$9g&^bMz(nX6Lg;yp_=LGqpZ8XuvD7>1E7BOVJ;cSl|Nj$dBAHv7a>b~lx2ZQ?H__K5L zklCX_+S(jgwY$}7t5KAgc5yBS$a`%h|3az|@S;x$cc_bIi$gSWcf~W?u0np$cubv9 zlG@wO7-|pLf0+?Mj_|qFEN=*#>9IeZzAmBZNw*4Ub&Y^bm$l!K;IxqG1>pHbbNr1q z>Lm+-ANh7+vgA5(2(Vs;8#bK^^acrJ*Vh?#9_$vOY08Ah7M{;sCORkmHs?)b3kw;AB|#-ddcsBxLIr{s?5#Z`o! 
z<_ksshCHgTRRT4Dk~QSDVqVL-_3Q}&DtlspEa-1aP=IvYafbmO*MTN_XgHJm!89iS^+7?cR~^ zV%77-W4C(bo;>W|$1ex>$*eN2-5oUT`j^S(X2fp2Q!UQ8wF^m-6B(;@o39lW^=hF< zgOL}{=K##$=AzPM%Go+A2REA|p_zzraE=ZqWI4mh3bfk3OHA2p*fyR$Q_AzZ%PHV$>?~UNNQft?cs92DwQ`zJoDRNw;)fL>1j+)R(6z8r< zPa!RuzYnt*gUw@)w=N9PP^pQfGl%Koj43!Dc1_%E0Fhq!FYVH_&&wSurrz4imDz{;JH>U2vX#hTSOd3_t-m}mqp4ZNZyxOh1 zUIcGB(@$P5C-fXTPbeh05~X~0(@2jE=>r0mzEGcPrq99wtSIS+A$aE_a2Qiy`}i3e zqJBH)IiB0jVYPR=H$VPlpEpLIZ)?Fgx@%ouztybOs<(>@*D0i0t^|*=|Mh<@TDxJx z7WLeRue$Kexl`8IS3}|J4shMqBseb!L*o51 znx~Q78@yb`=O?$fcW1rh)8^gj-Jtz=dAvDo-Ds~`s{U1DlCC%CTH4Cl zWJv)^flhjQ^r@#cXreFlSV4`D&51o z^Yr4Wb@X)9kK$qdtwywZw^^+JbqYDVYcjvq16;<})j!AMR1xf{FGne(8>`B+F%n9~v7 z)%b91J6BK1%s-zwXSefydfR?Iyu9z7KfKlO+Qo6HMd}qY6Ssm4-(pA+xG_ZmW^D`r zSq3!ioQ4jeOJO<~F}ldnAz2BhoalmT6f1ZU#??xnAEGgN-YO+5o8E62Ga$nJuaIWW zixMs0ZJnM>&5@`XmC<=i)Rxe|K<~R~=%l+tfhfT(IBPCYX3wj>cKU6j_WrzrEswkRuj;mUe?0T1M|X`%{lTvHPHyMPW@OnyyJZnJwN`ui%{dj= z?+wl*wMh#fK9J1KyjC3@}wiwjyEW@V3J35u$a&*meHmPeMLyQOS0*o$Kt_+W&DXo~qaJ+Fqd zEv)=JNOVl|$!p7^y%}jH>=Su1KYW1C9k4~5Lli!QiHG=-QvQA=p4;;!MZSVC*m9JR z&u!u{JbI>XXCph0qk7omhPs6*x0Sp@p2{C4`k1X{Beov6({q=J24I2i^34U0ZX#cp zjLbnx{L|$O3YmW{hVe5@T@)k;$N8}^?&7E)h9qmr4g>&vY=!`MwM;ySv>+M~7nCqb zr5?fEA>={hgZwlgyyP5CuB5f&w}X1K=s3DHIUaQ1qm;K+V!WVG9? 
z6>FTGLZW2V^5mFlfaz=A$AMD8Mz9yhI#eJg^BSc!BZkyMGRe^yj+K) z#Pl=}y**uuK;zVJYMW+RC1{^`U*=!pMsyu@PNVSXWg0ImRhNE!^V_`zuoZjls%QD22gb|_)IQ@ zC#gQoHF2%y40#M;QlW_^F!3|BU5qkK_*6IgA09g6$r#UgZ#^-?WwOloVPqtqR>F)6 z2yT_RkC&@TI=uleW}KKgmJ7AbW$|_7W2hH-P6t)i&1viR_n&treHov{ZhZD|ele+D zQ0ST*+hKfnbGSK0-RW}mXxX-3WY?`kky4Q@K2Yl;*gXV-9IBvg4Hg~8hT7&Z7aZTi zlle&Qv5#GZE;xb^{eJYK8~$fNR;!TrQJe zp*2VyE@PZl;o-cCP|j=~Tf=A9TsSJ}{$ho?9r+Jkl$CtLo{!E4GS7^P`G85#%I7a{ zyikka6?S%<&8v4M-|_b!0+|Z_(+~gXGyeYHf2dR%?e^N=|G>QX_U}LZ5!Wv} zrbC{T=9e8p_b3fZr)XHo^G94o{*&g4fAnwqyyyCs{jXN4Yy2nlyZ!q=f5`RA4tP)2 z>>qLcWHI&4EH!th2Rr{dKxcbxya47FdjC5;<6HV)tFG&Rz1`a4zdz(6%$EhxJ+jIa zEq&a%uaS~F!{rB+-CA`Q$11YU9}N56tFG(T+l^`mT`y!5B7nZl-(mR!4v%(f#OZRS zTds5lmCnan`(w4Si$DL?u7CM|07?C8UYd3P@&5iN^nZ*0e*Y`y|L=wUaDBu6SDVds z{x`^eZ0-LaalIS=MLO5@%MQ-U%P3^t;S;VBe_kAG zz4NhN?KT>}a$s$i=%ru#iR#B@W4G0cP8%w_=0uQdBlHIiYY?U literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v2_v0.21.1_rubygems_with_settings.dump b/meilisearch-http/tests/assets/v2_v0.21.1_rubygems_with_settings.dump new file mode 100644 index 0000000000000000000000000000000000000000..1bad10b87d44bc373ee1c429773d9052a5a60348 GIT binary patch literal 7154 zcmZvcWk3|p)5i}E=?3W(kdP2mPP#$h=#Z8MB~BWo4iJu%?vn0CN;pco<>;=XQ}00X z;rD<2?2COdv-91VozEBnd1@g}Si zmu9;JvC9;Gd0BG4$hd!dO`EQI2l`D59GSPl)``0Mt%keJ%isXTag28nv9?PmaP+%~ z*3T8n0P!v_rZo}hq6^*7oKWCdpHe$-i}*lyF2b!6IdOd*?5biq;>I_3-;+P;b|et~ zM=-R!Mo2(jagf^OxxmxgSBMkS36j5&FK$s2@u%a@-W$1E3#P_;uPYWnMO?StMl6~d zpA=2ODu!X}8r+RmDORny=MR)~d}pz`2G!O^+gquvDv4g5Knzb2=C0gU;UcJ1LwYZ_ z7g^v^5c99=tLcZp*oVvl*{9Fas-hYd`Jo3>{3h=Fk>QEzLMXzYR{+ zP8Ae@uJK!Vv*9o{xP1PLj;+pYv70gnxy0|*r}65VfqU463p{O|PY!*uEe~t*>dd~q z4aJKf-IYAy>ocD-a?Eu*Ko=GRwRPs2ytHcy7VPu(=TSpj99M+vFA*>h{bX9T`)G;6#uUp(@qXfycr&(rTJya6YWJYF`w z42ykZOV5ON@MGcnWnObnd_f#L<$(?%X9B=R&I3QqbBw5^oN4EuALKCoxX#m`JO>&X}*2mcC<>l$&p`j$PA zvK`QhnSI}#%sGquwvc7`6R$+Mj==`%rPOyuWmr)OsEbC2C-b;nfT1nVSTz(x1-j*j z42!#d{RsNNmCS23IjB|#7)|ufz%v+bKa~!H1%5eKTagmjlHL4+l%R(u^#OuUlNk9 
zgZw$Jjt(6~)H5hdYr&O!XDNm1lGrnH)MXV2 zX{%pv*G$J6c~Q+iIuMQ9{Jz|Ny-UPOqOSo)IpMM#rlDxXABja-@+Mx2`d`>l&PReN zvnGop|CqLU4`{8-YKR^GYF{aI-L9+V zYd>Z;vMs>Y!OfY}JG*SDHz50GqsX7Pu|W{!Hx6nY{+$vs!;U8{Im}ooKmNEQ=3UuZ z4E|!Po`r-7apZ%0`*3F~$AI_s(Y+krPJgo2-NL6FX!{#z{61%87;lZ0*BJjaKh`X% zpaqq-4)Cp#I;AA7uPPoEW)1A`?Np@r#iDw{(@N+BDb~#3n7g>m)>7ri*x<2nnnkRy z-R$vn4(_iS#=G7}(ID5D%?{O`|2Cl~D&@TY`cCgMHPyquO0&N7*lV_jR`)6M@8T7e z?L)8%?mLw#YpNd3@MZ4sWuti5U~i$8o>ddeh85XxnYJp0BADQ1R4F0BnfjCoRcjO9 z%Ap|E!mBf6N9aumu@m9I#_~aB7!H9@ErX1X&F-_z3bo&!t3K^UAsA8Q(kfBe=}nwM zh;n4gx0ND7P}z$uqVbm%DOO1pFT;wH?;Dew%<5|d;Qj|)+C9ACA3JA2$NTbG+rLgC zAZ?+aeqeW(tiacE65rIHph=SpHl1^c>R1=SA=n~=th%yEb5jl=YJW~_gCNX7`A!Nt zv^=&Mj*1qv9+`X(n84Y^A8zjy;sG6>u zx!cIq%(jX;r&|#+BFjE{t1)}1ci39+D=V3$c5utbIXCCF_fe>1CV+D=x%fUT(wMkD zhJE?u@PsD3ktTz?AZP76jDV#|D6{Fgr*jXrwd~^BK?YVoW0L$X-G@ND>_Hn|o_fM~ z+P5y^FR(vR6?Y6(3eO~gVZ{%}*NGc%i6j%>G*r$x=?B^#^&&hPdosdjC{B0-_!51d zKT|fNbRnm9d9hOaPpdg)lXzA;!%QTXPwWK`dAgLobon5ttWAaej4GeHFI>N7?9EVx zS=X2S6ILU#YYaptEwT3pE37=H)=-=n`A(PF05*7SHzoJhUCXO9jaK!<@d_UX^_g;s z5g7iUx{<6cFq#Sc$|PZ)ec32f+mMAKtdImtedf(@ee&3D&XSsUSTDhkD(qN;QhBh@ zyX5z-)8B?a^iuTV=8XL9yx)nfF+7fJ#o&qPQBZpZKKFF}r+$V}x9IfgBV2P`sRvx? 
z%h4o0!<05!n~|djIf@jnarkUY_&GOK8vYe&r}yx-&rv!kf6p)@Id=QdC=%>Nf7zP^BEwa`W4LNgjQeai`n%a7+#Y2Wg zu9oqbElNjicK;D4`^X(1rBiyiDSqUPxyGTlHdk`v7E=n_pvJ>rug-^+(|~JO;C*i3 zP3*({11jzT6ao(C><_48;l3_;iQsVJ3FhE^yuqaexc`dBYY8pn7yb7lSL=~tL2GFx z;9qoS*(MRsOx*p1uW+>`I2GD4tG?-+nSC$kXi3KEn;PP$dZLYsvepU?Va5=MY$P+c z^i%VGN_MSKd-HY*C6V}&d2R{!+CoA|Mfn6Tm{lZuZ7zsNQDA!zw-A!(OW&ecF@w8@ zv~;OAS`i!ib8{IRRI$$RHJ+b`=qYHrEBVc7%llUOKbiQwLa$KzEjY?5E`gvevy#K> z(c%2j?Yr|QbT{}0bW|zsMP7L>vrjn9wMdcfG5$ZM5b*q4)o`M&#%dNKC_Sk-RvOFf z`sij&tq^dQv_Fohp!LN5)z^XQm6dfowY&1)!8V5BR99R-&neHU%MJ9a)y%1sgKN`& z8zdYZSeJZEDtjwi9epF|?-claOkI{3SpC>ye}t#6hnB;nuIa1k49C%WlwSe{<0s#k zhJlF*_%qa96FX|<;P~5Ci-eLOrFVKSoyVw8e(@wA*jK-CE%1JmTTmmyM+HmhD3Y2) zY_cm)rrF$@1i0VOZzSEGQO2%+d?4qMDE?YZPlL#7f9p@-;-OGs5mVIQYom%59Rz-L z9#05|_rRx01D=}CFs`g^%QsIc!rx>Qwc1_aUjZ6f+Mbl+V=)%-tx$@82|jeHn5R7O z?5H@v%`meHcayM9o=9gv(!W#elY44GwDXbd7|gSNbZ7SwM}{rbxKAtT-Cr1wL!!l% ze`9be*}t|pZemCIO0mDTm(P@D0a_Nqi3UpClke@FFty{KRrJ@c3`(>*c~sQ&;{W2c zT^h@03;%w|z9J;8US1wXM(x+gT>_Qa3Lvh{gJ<8F&kIOf4JIrL_kEhhU-_p;c+Z}o zkj$(bl8@b!<)P~MBuQDXXg;}YS#_n2t|;C{+u7p}mDgnA%VrtLVsbNDHS@g)_;l7o zl-$`-jiX`!_`=*i3(D-DUN?LO%N50uMITCrdwuR*MDCyi;IGtvE!7PV|8> zN9HtY3xhUhDjE4Vjl%rhS1VLGTGjX#ceO*zhU$;P1iw!>+!RjJi7W?oNfYOZyiV$( z#P{tXZy9%C=<>(2?0;e(6Mle&(Tnf-%Te+5zzE0(L)6N3dH0z~PL_D&E45mXRs9O6 z$-lo1)xTk?JJCQN8Pb{q<>e$+vM64B6T#dx5tf+QI`t7Oj7tptF}z$zMc~4%b~`cr zy2|UMDgH$d-;7(@n$f6ey?fwi9rzb-*SlLWwWsp^7sX=Nk$;+L5o|^ z6EaiUZ{~Z?#^dudTo7%dAiy|9f(;{+e0W7E$R1pgC4YTvjzX>S%iRoE3~cw1>gp_% z?s>yAQNHTlyHWllC)r?tCbr_Ehg}eXnYTW`6D_orL(qY70>uv58zdrC^4x z|4m!RCJE!FcPbD_OIEX`*BWdhlHJRLKsf@Jip!$!{O|K3#8RkV^FUS8E@g|^#7So& zdbTm~{(*`ynBb)z=!JR{%Q zK88x()XgwxE$J7Gcnl=0Hh(Dc4mw6P@4`Gg7-n(xzSAZarO8##du|z4#`Dd@O(2~jAdAE2-VM=>3F}Zd?s|2Xo!1=N+nHKmXyy z2!HVBylM-4qJ5@4$=x4?9xgKnPP008lix6ZrUMi{NkZJrIC!-_sI+U)rc*(>IMU`#(qf9I;_mHN+F-hi`q{e`Im zMlN}ZVOUggGvc@Fn?C;zN(ai9AUBY1(FT)t+SW_8hC&%Te`kbEMQf1<-gAx*u?RW=Zym{51Fm!N4=Laae{R1qf;8AGlD*bNCPimP9}T5Rhr{_=28T=URs3TlvtNM 
zRLAyZpF@3;25Oc^gMj9c+Ro)TAZKeDCvau+_CbY8miaDl&1>23eyyHi_3j?0K73;L zL8=dhEUhMF%x)KnwEW&|$yP9(KWOPslW|80AE_y)X66LU07y{HsLl`KG zFdh2Ad2{tlOc_v6*VRosc(lN>q2D|RC-~FF7YH_x%>{5ulto`JbH#grvC-g1H1rPN zx#MJ#>>8fZrg_Nxx0F+AvnG?W{TP34?^v*!ywbqHu}@r7d4%03>NMI#II#$kDLFN@ zluN(nWx9C6X+x{Td@2TIzQ2=N`tfh~G|)e?B0?%*{_;+J4rX$C@qox@yk(r*%)NUk zX<21pMYr`|o=lQ3O}}2%Kau)V4eX2?bvnLh>)0|yPuS1hJFx`k9zW=qbp0EixxR4{ zMMi+$F1X(?FB=1cg2w{Qjacmnotc-@; zgBJpo>%S~Hmzi`1A+0wfcbqB@N&>)@`@1`w#)pa9)2$D0?wO-*HarR*N+?aGaRN^k z&~^_G+|?w=K%)<}`q0cB4t=gUY9p{O5gxVKE<~+a;Kcmiz{aU?DIG+brYi zx&Dw-Jf?5QSSO5UlZ!~-`;GPPfgH#bKht>5`o;p>1Lp1AIHqtYMJc{qcZ}V%)qMW> z5-XO)9OEbOQAvE97>6W~aD2GwNSJ9gV0|ORQ_QQ5q{<2I`xf0@Rj1%Y<$jgi$?5$$ zQz!sOys_yCLWDt_WwUj3SB5^;T;uFBW2RyAK(Bq~1o6NJ7s;md)5R6P+7A|2J^(z1 z0XP9msk3JnF(<|bzoOczhk?6>&|kpB?#dHVL^Jfddjmio?KC)P+6)}bDclQUyt-%9 zCY=Ns;Pex7a=1?x1EV}-aaR%88S(_+|j&gNJlhZy=c&UTG1&S(wuMl)FS-BGL)zPCHZLXmBr;H+H|cU>%SDrfGos0g6%+-Qb<-SOxw7SGdlKsqS`yVgS3e{T@i{2}2W$xHLY7{G5!7hQ@1OVGTE@@-zrE=P@#5ObC;IUX5*+w$VG77+3P ze08}*FEuB_UBuopq}upe!3-N!eHU`952Ba!o^E(T?fu>MF${6-td`siFS|IF62aN6 z2-{Ugz|?i|L?RPD3scJzq&rDgt;7%_om#p)z*jmN?sKcD9;eL<(|&z0j)&94nJ!9T zGVs&mNAqvc(gm?iNTel-NZYVq)VRe_L&$^vK`z@EqN#VSSmw{gAC+z3NJv$sVbeGs zccNVEyhOFV;02IB%-m&&3Q2ntL zo(JJJlPP(v+Cdx0VUCMMo`5PThr!A+<22`($r~m8&H?bd@%@BvDsr5YS;s}}DjgL) z6~(S(aor%OzCziSeYP~)**<=10+$D)Pm@haz@M=~QkJ%$unCpyN1F|i@7AE`zhW%~ zn$5WS`2y0Mf=YH3f5MOGotwBs$ZX3@ME!g$m%|BPA-l>7*W#Byh(1t|evuEp(nbO< z4!D(9B_Gide?_k}I|Sl(vz}UAY()^}4UE82E3V95X1dz#_@+bYg zQnb}e5@ie$&2N`Cq)oa86plFh_QDwa2>L|TO&4b$z({(z@$U#67E|gb&}v)=gxw}X zQj7&2(@`$C0{e_zw2FZNFxJR6@nhrCxf^HW|4N0gbMW;Q7YYLxd&zZXXb9w3HJU9N z`^kDwBSvQ?!ur3Y^b>awj~;t~-X~ulrA5EWhTMGKq5rN~lq-C6^?^2pT6tITgem@} zWwre;1X6G2=zp%(O@KjeOe&vwBF7$~4XbSm2=zUWIbf&u*)EBn^vN+zkuS57g7}FX zbzIFBT=S@8drgh`ra9}lYCU#R9){XarC!^k;nKwwu@D%gNO}{Y%?zZTTOxCYx6b*UzbZoYclTa6Q_W z2`Sg>F=NyN+;z1f&b*-PS_l38$3)Do7ivW+rYR?r!ZH@=u7(-%Wtn+PuEu}+Ky2l% 
z%=uftY2Rh2?|Z4`BW~p0@~y#^{h~_UtYeHt(D#wx|5VgB>yPA|;mYY{gkHP$_lCh=yho*Q8j)7jdS?vvgdCbojV1=3ojB)1iJ0`PZAijpp>48v;?;8Xx+(};^oU?fk zou;VQ`uF05ptz8`yEq?kWzpk#QcIHw^y=eCk)!)p-3z8DUx}e8RXQGChlZz)K>6WM zl4#AT#0;oEpoKmy>pbQep4W2`?{OZ}UA_$arL~YZ=F%qk_b~!UzJ7ttg0t4HPOEt& zE_|stm~@PKgh&ZAe>MZ58+zMo!4kk30Puf5FZ|2^ literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v3_v0.24.0_movies.dump b/meilisearch-http/tests/assets/v3_v0.24.0_movies.dump new file mode 100644 index 0000000000000000000000000000000000000000..3d692812bbbc5d16a3b81250931cda3e1f4dc8e0 GIT binary patch literal 11198 zcmb7|Q+Ff`!)%jGY~zVLW+$20wr$(CZBJ}xV%xT@iEXpLcb$K5>TciF+E=xTI0hE% z|K-=U--h#{WSSZOoQB~O(*mD3Z3pmP{JIMh&=OiUI++ug5(wK*N?%gZ$Wj%M;eY<> zmGK_`k@At;aoQOIiKl8t1Nd#TnN{CBEr|}fed~R5vL6=j`)6WK;5%F+tk%6L3Z%X@ zBYofZ{;blXRJHoIM!iPwvr@UHCA%fR`oy85;%hQlbvW%g1!tg9hrPNfK)5hXyX__0qK__UmmHs zP%N@3JEx*yKLQ`fOzkU{-9sSrE-j0nHJcgnQoTwH(B+EtAyw6bjy-SRPs$@mfhVu& zu|s`Rs%JT!C*Q-nf$BSVF+OY3^I`(ej}=uvnf6kbTn736}=a0 z{Fx`?_gCNIa1j(&17;9KIIRqWL3AZfJcBpeH!Z@F=WN}6*L_&M zFjteW1Gz^HhSkI{cGB>tnxR;O#Dgu;Iaj3&2US|}ph6{^I<_6)K@3;g!owqdoF&bO zJ|%Y!WO5p)&UlvMNBxHUoLgo`uXuJ$<^kroy}QZv+;A|!dezFn#`UyF2D_-IAQI2_HL6} zy`KY#J=Sy_Z6!?_U#eGtR;OQ z7w!UTrArk!7E?~tvCXB#ZHX*cNH=3N|7*m+iR5M9xq(W<(S}1CC;}R9)=x&dH@NcZ zSQZ=QmZ<~G1gBk~Qn(wARKrD`yp?2a%ImClo;W|kL6Wgk0PTVj3dGXeiAvm$Cw4Tk zFCLlL$mL-xs7xKRc@(Cy9ZV{t=ZlZa;;nXD16B-;31Cgt;ro#}nF;rbeDMk+Q(24Y zwAM*%L%=2cu)f)B$mN2)u~E}ma(L_O#dorY$W0A%-|1+>blQt{QK zJ3p*6OQd1&gba;FbA7-RJA*?o8JxgmCu9_%G(O;$()eKJ>8B@{6cSKDGb#f;qPihaGmbuc_@r;4 z_czOLz`|BI6EsU}RI-#X0%_bU75#0)w2`}?%AW3KEl`-AQ9bC$O(=ZY0%=2z_!xbeL>ssY=%HJK*4oF(4`JV8F zei-{Qmmp^WmQ5R|akuR&>(ta-(McD$b zIn*{#dgtd#{R}9gqx2}9429aD|5NWI3$qtRFN4ePM{CeqU~yROo3yw1>micQsJYk1 zm#ifDcTpwes9>^rg%{R<>qL}y}O-bLwjl&xiLc@dS} z&EJkpB}>LNrrjbx<0J)ZT(D+^z#5UEGzKCVE}W}0&bty5{Z?VtwW4=p5zqJ0i<)6a7&9+{uf3~}z2>I-(rPBLtSC{TO{?#k%M>7pr&3!;(WE$HOL z3PN4yNQx16K1ObXOP&Ar>gw3-l{qTFAyN#%*JOe+`l<4Z;yklOE)UmY;uFamScYS!Zmfqv`}l{ z1rxf(D*i|&!p~GM0TFx0d8GUD8` 
zE#e|ZRc8%=#3+pU$19ghR>f?R^0g7!cCKB!nr5oSUs8V=AzIc;_ei-oZXIP`N58( z8=UzpTTC6P2?w03R87nRm-tk)L|jNPdJ8+f08_v>q$;FH?u^*G3(QnJ&~WQeqYTCiIHYS{C=@W;4}TuCaJ z6Z#H0W(-*Mq9%3_osrPu%EEXcpf?3mrcs`h8lV?K`+{EG~D1ce%W@Oj({4o(nwf>1fvDfQ1tDLiXeYjSkh0k!#n&kHSIHne-5iF zu@-c|RDy^&Uf2{>>kB^%=MtvHME+rE1yFK5t;C-{fio$EL!h8e8)-nRJGu=UB2KEZ zmks5%CPyhYyk+Q(#>e5%vT<}f;pew+MW2yxQfJVPdQq;1#t*AJ{dxwyIS;&UpJ@4^ zA*7SRHu{><3Zao1pe(LU#F3XK;K3Hl6V8s+rJqX1&IuF27Z>H~8u8Mb+E(tF zL^W#c70)CFpDJ;$%6%fgKNll(VTO%^4-Y%oqqzT*U6mb$14qHC3m59Zdr$oBne0RL zy8om4WHYp3y6(mQq1=(lH$!B=K2&I3vtz{;<9k#6@IH>8gu(Mf!QPto3|pgJvl0F+ zIZ`F@a1*2%8_ZWE3>$aU?sg@lQva+nEL1HJ zxRlANT80{Qy!R3owaRH0k$*0#lXLb2#^M~l{FXMYLsIX;qJzAlBYdxj&K?c@SMp#{@g<74iLE!uYbLh`_?CmSK8)ZfqY|Vs{A{!P7 zY0>P3RmQl5Ma(}XUQZI_2W# zeP28o*n?_cQ`66hE(GvC5Ho+>b>xiMVZP1abM6_z;Hrc29_nJHCVV#Rdv%1l$)ZKi zBvh_#;MPcj(|&R=Fc%YPjg)Jm0ynDP3N)khuJzk0f-E8;s#@9St zE48m%@u3v8=}fikyECBnY%L9jcN6av-ZDEH^U-{#`?em1%JBjH+AT+j`L7cyiHWJ4 zm!uR%kVuSwbC(&mY`-RU@3*9}8)=#+$=ghreNes`DlJXz=&nW-MaIKZUB`Uh zm@(P(9VhYo z)sV=?ETPoW$|#=ZW{e19Dv?>|3dMU2AysW&RWkRuP36(^oFW#|NhD^N4MBgH|9Ld% z1dv2|Uad(~bf#vAyyd9u#HOUEPPa7Z1R4-3+%g*UAyVUTSs$nzAn_3gj)-2&6Xh~B z*cRzn7s0L}G_yvDTHv><2EQh2^CVXY&Hp+PKkbsF>d82ag$vz8*bhs2y@N@55>Z}? 
zeBB#f>V7w4@pH?rJK*Pz>KXePG3@SG)~s2Ta@&6}{#l zCBE$`8y?AHAi$fR`AsVPB*QD0@j$Z^qngRabR3&pn-1G9qTmEiBNPm)<{Yj?!BC3k zm4fJ%6FC3-NVxYv0F53I-BnkHN-o=*W=*u+|CbytP5z+>qstVha-Mn=sx=&f>g;Pq zg~!m7jO_;xsYaXkj(?S`+UH`Iw_{cvObg$!oW&4u;-p!>Wznix9nhgontUl1{l=AG z*`{ILYTl=!7n9a0Ujucpn_@)`L4GEERODu5r4T+hZixy{DS*e*8CHGcv|4;SuvC!gF37EC zZbv0S+%I+qQi;s1FS5`zNz$~GHSSQzLPKXq==j_hddn%DK6tXrOgyOOT}X2ALUl*4 zfK;NBGGZDVMWu*$)8ZE&r+&C5Wi4`>NF-11=k$}>2YszIkRJkla_eTczNaRdU)X(k zxbb##`Btg;JT|w+zr9O$tFaKWo{g^nque9zGK;;FpDxc=TIsaV!Kzn!kZUH1G#mqK zk}w8$mx=CI3!b#;fyg+pZo7;qf)hoJ`_TE3`gk!AGLq05Wpdf&o$}Ic~A$RA=-M_D=4 zw@031Y&xeY=eU-&*F$oiIzbcedPB*j2jIV(E3u|*wRd0DK@Pvm8ZD2O5-$&xq072m@= z;6vV{l1S+Lrv5ZmLN2m9S;Gg#QXLkH~!Bs7jAV z!(ygQs_VOPt<&rC1KIbS^^#8Nzyj_H zS68mA0!TP&4QL_Lk_ZYlHRTO?HAyMtV1P&H8vcjp zt1uCmkN01QDKhnWIW*0=EQ0r?xv~Bt6I>~+RaQ;A6LK$NB%DID91;PErZdCf=~KFK ziy-b?9Z7@)eADEzTh*({$l6-*)suf6t$ldQ%~PqfuGbY2d-03diG6FxigeeOH8$Cs zd6TA}TC;R0v?(q7$$)I0@^G5mwxH2mC;6B?olU+=dU6$^B=RO>A{3fNde38`IE)uP zi>X$uhCDwo5%~9>+Uzo|mJ{LKDJIzFAc)BfNdFm(0a@~g1v!;W?vAGYG3YpvXrw=7 zzQ&){>!DkC%*12QUdfX?tpnsC;B@F z0^T=~BE1OZ<`Z9-_58WR5>=$`D9hZz;9Qr}XooCD7!^}i?3~*i_$i+oOo;s+)R`uH zUS()P`#GZn+VbeA5XLQK`Yj--^465UO_cOku*(Yg8j3X6Aglo2%R~`_S+gtJjD&Bf|j40QEQjbdzZcE0^u4-%f4?81o(=Bu9MMo(ylqojdzU8w#U{59; z4a-9j-WA>HmHNLoDUW%o!Cd11V9=vfSYIX3`d>0}5=(xJEb4se!bO1JL-8vphJ$mG z>56C~l7C7tnG?=Ma6N7o5!;R6DsoiHJJ^IlKC%i?Q11@%xzXLfO;g{=!2Dh+cu7ML zP0d|ASOtCq7-HNz^s67Gn_tLM?~VcEOY>~mExEyV{)Emb7w?C%VV*`MhZYScld5^^ReVZ#wEyj8)6uq> zrl@d+Jc6WH3?}{XMIw4+r>W>%G7!EKG^i*wsPtiy!{($uwhaut8b%i1;h#Vf~z37PeS*_#YrD{@*Y_p5Je%SuE^zf z9Y!HV{>8mZqYJNN!R+eJx)xEeikt|2!`bLRXZ#_(`lD18#?hqI zDl@8O=hENY|BWQz^z^)AvoV2J#GF!+TlB}ig>Es~F{ay~ zvZjdK)U#Gh8ebb-sSQRJ+Z$*)4zEU;?&-{VGqC6hBon*Sa_run*nzs4J6|d1E(*LK zor}tb&dGPEP2=3V^k^tCbixd8gPb6g&IfgF^KO!&(einh8<~yG!l~TUrqfNxo0+`D z(OVTV&Qoq=85wA5?rTF{#Kc=eCny23l6kg&ibBJyo%l)7MHco;=h>!6Wd+C(Y3#6g zqGm@RlVZeQwc-U#e$XqhX0fqnxSz_O=+`+6(4Tly5~C3wG;y3y&TAX}w6PWiG0@8m z`XFE2xwtF|(F{&FGzvK=IlXuCh*(wX@OyBSTeE76EteS8g%V4tgbAsaYpeN~brGmh 
zaQm$LjH6=)5el^|X(XItbbJs7>vROA&%h5Bbo0ocO575|EW`R;Z7`=KPwgi{kyq#v zE@Q`thS~h^i;X9;=(!~F+e$7;ME4@iF)DQ)nimrmtjk%W6*qB&zTQi{X!g`=@ffF` z?ZoQE=R~edY+(=xU5*q`KxI!Wk2}q;Q=~86uV? z`0&WmqF(A?apY3dBuIATe@&P~(+MQaC3x94X)};9F_1OV@aN3yV>@=WPVk-wW6cGO zdYgt2<&MeJ;lAD7 z+ab_ROkVtrW8soquyxf;JEA}C?l*KNm$&7ut5KpUU+r@+zl|9s1%;`bCa-@iGUhf* zQ7^Qa?A{1FxVgv{QB81$OwYsymUj>6H6G}OEzuRf2{^e-=c%?xg4w$8ox6DiQs4pUH#QJ=6l`p z_wMH7$@SLF2sJ`4r9JjqUtZUOOOqR3&l06ZjeFOMv0u*){R>TE|BG_+n$&}V2Hwkg zkj`WWFcKzS;F$DYHs3jrpQu*IML1k6oj5{?Id|re6%i`DV$X=0DDDp3)y&TL`!29R zsZ$u^W96+&sRsprGa`kc0>o-n#1g;nw}24%BAXMe7fGgM4I)bmvj;u#|zs0x*@UBNcR8*I%F}c zjV!-#QhAK{E6_6tfK!4+X0nbg;d-$0sovfQ=Ihsy)E$Dr3duH8M6&Stl00Z|`$6URS9?|C;KN;p88gT&j+iDwT3kKp|GQJ8t`k=@+#KV= zEVye!_Ku?*7Kq~~+%BTWszXqe)J2!K;g0nXGHP-Wo4ma1KC0-`|1%W}>l`{)W9}ug zE}QUu{^SNHr_iY;SY-og+FAV_4_O{23j~`3W*s^-D*?4&zsB1TQYhAk#i?FB)O;N3_p)90^iyK8yvq%D zYFDjC)HmFjyAFr`?wJR!;m4>;6^xqA<|Xr#g;l}w&EQ0TA$h49R6$yUQP_;nMZasU z)>tUODsEu>G8GC?35F;+WUM2(O*6+0`#~O9g05uc5aU;PqbfUh%p#5*4!lHlqf51? zDpNcd@n)4gHtfe0X+%^GQR)=oIAnfJk*tQJ1stjY>hQ*an&Fl z_ILyzO4sO#6d4N@$(yBwl#Dz5QP*(f;kFj0Qp)L~wc*W^mZv!#54<0j_6kTquyKj*4$ z&O_MXs@{RL)Wz(EBLC~I&}6Y9_$4ol{l42>{Ak5~=NaHRmFfTZ;3Pa(_!ugjOs6zy z!4dbgp2$yPc#iN~Y5qY^&>x-bj~DF04-vr()tZoBNP4m6l6Tl~O`z`6=JR+6eDN}Q zdTzyzE$&)MtEo#AMw^f?+dn-ej%i%GL^JBy8y#ZVE zlM8+fW>Q6lTO<$7URmD0Q*0+Faf&YX-cI{F5&r>y%F+T(-ZmbD;G--g9jA%SU|d_lo8MC z$xvj5AuF9kBt=nc&*_>ePxPwnuIzJpT9_>4_^&XaLgkUQ(I;eW>h0_3<>jW@!*{cr zrw6~87?{9OUW0$F7PDQ6g;5N+lU(Vul3jd;)|1iU%BH6gq?wmeNFZ3y$Rm?|tkZN~ zd_<$|Bn$K}$mUCQ`zOMUF55wJ9XEiMdGP_&p=Vst2)*Gb$$7>|?r=&g(ZlF)*eI-j z+GK306^()aL}SIh2EGKu(Au2~_w}5Uynsa3lIib>k~$j>+C_ayKYAAMXS<+^NfP2a z5yw@^*AI+bT7fyOem$(6&Gjo^d!Dub@L!Z1(J%G!^vKiaZHv!K_)y}l?pupgzh#ek z?vp3A@Q^!M^A4PPYyCL&%%KhDdp@Q%a}9Cs#++JWms}OI{!rr#95D(+h1XQwd7E~h zKU^xD|47OJ*UA|Q90T`!0BXTw6KW0kNbqATW1hH%qOkE3Y|XB?usqIL%x+wF=kK=e zAp>uHPEl^zy4{_0t&_aNriW`thT3qIA?;TR4K6jygPBtyAeyTW7$SOShi{n?Bgnj`0N?TEMcG6r%iupN62S=TO{*1 z)S;qHAvYJX>hoPZU&QsG8ea*2qUtk!R>tytTj`68s^9gQ=}U%K*D5;w-*Yac5TUhE 
ztQlNHd(`v}TYQ{Eab{1Gjrekq0nLv~N}6T-BR5yIZ;L=DfPzlApSH(cU&`P$a>)JYY^XIrt2 z>9{r7V_fBZNR+s##ddi0mxuQa%K{kvyk2j|zXiD?gp?#M%cy|P93j%jxe+G+h{~8? z$bC)$>`^^}=b=PXhK(Swz=Ro8u)4G;m?Uh`UkAbHl$Fb5?L48pmcEJk(4k_p`$y8+ zJpOzY95M05TQ3bcGd>G5Z^!0Ye6SXE9Psg1uj1leGujQK8#Lt7>I@3;na~-%Cw>If zShkQpPSqk%#w-HG&E~_2+LDx9+(Qk{$uOtoyi7oxo~BNObd)1N2IhrHAQXDjChA3S zdgZuTQXYH?d)afM-+q$)(p5i(+jugGQZ`k?e40a*VMazCbWvNm44P%mhnn3IeHG+2 z#;w&4CP&d_h0A|o5>y&XhZEl?zs2xp#9Opo`&Oh$%eqL`l^Z{3%1neH-*)^96lgY= zs4#qEdYK5~L~s!>Hcy_^1N75K%jl09&8Skji~B8{NqME7t!kredQ8VnU{m1bSc9VS zE3b`Htp>|G7eIIOcnYJQqdu2Wc#J7#y(na4@6G7ux`K{X(|+B$YK!Mcp3H~C(+#>6 zc8oD~WW*vp<1XA4jrQ_70o|bP-{(_r^zMgE5sxua&%wfLN9)LkUD-o3G#B#eRT5LJ z-+(TAis;m6+xsAG;(;4d)FVYGJDd75PWEN?h>m}6M0wK^&Ix{30M`Mu&@&Li{C7i! zt5GeauCxr-7KV&{@K#F(tf)k*y~=OiL>S#E`xEEPFL=I?<$ab>D=ebfF4*JglLj_l z_nENb$!@k(v z5i)s@tFHndeM=Kzp#^zKwq(gL1d+Nr=VnHmlh@TQl0|IdV79HBi>G1#dgHlB1y2s1SwRy+v>lWE zW7uo*IUSIjknVT7Vm*YnU6Ee@yAnyixWq|CMxkq_?(^1_y7*U1O_w6b3g#o=?qhxr zggUNzXYhU3;PLg{2Pza!lC!zK1r-P^w*`QPZk@iZ#$3l!7Qd__=J}+D7SBs*NC^ye z&lUqBHqV;!3{lyAxgH#Y2RIB4ENJC9{ARx`_utqbpH0jqcN3*cA7Q0Dg*WO-j(z=W z#{;AT(&|D-oPu$IG-?aKK=OUoM1LKL45p^StfEL zglPuoYwl!F;MVvXWRm7J>+tF!==XIS@VD*a`FaoZ=%Xw4lY)am{F&|d({P!770{xr zcPLP}SMv?hs};Bod^Lpzp@9j$XzWcZyLfolKN7>7+1AD3kn9NG1{8aICI6(@)@*^? 
zs~T58dxwDObf^Ws#z{;Dzuk_ylw_WvW0*fX+9qwn#ErAWh@ZRJwU?_~an>+;KvPTr zq+oX6&>;#Z?`EZ%b&GSUeyuCa%IvB1rAt$zPMybQ+~iV#IqZu<59{R*=<`#c5Tx_< zx@Eux3ZT1+h`A0ZeP9IX#BKfu!oT*mvjGJpTn@bf7d!q}`(Jf;tnoD=|2}*v+sEc~ z=Hcg;=xNC*C(-G_*^uMg*hs@FPyXeKeeu0T{P+x+cjXI)nlt*bw1wjNyj*@&%Z_)c zlAT)~HXUavTZ#v`FHLXtTkRhv;zEJF8`_AJGv3)=O)@?l8ikKTVkAE2eR_vC4gYJy zYjhK}jgwN1v-}yz89( zFOgpb4pa^l{JWD8F|9P_t2c*kjoOZCDF4FURBf#^bj~EQaX#;KY+YB(E;_Yde*#;- zGkx~7byv0$-ph8)SMz|k7EfXU9Us|?A0nm|Z4J~o_0|E0gKjlPV$K_nT3eqnCOJDn zUEh;qqvk6h!_|h(G#Le}mo4Y^9rO5n9Vn$Hr(Qwj37s*YlepJCYmR(kPoHwem;ZVS z&zKX;GtNY581I5ExO%r$EAPL4u5EZFKW{lU8S!8SU)smYR<3+ZRlMgAvDJJ|KY#AV z9iI8yYkO(3-S1D8W=otY1eG=v9&qOPxk-@4f>Mjd1o0Gb{qn!Du+@V+_rmg1+x}F8 zoWI=pIn z`8Uh-Gyj)bA^5w45Yd%krQOeH-lWU7)-lzhB<&$-em<4Ly?Hb-pPy-C8{95K9gI02 z2_NY1b+$|PVr8YFcIO8^IUaylHq*EicZbs0boW(*vrADKEko`F;&k&RAb9w7}&r0otDy~cJchP!TX$$oqYA@ZDlrL8Q1_v@`GQs6CM#HIU z_R4;6vsZk2#dD=ZcE#&seaxeglk>{7(ta39QeOXH5{FCHx_WOUnJ6gr?_`vTbO~lz z|HRoKJ8@UdFDMp$i}bBA)1>>8HJ(7PiWYsmaqIweHjL}u5yRYVh%`~3WBOHmIogi@ zddH0+^CqOE9Jw%H_a)uPW~SCmxs^q=?`82mRj$=@rOTGCCMj1)Z}|D_2a_f6-lTG# zg`=d=V#44+xjyI=X~4v0D)e*~U}4^Pl+*F#Yj5IW)t=CrJbw z+FOtyZt0naZT{rn#<*pK^eI^_mMmrNWM}bb8g@sM7#eA9g!4u)yt9}3&8yQQ0v4DI zByfe=Vsg@+80oHEt!+Dg=0$A(#gaeMUqvT~l*DBf-WrCX=6M75DGp@uTd8~*u(w#D z=KTzN$WBbhh}z@@aU|5Mq$pzoG3I=NhHgkJ9Zzlw8+??tfn$9PiJ6Zo(@I|uU6N?1 z6`Hav7kp{NV1@3r<;h~#(@cKBm8w5HMryw8+p?2wKIL9M?CbC0Jtzfko*&+N1or5p zjBt1vPA1VWo-!E__UVa4`bZ5sk1($_`JGAvVZc%?vCFE`!EJ4GrJpmeAxXPgh?Ivp z&e>lRe~J7<9z}j%M*!g&xk={Mxs_VTf=cPGYfC~rMD86=*(2qny{i5GiHbxte^3@0 zMKtVA@-6w$0dukyKK|-sqID@8^n)BQw{^i(VE}2&62#nIjQa1hoX(p;*&9>Fm*Jw{ z8v`e&0s7_WZ?JM%pAMkZ$%ciWbW1Ap58PBChL&sBPX7D;W%Xrs(Y{7EU! 
znP19J@DpiOATe6h<2G^~THzSWx5L%}__q4gNW9b&SZ5(ubhbMP_)H{Zv3aUY7g0Q!3uXnuG0}Bw$=8$O zT2VTf-$O>-d>>{9OLo}uy!6*a9qc#Z(lLKZ$1ES}wm8td_1kz3Ns$|kk@ck7T#zl1 zpKZj*ab@IB%)bu_x>NcE&28JQapZIgk4dC4DK13S(tD5!Fv?YTL`{tArZZ$M*Yt8a zJK`e;3EqwKT!bWEcI|l8neyr~<;W1G?bYO@D=8}{Mjc?-D}#xV5wCzzLQ$<<|1c_^ zz)=_Xr1Q*&+wsOziN|gamjrb+2Q5WTY&Ris2IYbYF)C@1Qfj9zF^Mh@l-5e@Ojc6!y$-NbJ% z%qbf09uUN;a9L3?5NuXDPym(fUi2$?fE>z9z~NL7jm>?=vY731=it=y^`&W+HvXv{ zYjyVR=_Su|$!VesygD{%-vhaLbg+)l4H<-=nQLNUhlI*mQ~X1rii?#B49W~_hpYE7Y6)%HAy*>TZ8Al*_-n_kh?F%qY3bNA*4f$_ z71!(OK|+rzRnZ$vESPyTR=S?fifD>}Bj!mu_E8 z_cuoa+QjhshkQ7Q9&_SEmjJk?c=6L~JL+At=bo$9qK}Yg|2>tFD3TXUqO}nqNVjnI zqA#%M-yoImqd2qdUs4SUkgVIf;C&v$GLSKeU{829o!VTSn`_Lfk`kjaVCbAH-Jx;( z>(3^UnC9I)v578$GtpyZS=#|6zMs|%3;dLsv`L-B9w`X#4*%juRZcEVmu{!+%2-uv z@D7jr9hL~Z0r8hDbR2@QUX3Pk!mms~Q$>}= z>X5Fi!3p7G*pzi|Yu)U#gr#R!T-mlUSlOiFvQr6!TS>mia9*lE#wq-i=t&j(WM|E` z?u)5_rm5e=^h@W}?JVX(9iNw*Lzk|)_E?Wl@m)!v@xV$H`(9j+4*0v9*^id(Y06Xa zx%0!q!ggq68Moc9_BLxIcG>D0wQ@f0qZzW*0~zb!U@c$vi$hJo5d2=YK7^v|>n;1J zcBH(EM5dnkNyGQ+okmAEWK0BjGdcI4un{0v4coceUJ9%M^&xv2CA2if?2*68w8R29 zs2~&QZlI7x5Tch_TzF$0$#8Bi+4PTAy#fqVQQ@Ai5$S%fvMGxvcS}iSu`gf)fg*gI zbdWhNTos7M@B$++8DgUB$}?B7weFPOJBUy-)o`Mp{2>mtK5ga)n$BlZCm;E$K@G80 z68pFH{Ij{LMSna&a<~JL4z4;`7!>CstDbnOS}nmF*1^VUxmt>27_qDr;q?kKk8;kZ z`}kP;L@Wt&!TD@)5!oXi?#q#AJb72mOq-Ml`VC_vj4TdwDPH*|WT|;$(qeS>Ct(Ui ziSaUP{A$?86PZ_>>mDLWs7a$-fr7!FiF0|!cpl+JndlsT*u+ zYMd#cyaK*NXbh0evyzu6sKNWKDLTv%smoe(xjwBc;^U9DvgKwZH+=ow0$2iYfs3dE z%OJoL!H0m;*H(N>XOQKqir^^D(d=9726e0`HrGQ-Cf8$SnSl_e{Lj0s%3B6Q1ZN72 z_xL_;HtQ=AU9-ojm{MbSSZhtJpqH_@n#@sYwmwD`!2uj_f$3XOa1H@La&jdvl zgh9vA!PNiiO>^^3G|f?W+*LifR8=(-9V}bndIWPezl#LosI$5+43rKK`H4csL@nft z2%1~0NVhIXW7Oc;+F?db@w$}5P17`cQEPZ(JbB}p7U|Zi;=Ti#TvGSx+sxUDv zD?iLixkw;{P`Zb)%Pn(gs6||ch&}jIDh}Q_3LG@yggr-QYB^hjBB*^?2}E+0M6;;t z3Eo|k;Bb|cY5TYm4nic^AM)E#UlXIL`IL0m>n&s^T5xVq%TepDa0PQp(4!+wnpw3Y zSDhtAH~kc%VqlHee#9yZ6c(J)z+B8|Wl1U~kW2o~GS`(+ zvXkDVmLjDizx&pn!1iPcha3t=vNWeKe_%4kgHf|3jzv3m_>?fd&KZ|dszV7f?1RyX 
z5urVqWPiuZ{0K{^H0`jm3`{lrl$wrv0f06tH1e%n*2=qZ0wP9%F5BN?~#LuMWv1d#Q z%7$Zu;P)f^O0i0+En27cq* zr+vye+C~l}nTr!4RNKd#*@4NQTEl^+H_+x?-^Q9{veYm`RG>^jGS9qDtWm^5eq-J& zGHPeMb13LNW)~7jK;y(;7(Ez_8QnkcCPEDzf34&LetXFVl!8Jtice;~M*;~o35Za> zmX#&f82_MnwpXL8*1F+>LolbVP{QZlO@Up)R%fMLyvEQMvNylUsmUmqf!$MWQMDYV zj-gD*89yeL>w~!gwwykr%FL`cQaKvQmZQe{FB|K>a@T?#|LQ6tdOLB#NMk`j99Q<~AqQ`mTapbmwn@~GYJ$c1kiVp&qC#P>?8n?X?4&oysOs8rO`7WMEcgq8A*SAdMDCyQ1*9L$1iC~ie zB~rw**)Zq<%d$gSbd1Yq1CiJYyao8G_Q&#`6q-|;vKY!6=!-YU15%byKo`lBYvn1| z6BWcws^eH-s2v`ECjqk-&$xF8Taj5v6^o&2LwQ7>>`hApvyx{|-k%Ka;jY=G2pRTQ z3%;pEgI7<;x#*=Qz@(=W+7x(xV3^cq;3g2U&Ac*cj$0sMPB4!`^}FH9BncZJZXp8! z#^XvI!0wVIri>*QQZv#NnIzn+nZgUc9`}3mrnUMCq>@ z#Gs1|x_(1&^C@CKRo-X<_S8JRxTR!oUM>z!w$0{>EyX#+zxR7Bbd9W^d)1koJ_wz= z!EFIjD{G@SG4r9*qpN+{lS{Gdt+Dvjmm66}>(|Cs57&$L?BSC_Tac|J1{SR^DT`($ zNMpVk;O_oTR#V!Gu@_cc0T(k(Ntyr*TvUY7OHRUcOkF~Pn>2@hx)mXce(@HqBc9Xz zCXBdoQYv|f;1EfFs|C>q!k|;wKWau48Hh6@dy1hsc`_5H?Pt`^win@y7{rJ2U;T$P}MWv!bZ`SZiVHT z9=!({Y7H9uf}VZ}xJu_~!S-mK3^>c)G;JW!X4k&=ckTEud-Shu1D*Z3f&BSQU(1I-pIA=>P8MH%HgEB=W_Bemb;Nk^t#&S(rXV44(oVD_=@k8amEEmG}cx4qI zR=iTC@$XadmWj#Z8G1A8c%^{}xP(RDRBrd4V1 zr&pChPF}v{W0X)la^$OtkZ;mRENeWL*1b7*a+Msk{Ttk_;FLu?6IDL8%%c%5Qnm>T z81kp33`&-qHvNsk?m>~8APNIb?JJ&cZrXPPYkv)HiGK$C?rCsxYlK%clpyra6a!OF zh&lw!3+rEQxNM263Y2{=ln&?oPKu_|I`6kX8oU9jT8U&#OzTu%AeO$92vG0KIB zQ>uySF`d}u`xtZ!_)${d3aROGX*Cy~t6-y{rxXCY!cBm%!&A7PP!kKZT31>+th*~x z*uvVSTR3GzYzeG*uJCeH;7=K2YQ8`FI!@+_t5{6Yo-7HpbCsz4*bt8zKBVJb@1sn4 z6mPpVjCLV3U+e|;26T%Vltv>~wYtSAm=|gy9 zW6i2b-3gEVeKSy}H06^NF(_Ol#}T(eHgc%wA_c@zpR$`)!@BA%eJ6V;Kv&?HjJJ;f z$dB$t&W&Tp3H8ZqL`b3!?C;C&6zI&3%SBT=9Zj7*z52M@+a8zryr~Kl^oM=K$hu7F zGAIST0zoV9D33fA1BJJfMNIJY=6>ZhPWNlzxuGzd2ayu(Y$G58d80m}T%|YWvk5r5 ztSwYCFFCS9Znjqq94d-&@ixPsAfHvxE*1^dGTj+WeXl0uw`jyc>UW&$Rb>u^6(|b^ zMdM3WP!~t~Dw6fJxu^ck;r|jGjtRI8cMSTPeH&edA0> zpWK-3{2^_Kd+yeyfB5@oBVN5Pb(~yH$_Vh+qbp5HD(AK=<(0>??B8)#`gz49fBpzZ zch6!~2cCc6i3k}ap0y!nw&?YV`BJLH8zMd0z11d!1Om0eOwuSby)1edw@?0OLOGO!)o7Ub=Sm0 
zdzz$g5Hx0h@0nVt3}Ia7YU*aFh~P;M&HP6cmS>C9i%G}E>4PGLQK`0a8%BNb@jLH3 zGS!eTHD2(#J{zwxcHR(ymhNKZ{GQGd-zQ4K8_mDxw=2&FnY#;; z*X`4))UM479sn)CV@0gX2UZ;docz(_*snw}X~JyM6-#*&W~d#&eF;Oan5a>fVJJ0c z{Sw06on~TrStkFk-Bu+lKv`Q-q4+u96)uBJ!5KvDybdjH%Bmt|ZrO(XC(CVYkUz<- z`2=J*X`uc~%G;9Cnbp&y{8Id@!gzk|{%;QYuL)O7D8e4Vte@0!SwF5Kg_5PhD)@(| zXwE`Ru$UukbAApZR;L&hej|zbjl(}1j3;LD@PKQy&HQY6x0xwY9{MtQ z8 zEQ^|#TzcBLSE>^J1Ax-dI&)({NkG#vmr*N29WZ)fk|fhZC5~+f^^XW#J5#vp!wB4Bf1~yvZB;Y< zrb0iitkh6*|1*AtFO?IA5wCpH8eC~2#>;lQEbi-7o?|RD8=A=YU~#u3_J1R?sCJP+ zPFZHGb5jh~4EpLw@s}>#q+x8mKs8aZgZ4>aS9EVv^hil3e|{J9wwF_~@7x!roX=nP zBVSpJiwuU~QX8t!{HAuQ08@OZta-01)HnCft{A8{Yb7ei?_yGO1Ae*zu6K90 zi*=IGB){9(_$B3SU3l8>n~VV5$FEcp*MRHVMfy`!e*e?~(E~*=NUTb9wbA*zw1=3> zsVMby6fePY@AAX0k>8_nTJfj*l0PS2X(v6BPEMflt1I5_(ZJla>r_a+e4m-eFr) z+)dHZh=WW)1IPhm&@Vo1Unda>aKD?N7Fq~q4%^`)ebeaaR~IJSp*b2`9eCM+mMHWH z!-hykM>5rc$4G22&!zGKs-BHdq#?=}(JJ`(bYiw;Nb*13F4fFJD#9SzJ0R!rm%=0JogKw;uNY zrG5jlUu4DhXBr+p(FU9YoMlg%9VQ_emu%KZ4^8hh@<>_Wsl!tj1BQZ)t$Ct|f(H9Z z(SGz&M7f90hoA)Hx#&a=#~2zxre6j<5+vz|R2ao7 zyR+!g6iKnmV&nIgF<^Q4M&bWfaB&I%vfIbS_3+D~rHc?0XP^JnX*I3&Hnfg9{&Q#% z+{q_|R_RoQ2;T{%DTL6k+jen`@m|w15UH^3 z0dP5VD)X*hr8_>6!X5A~WMd!|fcqFEh_Bc0g_jFq!SxVjZCmak~ zb3~}bt*5^we0f(vL`sP3=nN?^$}q=%5rR`ADw3%prK5K z$Q$0NCp~08f8Zb^(`9%xYqmESBLKzK)?US(?$$Cj_IDCj z-j>3GIIFN9(O}Z00j9d;5>u;%sHM6zF)i;PfM+@NcuAY*!L*|kj}BI>EkJp#d47n0 z(~pY3h6jv7&G?7umruM%HFV>Nc5hTdLk_48RtT}dPF z+HvIXMn=c9&De537s6mPl4Mz&rbp)HD7=Q&>m}`O*xz z8{D&&QZK>5zu6W_GmaZwpqLn6%n>%95r~Jdmqi1^!^^g&d{y1o1gg*gk2ky1555jJ z`z;vCg&kA(_ePV3**{vVT)H$X_7$(_U$;J%8k&iC9Sx=fo=T9XvxKZw0(fw{FQAqJ zR6=e;*tJm+=a@m6iwo-w^v0-j{QDXr0HsAMxqvYD^i`-NqrKiRk#h24S>_nxb^*1D z$o|w-RVF17N5v1vjQRlD(Kh{Oi%j)y0SYHyulXc=94_TjN>c#3Y`9jQv#7>!QlbqT zo%Y0E`EpLF5lft!13(OeRiMu1f*Ajc?fDE*?9A3;xi8^L=6jx`f{7K2fJ}N4ei#uYP#wdB#s}ekZ zM!?y?5Eq-7BpngFq98#mF-~|G+KyEAd=FV?<Q|W>oA|)-NvJ#TtLxI?RABzn{S+WPa87a1q&mpQJI6%WGcG4CTtgVpy%-5OYaPGu zF=4RrRykM#29H0ryd3&uF@}iT?^9cQgt~QNSI+awLuBkrLmvW(Z2H*StQPnGuuI=j%8tGKmI6^mT-(V=wTjchzY 
zqQ|&Sb7w^&%i#9Fu&9p4_h2k zZN?VoSthQ0z{5x+`>j08N;i*-n2FBUC2$J%)@?_04?tVp*s&j7)0o{+deMu*G z9slen{1cg+d5PbieycCO(g5M7bharV}T4*kIaWFZ`_yVt|^)yaeE2UZQ;;)bD z&DVz*v&(I}l$`I@f0y1awwqTv)!5z(UwkjmXji6&S?bm3n5x0K+HS#YAchLnL*nX#*sNu=MPm4l8?W|}$1uyJC9wxSQt2EtoymSR+`xfuq8%~jBMG?$OC z4jGnmqdhiFA0|)l|8Rjdrp=up_+8QprzMFw4{N3mTSw}$@b-G%`5*JBcR`2uH9?;g z3FX^CFFOI#Ps5)dQq>TE$I|C}{MQ?3vd@oxGS~MuOTj$_yTEk{kNCMdkFR!$2=MrL z;35==W{2XCeYpw|Slqs3Iy?k=rF+>81?>HA2MXkT$%8gvn-oFjVJ(&7Tle15@0 z-V3(9=Z;Spr>pyr914lw!5LILp1uhq7eS@mZPvmOxw^E4=J{Luy(>&u+nh9*JE%OE zUz%R%dw1VdX+rR@=@m97h3kh+gN&8#$lT|C?j$5Pp#I}iU|jg{@s%%c^Yc=SM`&_r zxaad$&e-}R`5^kgyHZr_@`d5mw04p2N$sfh+Whk~@^qNwDyIrE4Gm>Qa6ie+<6R0b z^eHHQxVs93o@2TH@CFX49S0S5mw!$II$rZXg9c>M*pLhStbQ{XzBIXSQPX%2RifE< z#6keEqE^{r_-I{v-@p>-Dq;-Qp$Q(+=`3_H-HKC3dw%=8E(7*;Plz+Va`FYV=myYIKhB(F&e~M7)1e z)p+mr$RC0O*Z&&$5`+y2`S;-{I@~p9Opm!XTx7q2(!)2hNQD&KwswpQn_T*v7uWy@3g(x?xZBh2W&Or z4|wG}H|(0HXpLOBzUc380R4h(r>;XgL%nsw%eM46gm1U=OzO04GWKQrLAh%OR# uOv*Mc+P^#EW^6T`RP#1ob832aF!h)vr><#Oj7u&k4SrK2IZ6K} z2mL#yK|kyNQ(PzMKS=#DmPwJ%Ugv2N-S-`+VB0hY^v`TgDe9jy%cQ0acEXrx8xM%_ zF(+PMf8_dqH!0FWMDrlMO`=o;`Q&_JyQVuinjGPU360fqgb7SY_&E1j zZz%@5)?&M+N$sB0bU3RBMCo6z+-%=hENN1Q#xm4Fl}SxC?%e7p^d zHMs~<_jTpEs z^Yt7)wRH=zOTfznDUnPCl75;6M|61qo?3LzWM)TnIw(42u5BG; zU&}1rl+$1_&3B34nnETvk_3x$d~mMYokuB{PfuhK1UXU7Rip*K-pWk*FB)38tvom{ z(oBNq6}J-Rqeq0>aauI=V495e437^Po@LpMn2gR`6cHFsYa)Ls@*O!mQy!wCY{S|6 z7Il5-0BM74v-Bp!+_DgP=%0}VL<|rTP3{&ZTSnwo>~QMUizGN+7NP+{oPoIFYYzmU z>&BVA&)J^mbf(USotri5@q7Ar?6&i9Pn3V=x-ou>*oZYLyq1`KU{_ogTqAMn(gAgZ5@)eMXj!7++pTW zB$y4+mQe}eZMu!hRhUd)ziG+$Og#*UeyTL4^V{n;5FFu_>}R#ZfQcRvQ=5P^7)4U) z8pF)8Gw|&?g!%V}S9X($Bv(!Y=86A8rmFkPn>w{tS~4hiJ8NMLnu<$lVv8E!zakx35eWUktc}5q&8LUwjkj%;mbvuZgd*aEx;{;(k~!71uI$P%C}UnXOLts zA)5CV?sq!0b{xwzjm~N~upk4=;WbTmC9+kzi`v0E592L3uqZ)ENpWz~_C$tS)!5SF zUC3~kxk72%u-0lhrg`dz(6q9jw$av0_T}~0FDzI}m<7@8_x7eDod~0x@Sga(o1ey8 z5PZ??3*kB1BI<4rQ3Wc~?xP_#1e>YX00%jwO33mmdmZMY?u4LLfB-U5?}Uaukpb6% zQ50aTd1XI<^C7r`F~`_}Yyp+uLTV(dvH}o*5)Xa@>4J6*#h95xI)T#mAo%Nau?X8{ 
zazWrGymJNaya&|-(Th87Z_Vy72WnBLQERBIpO;&NH`8>B_y7i(N9k(SLGmgUH&Fsa zW5~gVLLeog?1A2Hvb!_kxlQwsN*)^;Tk}z%oeSQUFq}%m5_?Jij%;+gOaFPt3M@lr2230- zivi9;x7U@`u9}kW(2T#`cru))A!f2>mG!ob>Myl_XkJGdtj<_H;V#3sGFoLKkmTDU zjKU&pl^figbh#9JkV|Ps<$Ss6tf+`&{wxJhMZQYX+eiX#A!<>yX!Y70DPGkA)=T#g zwH06-K)YGufRB`pAhm7w>Om(_Z(O$Dh%am)Mas;6-WjZ?Y^w?9(^cp%(zrd#Cnlv+=itXjx)J$`nD+;Rs%h4!+DXWQ9JJ;9YKElmw)*6uYUF8KmF_f{l|a(?|=TX zC%}*Y{`bHB$AA9um;ZeT#?&*rHaom%+-PzwIl!)InE7)k6h5*5yoKEVFJ+$Otplt5}`P%*hNwxfmBb{B?R~>;G#e|0T#wb(yItC zE`r~Ok>dAs!F*P4BdCY<$S5UwC)*Q z^Ze1HGr%f12ce}4N)5&JG;n40J04acQU8|uewk^$74AWiQan1$xYy}epJiE;nS3<6 zMrV%!r6IATnBKM0vm%@TMM0WV9DBS|(7uBF8^{fSv~HmcfE$oJ1h^n_aC-4$WM;q{ z{1B>Pbk_rHhVQy)F$}>amDSf^8ahNGJHYdRZpFXb)Z0cb%~3osQ(I;rw>O?dPKSKC&hCh88oD|Q53uQ9}x z8ndeBbPe+r6rIb7Bazs zwTN|x-t4~35j8+oSS}^LH4Bl`CjCt*tbWv*#cOWp4Lu&RlB+x`*Pgnxi6^;XX-+-85 z0ahb|x#BSeI`c-bHmB-Y^`j z^#oKop+EJrw9QF;zkfDd+^n=sW3fSS0OTrjU0STgjnw%+E0d(&5mVSeA)s|pZ1eNS zk5}L?I9#e*MDdvDkYa%)y8;WST?LJj z7Q;y}@I^>U z9AUtawe^upu~TiY3h)8}^LYq;9__c<^xI=dTnc}o)^XZYrg$K#lZDAO0`jkP13Y|DQL$c*P;_pDF^XLi@0ygRCAtC>l|?KJn-Nt$JoZdswue(Wtr)V|f|a_m^r zefw30=W(aMJwLSTWwA!80?U|h#0qM~b{Nk>b<87QBLJ%IgJDQtl;DKbk*SbzA=&FuFn+_zHy)xpK{x{#RF{cKqdfAM+1uQ?12@3@8@oICE?%V20;6WV@7aJ2 z>M42JeA+y3YtM)Dfi6knXYSb5%=P zldDE=vz}Ume&q3H0TiUGRU7lYx`H4{cy(X`nZ_xp4Vw2fVW~N zgNvfbLI{bF(9VB0>F^ru5O8qDvtIc60XnJVfXx1#V?s+n%~&b)TGqqkU=aR#4d6aR z3Czo{%1C!9c3Vs$cu)v4@)F*4jxxh%2b!7;Af`!ifSSVzCu%QiVr$|!n|uMBaH^NmznLBZtx?_2AKOZor2EW_aA zeFyOL@Bbl(@cH{+pW+yfWWW=L;5)rCHN9aeBAKt%*X33jUmUk|zL0uP@6)n7w z!Qo5SclB{>8?rHS(I3Y!-c6p?e?{aX2tv&wzndhB`Tnnq0EBQ2`$J%<@a z+j!`~cxkfKq|*C|f+l?40pxB9aO^J+Qjj5R&-qrN8hQ;_R2i=U_< z*Y&6~8U=A<=9mtE!M1|fI27aafk zkUitJ$1S(5AbTVV@;v6TQxv4^(J|S06y(kzzHx)M737RVK`yw1b&>}udvxvbc#vt# z49c0;L2>R#6l8MB%u^Jk?9rpvcoYO#(`K&ewH4%zL_xO2nQ?-Gv^~0PJR#0BXPmh< z?-r`YNHpYG7C%WtXpr1v<0*tp8&j>>jd64&T!M$E3Qy7yTBJu^Z#)|EX54fXJ=N0C zNXP__d#-y*DFhY?3&YrWYE=up-ZNMy4bhRX37+M8I)&a>LuipMM-Fc+8ZyBSP0#H5 z&`9V6&!v!7PSB9HNLcmpXvmqlu4!3a9~uRp;LV_+!%nG<o&mX^uog^xU8`ip;&) 
z(aJ-XgnG+XEw}{cca`W7zNOf#qATYTy2pa9}7|diQO5eprEdX zMnNfnhFrsTPSTLFNXr?I4^ax))-!l#U!IPFQ}AZgfE;R_q9J9G9ve>{q83tH%<1?L z8wIBT8e$N!PVpgak@jfnA&}Ucxwh*VR!2jl;1obZ7M|feMMG$jCbP#voYHeV+R8Y0 zI(tKG6r2KR$ngyG{`=pRMS9E~OFcxf>H{R_bvXq#3QobBF~f9D*uJYR()GrJ6gZo? zhGn^39~uRx02;#oYwG?LYh{rp_r??DxHGeuLAz+1jf7KRCa?IAdum8oq)kWULnfPX z$7bDAuxuop0<$S+&M6vF7O77CjzvQbL<(kgkNL2Xa0<-vxP6KbX^V8su^a}b$n{cm z`_3^EHWE&OdA8-;pWCi1k{ixgcJL@NiBMxR*4YJTBjFS{{>Mr8-@&Ual2SgFD96qX zHUpnmM_*dUfk(nAaBg_^DQH_;q+^Z6hd@IPRB*P}Jyg6i$S@#<{2;HKnSy{Y1y`@w+2w9WdZCja++5xH`lKU2vZ2Dwo!ifIMS%0l;82lA2ZXUiOUHH zQ^+QkJGOriQYA=Y=f z*O6W*)JIOzgQbF}Xj?&;LiKHKBa`HHWI9UykeuB5Q;xCOlR^kgL9Ti_lAU*GI8qzK z^djjB#gTohsEPV00#g_Y2@|bnG(L`W6_kud8Kvu~Sv9{y2uxupWX!&v(fBx0B2buu zT;~I3`-$Zcn8Hvf#AD|>bUCt|aUun4NB^76cdUxQ6ox|mz}j}+q05nCFqOt#p*+Tj z;hOG;6oDypg@`CNsL$g_A(+|+9nmMXa+;`*A~1!a5SPY!uFvgAZVe_+K}V)J^~dBX z*T)c;!ca&(Y@Pbtj-*C}lDDlV%aLM6Q!Hl)OkpVGJuX|PK93_sVYr;0LPr>9O_72j zFomH|HrvRoe;nzQKnm4SLe-}GAp>CwRM9qW!5>Eo4ms69<5@U$ZvF?t6u9qLxgKpl zj_juk6sDl3`H}r}5ED5y5T?MKw9$h0aim=Rv>+6+v)N&z`TML82vcBH-csmtq+C51 zKolY!6;BltIW-Wbz*fGI(fB&D0wq$g5!FX{Opiu8l}5Bpb1QTQVG5(k=|*+R>&Q%;cgU#6LqR$#1i}=8wKn>k zzK(Q9iMD-YR`fmYIhX&X5C~HU4Q)H$q1%yeDEV@JQag-IZQ^nc!W1GnW4&7cab$>~ z+zj2vbOp;`Q&)N&3j&z(Gycl0j%JiDpwu3N* z{sQSnHrC_Fd)NwyLPUp{$IoON5T;PdxzWAsab*AJqeR5X8hPmFn1UQKdh3IW0t`d3Idsw7`Wdf~i7Y6rQ>JeeaPm;xQGn`?udK90QS8iFWf z*5e`7nQaFIQ|JmYr>y78J&rVjsbkHNKh{PL=By9|Q=kz!*G9kX*OA32=b;=OAX--Q ziJS%mQ|JnXnyjCPUPlJZ9cx}a+DI+u?+^r2=n9pZt#i58kq$G=ILCbnaqy2q5KN)} z_U^8YYW>HNqfHKsLZ(N5gKX~R91u*QD6l z&&GNjSp?Gouuh78>WiPL0tvwsx$}|JNToozAF3-~##{1q$C?mKp(|9BwxiJH z$i&IUs#C`l^D3Lzc7iYk9_4B4wcw8$&rZB6v{-BdWAWT8pm^;6r(CtVM5T?+7vDHBuKi$g` zgeeG1Tc7s%$C2qECsw1AxPNWA%`=S?geizp-{v+Hx*b_4Ij3DAwxd5}$P~*J2vZPm zygtM0k0TwWBYOYEGUftTv}u8)*8^hMtZLbAANv zyyn)%b*yM#R1EF?b}mQrp4(+~t!a)bDJ`BCg*>IJYE%Z@ZWly#QZd=8*fJJAKG_p2 zX-0I(wedfVt&$E5=gIm#Y~N}v3GZtGX>EYi5*CV~PAgn;qJYmu_z^Ap@*vh7jfc1K z=kfD#?PisCW&Yb(TFvm~61QEyZ{LC(8{S&^oTD{cj}K5-riC 
zN1Gye2v=OKsMO%bd9)cTk>uqpWKqi@#@-(Pvi7p=CHLm1v0|L6JhU(trokN8tod#Z8}7HR3m89bkGY8x|@~!5dGh-VwcvH ziEhNchGFF1(1(GNGql>QjEQF11`fd<&WEC)0lDicx?2E@T7iKNUH^b5lhcPb-=;5JE{l)B#PhARBjY3N?zWGW-R1|_vPlAKXM|voo@SxpEsh-m$pPX zkn#yz@A0CIX{QoE@~515anbhiLb_(Z5jW|@gT+jLeaeTu zZ_~ch-!0Ok=xj6d82(R^=Xop(5rtkdNcj>DwJT{7ll&V*T*OUOn%C2J^|gDMH~aHN z@QdFzX=dv2^)sczf$!DtmTX|t_MZOP_whp^{oQdUXOBQP-VBFxpGN(q6$&2Nk->O> zSwnVVIU>W83!h#ZyOF477+%LGYUkIk8TMKA3ZoK3va>Akuc|cGf{RsUB<&$A4iy*~ zsdrQ|N<~2`TIIQ@2Vo*FF|{9=1Lyj*M4$XWe#A}reQFQ_E3;v8M1qzL z#DNe*-VX;QAiil>adm2NcG`;FkwLJ%t2xBnm1~cPo%69@$BM&xR;Qd80Phf{1CnVZ6k8?X&Q9qf#DM65^HYLH!O zi20RR+*V6)*iV`~AuAm42Kh9^!0maF@^<6sg=?`zqqRgyNEwJ@qhP@Mx&#)dPh0fo ziv#b5LlM^_wlYUBfB!C5a4PyN{7Awk0bd`HbNk%Aw{faM0#qoO6>Jm6Hqj}o{_#@o zFf{_i={%}nWD0VKGA1s3OA%++2pZc{A@f~qgX=-ckpQ-Q`YoNGSL#HjZ7M6jva>Z6mqZ>FmLgX+v(^zE|I2O&78Y6Xdoo-J6lDyd02nh3S|k7J6!;66z%7L8IMm zN<4wB1^rp&8#~GnFnS3YgBwH!(&IXS3?qr_g3LRBKLl{^65|W^h1nse1}7ob6AhjG zY=LMcK4KwtQAI2PVIU7>rM$*WB7Vah<%oQiG&9D7ZJ}@kDJ(08c$R<-7xbjC;o^6x z%nfy!++{3a-=u0wO3G*0pl+lRI>VJ(VE-pni_uMK5tw}Z+Cn;Y4Y;Yvq_Tt zT!Fi>^!Dsr@32}gGf=lXHt*E6a_>$p5~UqD3vqs*4jr#eFHL(@b+kl*Fo_6!9p2w*Wd#Q@7i0VScRJ zl1PVnRbZ|Li<^)if>FlDNc#KbH5r8H_R?;M>A5CQBQR>yL$n-mImsavE{;jk49)j+ z_oO8M@^m4hhm)B77DCLGcRf*cRLG#62_{*Tltqgy;#J%Y#8kwlmDi#YZlsHg^(6nH z!uOBXb29;~2$Oc=LGm&_i0^Q9Pw9k2qrHCnH>X;ryW!+m~w!K}pGS@=uxT z&FyK74g z{tDmX@BZmlU93m2NH#iq_^Wzi4P@#L*!mUzUz00erF-mbo;m|}5L2+-PuJSeStD(+ zZZ*dPl48sgPIf~ZGs6KR;7Hh#j@BBXThu2;XW>3i^s@KF0-K6hsllIP%Vq{Au-u=Wa!GJjQW1^vEn=M*@&hEwH zILT|kd^6`t3R%Kn=ziK5CT+P*9biXAr8lT> zue-gkBKFqFhmqDU*wnHzo{4n@+;9G}`!aUI^MFg}Ne*=0sz`Ni&iEmj>|`(7(^(WR zUL2@;3nIm5eRcn9x$4{4G?&yNS6jJ2-qUL+)1YoJx6d7{+`)NuVOT!`e^j8mLRJg$ z$}pu7BjYL_t!8rDtb4oH<_Ly_4Fzu~?i>gb9qOQByU_fbfMQH#(wYIk#W%f)Sw^Lj9q6pmCU-f%UT0S zEg?=(m=6g`YZR2m8@gZ8il(X;+j6OCp-bV&99-R8!?RY>j|}q4xYhjT|GWeQS&`_C=9|}r=Nu_*pjm3PZd;=^bDuM+{*AE!mHJC?&JbWOC$l&0{qe#=*>)Xn5J)Yzh4^(>>{cB?xXQjPd6`HC zW@W`d-9ZES8AKA*ykcgRB79uk86)dUl#8>(!IFTBViV`{aA?32+Sa>4Aq9Nx|3*T% 
zPZ%J($~-TE;fH-Cu8Jj(*}~;^G5m4w;Il&-@w0Fa@Xu#EgN6=YkG-(<7)rM8fM!#Tn1sJSVK?@ zdyU8%)qX3Mv;M1yPF{Yeur!woofpSBO*AYjAEs#Ounf1+W+jb9;S4<$R=F;Mdjjf_ zRce&&dkF(ndc>q(KxDOa1b^r(=h8eUr^L7}b{FjP($m}L{mo3Mb3z#{AJyUf9fXcT zB^-Dj8fk{MtAZxpVNg&iO~$i5P>RZzzOt1lh7e51#L2TS&T}z}Zy`s#VgSRK)AL6# zqQoI7OQPokKzKTQqIuCU;TRLXlst~M7yEQ42vfBDD>VBYVB`2DV zp5BOcQxa(qk_0Ad24e`{Xk3h+pb!3g<0+Dge7gB0VOazHl+<3Kw-n_z`MA^c*_Ywn zF=@p7d0ZM*i;fu=c3vop6>e}Ne9mnnUX83%yXi0vgVwmMHo-&6EikNo;ejnoN$k1F ztxROaAMNpB&qYS>dUGgoaAyY%Af|a}Ka`?)p;t0h6GUy1;@T@VK;RvvDg}!wR*}Ry zLt{l9JCSWVaRd>XM}qZ;6Cl>WB(>l~pUY-}W zPKn#lq{skadA{-7AyjYo((I#@Mg7L*?r_dZ;BM7Dg);o*S$O z&^BU-fykp^1hgmBS$GbNkzXwsRkX13OCw-{ z(d!o)HE@24=v=oJl0J_LP`xypJ)Rsq{d;c1{*L}W4>Iw>?W^_}j!L~s1YNMoilQ_o zwSyoSZcC|8bYzA%7bR{o{(nG7V#K^aLnv~b|7L+riI?yQfZ0gS%Iz(yo1l9K?o6&( zU1yjoLoB&)=N?IE3H9aQc=w5-u)SWxY^Nz#iINg=V66${rt7!;mMsnK=D-0hg0&e+ z*h|O&f<;GX2o#cAP>)JeM#0Mlx)53{p!kyrqh(=Xnv&d1;+HbwB!!-9N^>8~@wKbt zTcD7(XB>X1bL=~np4!pn=FiP51Ya5O^zZNg_qhqZ?yvo5zP`EP`fGni%u+L2vBw`vPf4v8 zG7?quf+%cQN1?isj%dQ3xRqE_EF{9hk~jF#FoF7ahOLPdJ>lGdT0dk-7%H^`zTL{GYNJ|Szbbr<+wPCq3cs|@?3>@M zOSf%VqcDA&bQ*>jz2qX`EEsrCzbO={ji)K>3mPnPlTF%FTc3IooQ#=yhha@>Wh$z{IPV|B`v5z)6GmOXKE^By z09`kc7dd*073}JAsge=%8EBFaJnZ+pr3#JJzrxPME{bUPqUH@G@!vfM9XPP_ipX_s-7?#BjU}GqbTy|XsycBl_ z3${H#dD4O{Xbwp2yQXy^S{t1f#6IDJU*+jN8+Uo0y~`zi9%fBeI)CMCJMOsE-?fF5WGpmq_Gfmmyl!hg-*NrfJ^l)HHC%X5W zl+K^6_gdQ;#JQQkm?w|prJK`>CZ%FLJy&>}yGE{;SdOBLU{U9XGKa{xw$=g}QAk4H z(r5lP>Hel3c92n@biV<xm!VkmsS6a8S9&1H8&E6yk)rgg zn2ye}@5Tm^>4tQBmcV(TFaaKiLPMos=eY|VV$c_YWoshPGwld|2X=eWSSz!rIVI`3 zE=4uk9|s|le2DuSa4I|MP$k1VIqk5i#Eqp|%vy zw#CepAkEDMuwr>pt4xC{R4}84V@pit_-08dpEU!ipt#68W6uiR==lsO-w2*jeLQ2) zxN!@^Uy-r>TJtD>%M`$5dFC+7xh=rc9RfNVsp_raRTuKKlxu8)*b@6};zf<*&#dXw z5q-n^B3B&K(+Co~pAsa65lG=l-0cOJej5`RbA~>ow;b|b{Vm=Nzc1PE)(APs#2?)a z;_cqPP*m8mb{VWvtya^#RIe5J_96a+UGr=)sK6!0P3}@f9UD=JGwJHG+(lW$%RS49 zXp%QbA??VV0%!E)De*)+44MN-6}=(AdpuhIc=P#g3D!~MLo_|HHsXv?7g56#ro4>B_tOugR&v7Q#T^EMllymCXI<`a}s7 
z@KMW45QbuD?&Qt7!%0jN-_~PX^CI_t1y8v@0Zc3}r0Fv0GoCIaobkzw`pXkkdGc78 zcU$u-Ot{22II!E?LF5ziDt@enG+)~?S?>$Rbw#@PJ{OJ9HYq!_YBHHrFIcbPQNW=z zx`mmJt~y>C4VR|$>D;T-kKH-%@$m)171X$1zSuAR!4k>J)#~xI5kF>Sr~RS@A@PsL1`YpBB0Up?=m+@z3%n^%@ z`y?gWiR{FB6Sg&{xd1zSZcJ=X?ebhDKj>FdnyBG4t%=ev;sY84K3DIYATr4u^ZN+i zWYMrHuJJj@>tF-49mGXG@OaX$|I#--*P^2zjD+gx@Lb%^W>f}83+(c=7jcG~iBRl7 zSUz>`gc4@uK1P%csiIpW95Wz~;kpEdK}y=@N&$OcYSsbEFZ}OD<<2Jv^#<;Wc*VL7 zj}^cwO+`s-o*adxSdlf$_U%+-mPU0L;4L;Ka_U^q%k2eWCHBLlC_NsjhPkJT=BO{X zga~}G1({KIXr{pV-0@P<67C(3rJ<>*Y=$jhDD-$zgIdON0{09Jz>H2{0H@!KKlIEx zl(3}Hn0&4$humrDnt$@fdde82b6{(XiGi5zlQ5PFWbXbW(Y&C~eDtaFnhfgYAOaQMv)YS?7S=H|e7LdKHB}9qA^EpOp<@t_%#GUYe}- zJ@1(7gy4`e=fam^VScKFN%1E2SMqugTM`OIFEh-)<%>zZh2B78%6k`$yd?2ro&!%v zP3}4|{7_g{F(fD*xtoWv6yU{BYbKuF`#PjmqXjBZs>xu_)XBs zWzs+&x?OYt8F{KLnp?vtRDiL+4mTY(SV>-%!!=6=aj0<=U!u>S(0YyLujfAhxasncG4$W1T<95)BONL&( zTLLy%DuJ0|LLH@s^JZB-a?5j=D)9UQ?QPpUPkdg{xMif(!^rg_wqRyr__Gu0AiuBR@#Q^ z7xZ_Bm9M?6htr#@+f6EX0ZMz!b;10uMU!SX!rmnc&06>FRb#*2T{<9j!T?YuXMmViII(J|F&fm^ET#p@j*4ktOO59%KTCB3@bPJ#2@m$= z5C5Yk?wePMgW6r6%?d2+ix8oktBvz?F8B??{jz&JbS!KIl z(-#+LB39X_DWh!SN-XoUg&M(&gsgR;^neb}zcL8mPq<`vD}`7>W5Fo6Az=EW%#V)E zyZd8~ReQE!0x&164%{fG3nQjMB7{W5pnv@CQ(4!J;*qW3_VJVX&-iq^EE9|Gr$R4s zhz(m}DY~%3G?y0ve_}YVZyxv~$JZ|OJ%6-!HFjMcl!);C-4SnDeAuLPv%#y~+jZ>N zqfnO_=0-uh>SK@dr7^$^o?#CV-4<;dBPDL@Jk7V&Cy;RCD_Lb!e712!Z$Lj7SpgF$ z0_|BR2u7^@!1CpFayI@84ba*;l!S{WATMeTCQB~}U%zEr|Dr^%Dak$QK7(AE9wj2^ z+O*?vnQG7;anSWYp5eKWT>zVW=KaRdQ)YCIFH@kz5p=E=tv0vUIZuXkFx8G_FUJ(e zqXadcJmaxFCbPzL!M$kFZUF3XE2~GXxQ%xEzp~&LzHTWReGgoM-95FN+(CPXB|k5P zIqS>RV7G3aY7AYo?a}Kvq-y_MPy+`#9r7UDbZ#G++YAFe@pCHy)nkW=;(t2iO5jx5 zqJ5e(uT6FQJ|clrdDz78|FTihi3vQsAidhzhN+%CVDdPmO1Y^Yy@mvt<-?bKO~|11 zJ}PtzU3rXg=wI{$FP&k5^!UdaJq2fJM+bi`QQ9|J&v=rUR>SxYY96fJ-XW; zp7!ims$*<8edN2pnKpM~HflYpMseERNu-_0Oj_-qT8oMd=$|c_9uME~195ir)UxEb zbuCW*n!Av(p)qsJEgpo@S!`$r-L_q!s&e49#darV6r9?zZ=5}dR#|s5-W)X-=Mm}c zm(NZ)*G$5pp<1#T}Yqwz!@?yz4_mxw>T`eT|jcwIZ? 
zU|a#$!bosw*c)9>ne?xjA^7~JH$_8kaFuEBgT>bYAN$d3_8Sf2{I9pw41CRZDeS;i zrME6{y;pYFIrt$ZZZfH4WP1%&mtpw>=Y%opxJxJ9Dhl39+MMYXsDvE=G{dztBsIIS z_VL85zls-r--3Hr^U849@%$Lhwnb7*b17pR>`6Ak5~dF)eS|X0uq(~u39gYT7u3H~ z0yjDwFQBo=sBKIN4QnW$+JW;eN506Z=85=MNNv0ZNADucmway4c;YS|U;cQxFmR7fnR`s2e;j>WxdX^LTv^%Mv!FHx4KDE9p|~u*3_P)1WAa0=ISnB<@!JE14H8vmt- zFLoNvD|bGs(2UyMcA>lELJwN5U(VEv6YueqpR^K<%ak7Um4c!W4cc2+%P?vE=WVef zpH2emB?He$tk(w=prGlH)4Uho&Eo-#HCX5Ey!?hx~Yt^M$X=JA30UQ09VqBQx*}X9#rT&wu!Z|Dlaq-KrpjGMW}u& zyB)D^3&(qei(E+>L;a`2fZId=oz!eXr{ie~UT^cIE1{aXg>kU<ZmN4bpSjFesTX$^Z#cxlyCx9}W{w@J!tQxfKI}J;C%y z-R+E*T^>GMPi4@Yu`*wCb+2)CXOrfpn*UCKJT*v7+QB2K(bb8M9t2YVg0v~ z!85{6ynP;(OfqIXnU9MQh4e8lUg0{9lKyw{>h!9CVf6r1G^1W%hF~(-hIY{zp`(ki zN4WN8p;mAa&2h6AeDO&d`MEuH_Aj7uHq;(A2}zdmxBPsykpn#KJ`yUya?jINlcj@1 z7VL*u-V7(vUq5JDlr!WoPNPGTzLJ!F!^ktBuXJKP@vk;~@?*bj&tLI*mD`&Wb0*b< zz>Y$DqbWP``h@bw@W?%Lo6U$?=-cNVvm#Kff^J`z!Q$izEDF-*MO4GC9AT2D`B5hS zKb6tJ5C>fQm}C03FT)9@^qWB-f$g)%AoXdH(21C0;D^De6qPHa9lW7@mc9vhP@(BL z!;`5k?w{^T4wwWI>o=m@*{?;_56i1;-DqE zTS-3{{Um+Y-F zZ4KBh$-UuE5__?9(Jt4>H8NVJq^`#30Pv)bM znKr!&JBH|bQXPv<6 zYRT!gFF2C){qx$^qY)Jq97L8|Gx@ zw2|G{eKM>#>6THWVmt9?L0o&1^7`%Em-uO>B7-vnqSp8OaSFZ_#^ASE_~y=*2Y!4z z!mNk_pY5&R2gSy=4}J+Hmi9eU|E5pny2G-|Pl5Lqcc%=;QzOELZYYGhZKz=dL{f6g zzsKsrSrb)f5?eMB3${Y4K+9mAZ2Q97(Y3D$ZdZtYv@Of%96 zRDbo)PV?PH@G)=V%t z?diJ{(4X?nkPjPe0?q7SswVE?o9n;wAH4VmWc(S7=vx?g_4z!nOr*FDEOxTl1o8q% zmW4F}e?86r*fM-RA+G%}YM*lV?>h+fzP|=c#BtF4cixjEzW8@C7MjC{S-zpiXVOpA;r!MWOk+hU}|dq z&sAVyz##Z?QvWNfzwl3Ws<)r~&ws$@{r(U|9P(u;45T@B|14gnZK;2*SEg2_+E;E;x#5;-q$6itkU;G% zw}VEpl1zZfPFZ|%$v%pg;5On^0|>rNt2HWDr&7H;mk88&i7xuRO}dvQA}{`HuF*M$ zPS!_8@XYV0rLXwcq-}l9b&n=xx)u#j`n%MCy;0i~xS!cA)g55w1w0oLhM8!-@oT;1 z$~mf)wOomP<*Jd&YE|hiZsPYBXR%_bYUQ`gzlBQng|mG6th;d{$AMW!$NjtK7{j3e z2*Nf0pVmG5*8Q9RG}Hu?nc#Cwb#ad1b_5pGImdTlv+lK>x|0S+hmBVQS3||#-3s@G z|EQfhyMF-m$syF`}1&1e56<-I5T`9YDK``M=j zjz7o(iTDgp0*ZHQdbfWB4;R8JWe8saQhznzbd*g%9P2*6;*T#UyqAAoF;d7TxU}>f z1iTA#BHVAw8#1w`O+Dn;Wc74QeTSOb&`Vbv-5x@t^xY@$^mF)Iy8^}gee--b{zdG^ 
ze!N=*PxUwvk(}kXxwA{G*;mL2ROns&qJi+DecpF()~<95^p+L0(>EHI_$FW0RIE#m zT%vq!Y>W!|_--i;K)hyGvBJLd)vhtVGU?s2W4q6zQcK2%T3lDG@vm8HyGoU4T`i>i g{>;DY`26w|+>3ub`hTw0qaRS=wvtzn0uYe@0j`gY`~Uy| literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v4_v0.25.2_movies_with_settings.dump b/meilisearch-http/tests/assets/v4_v0.25.2_movies_with_settings.dump new file mode 100644 index 0000000000000000000000000000000000000000..4374e131c1767499dd3c570209c579bb5894dd88 GIT binary patch literal 11545 zcmV+!E#}f6iwFP!000001MGdta^pz0Wv%-w@U-ZtdPXV<&Ws&3&r*n*B3?ZUa{^8R zA^?XlkfPGktp4HkruV(*{*L;S`jWoqx`PB6RAnk6GpnmBxQKxS9q>DzJ30NxmCrK$ z;Sayzt46O*tA)SqPODb_yLkPm#>WwTI1GwJ92++UvBOYO7rzHad0Ju5~(JD-?csuQ#Fp zwcm2%#p_?3|37Z|?$-NyWh?WuKyJO?>L*(h`x2Yq8EK z5g~mtQVA{TBz`C@Pg<_;T9GQgx@{dt)}!!)3O)nBvecpnauX>+OAJ+>S%DN&KN?$f zYx-6mWyzdx!&l=2VMTIg1^!5GTSwB8sUu>E`>W836QAVS8qzwMM;E7-tD<+AHC7gF zA`V0pNq2jDo5(VjQ6kf=_y4iAM^7&3on1UM{#LY+`PP3{wqliLG9kiwSr(`7fB3-< z#aM1KvxS~o;uP-aI4fu~}6XXGcu zn#dn}nMGU2hmUGVdj!$&KTl;cpbKfGxs^wGniHGHxlD=nB9Vsd#04UcLznxZ*SaeC+tf6|wzeABZUd1{{L^P>b z=l(=itP|x$EYsI=ZjEJTg^KtT0Qxbs&O3MHPAm;($-gt5`EXl~soxZ-QrXlYDFiDsgFTO?3y_9A)&y zIbEAYGEH%zUdamc#Kl!~$23rL5+6@Sju#5L32r=s#15qck%1`W(aaA5EI?Ajk0-2D zOo>C*SU{{smP2Cm-)Y}DnHiDlZ9^DCnK(qrG^Gb2jSnU)?}^IC9^WcYhD4>uJQ@?Z zdEa!uOppMlc}kK*%eh9Km-5qhGG9gX){U3YgOT7A%KjEnNyEfj_hGF+%jwd=WIhA=6TY)6QkYYI9Fk1yh?p=?0WAx|ohcGxcnT67&8C3v#QL46 zAz6PnSJrjCx>x7LcB_r6OFR?Z&&ef!VJL}*{wNP%!ZitSDP564H)09Xgm>VG*sf@q zH_yMyZg;A8L+>tFKX81KSxL@Lzw;K(w+}|USwFw)-*+EQ9zS*Dy*%@W*Jt-PvyGgu zHyVw0yIJCV$t{z&bwSDt8LCz0;~mmk(iXD;*xfm~K%#q~X3>b8bj4!*lFu=zzH~`T zz}QH8Kj^|Cxzfzq5ea#JawAfXNMeYoY?GklgaF7!tI-Hl6i>NG{5*t3r+3Lh({)&8 zrV_Z)Z~+3&MH2YhY6-d<5fe9uqa_I=_JehJKxXYEQ5>A)#FMD>7 z%9CuBH9txJp>6D;T_R_oVg^bC+ap>zXNlJ0g73P5oFrXKdMq-+ZL|`RTdV3O!21@u z&W7_|!)|?exjS|5dpj!X%h9;|czAQOnXbJ?r_pM(OLQ#(++9EOUr0_*MI_fP;4F;^ z%_pP?3|^=t~BN#ae+C6MeOv5crN)6 zxJyz7LO}Qq5hf3^3e%F zN(!9O#Y1er%DlJyk&eK1bR8-VL#n`l96FomDxlz;Fjtg^L!ytCCK5$&o_Qf{o-kIH 
zvQUCzd6F3MO8;Is{1&^yoZO)R-)By)$OvC=q$zR zW}`6m-M{|tKrZ-R*gR_>65(+KL6(n9B`9L#>s|&rxshh{HX|X5UHR)?o({3p32~k< z54frqA~`V%i|`A)l?g}=yp#F_AOyImIGP&!Nw83F$Y3hw6!dcPgJh76e@r*GGNLCf zr*}XGNJ}@}%3)m2-d4xM5M8ypL7|s~=~D^VPuCFpcVz&%4vEDC$H#MR5_L3~u9=J@ z0v;+^4TB0Z@+N2SN$wooMY~fms1Ik8VfXRiL+xtzY4-5684lU?PPg4E^SQgq=L3I4 zPHaDr*w|XuBeLrCRtNSY6VE=$yANV93igQ#5j2wH*0B#w!PYa8P9W#0&Ljx~qM1=9 zTNR6h?K3W)$hkJVY!FD>^F%ui`p49t!tRsK|0BD6n%f zTQ1pKLh(=%Hi`!Ot}Y$Z^Lk|X!8)CW%2JaaWXs2{9lVC`C3itrz=-PzxX@h+DL0$B zZ()pgd)iCnY&1GOe99i?(NSw~-O>CiG~&pqa3J&u@YS1%`Zd@+Js3=)R+4;pdN`R}Ux<2ge*g3m?&se1D^XUn z-CIeb*6mfZvQIH8G6{n`3|R?FwA>;5Cas->;%cN-zqr~mfr}Abon_UrCjEl|c#y;P zh=4;^pg@ofqDZXKmi~sn7lhWL+F~t23LWpJoDj5*<`Hy+GlGg-cV_kTG2DHT8Ph(k z&^p9kI<(s*3`=~AeDSxx{7*0~f~)xB%=3|R913Rw&9Zy#7BQQH{wf8|xEFu~o`F{R z6Jt{HUXEAnmXOYfA`$x1RQp=OHHe^PFy{n?G^9$IRBIw&HY?ufRa*D0Ts(c~TztBj zslD^l%Vs}w9pN3x=T~&ewtJms36$1LS(Yn5VZk>W+7*dEB>WS-XRhq#1Rsmfb^_4p ziU_b)Ejhva1M(CkBo)qjNJ$t$gA2Qxc~ysvKiNCJTg5^)g`P4k>{!CyLw`ytrZI0! znuAz+Y+WHYca4Oqfs~w@!xKabVcf#`lPperHv%@GnA$v2Ie_$J5bJM~kE3t0F--+W zCdnII)|;Y;u8Jm|v%B7TIBUP0zBIj?cKq@(4X43tS&>es)-46w^-{2X3X`@&f4sNu z6w$+8Gkv5UX)2JG=8}S^GQb}gq9o5emPQ%Iftt&t!t3bFkhYPb)QDJf+)oy3=@8oS z6Nez#-~RGn8sH&;N!ByfMG?9a)*z~lC5+k{ISXI^cDOw4+=z$nwIjBbhSrVw*#c~fQS3!F;q0S1~Y|h zvCF0eVNaqRqY3N|bBed_MN-rZ;ylYt+@YnNIFz_Ka+X7dik}GM%3@KC*fna)7^icJ z%7iFa#2jP;`5vTV8h`1EfF$CvUmv0BQeh6^eNJ^VPEIp1*T!xEesw&(y}bQ0pj_mr ziBG9N^aB7E6RAUcbi%}Ph;EsQB0N9T(j-!yZQxP~?n`RzN}>iq511RB?;@S_6OJ<` z5Nbby7fxTpA4JkONli1^eG#ALtq+swv)6hSckxl;KMc-p#+yrxw%s7Xsh90!Daz_u z14VdXRcg*fmQ|Lpne3yF;DQhYMdk`&j&Ks9o(|syzwHM^l#1m$a>!{b3iA;+Xa3R% z4@mMnGI{}*d^kzsF#6*p2`rWvjf8I?%^8JY>_g`|>_;k++$>pf*Gd#I+fm3nWIDjY zQ|Ne_LMS47)jCrVYQHJ@c>g&gF7_Sa%5ZMhgb2|$v6?XDQ@R$ZGV8{aGb*}XBH#{v z#VMo|2_jU<8D>?ql*Z&ua^!JBLeG#3`Ha6}SQsSexRy?uHuW>EiE#Y}nlMrU$A?_z zmPxF9tr8K`5*Dah>@iiZhUw?n^jAWzo_*PQsZCYq+`qVYThDv-9pz7F!w>1^w4Pn- zHrp$aT)h;@^ZsFkEJoMU5%634nMpCDuu#r6;y(R0a&B$vQAeMsT|{MWH0 z;}1LCkFD$7^U1XSGJ82V8s45fG$ws-+TY0iMy=7RmkXMWa_l0;I>mNP(!R?3Q}yUu 
z=PCh$RP0>@*JF?=H^J9P2Vd~(kaq0Rcq)&84i^Y@-mVBh$)d912A_ytIg0{INE)M5 z2-}RebXA&I5ah`^-O5#=Z;1lOgZyYu-pjCWlg&$Jqx0)sFB*jBcNeep81&kmb_oYH zO4Xs;2=z3iGS(UlP*EpcInN{V)xJ(zXXIxH5W+}Q7cgkTDF?18pj*X(m(w|pgazdF9M2D2ilDh?M>vj*GaVWQu(`1I*}Uqp1zRd5 z+_lLF5KUMUhV+EQv=>-h#Plg`InO9gI#FC$0-WZLp>HKtlb`oCqv-!jR{>RhH>6LeS^lalUh1bor#RIw{Z+pH$b&~ zJo3>PfF7I(WjvE}9Q$K}_egr9-eB4V_OJSjRL$a<;0r03AZv@3lYH!>u#vv5EQ?az zSJYC|#;|+y>3$YYK05VNb<%p7bUIHv?W>oMufUgXtFe+KZj@ZkJ~&HubC5@9P*}ut zgIHxOyyql^Ymiuxwca7<%hhCx`m$l7CQ>_+breWQMkX_O9~M~xoViAJgaT6CE2V2t zI>;ciwNlq})uOPlO$?sF_f!n{nV~UA7J!nzYLJr7w4k?cCgv3llTfOXrrC-c=itkL zn6Oe}YTy9Dl4}tGr@s`d1;Ti~1vxxYm{q2(TfJ{9zCIu1$9FrnxEj`b)2G^D?V-2d ziaTeyx7iNbwN9_yDo60GR=N7B=qV8zkg3?12Qzs}kys9K1BE^7oE*EOtXb}GNNcK?DxVOaF(8@xI)W*g|6Y zxiS(-QFOoMkAdAoNznK$omRDF2bPDYzce$`g1Uh9?V zUV^wcGZ|2@f3Bj5oPSw`!k*{km#sR*8x@54Q`(2A(e5I9K=;ZWGL2B7W-_#6xmpQC`&+rldWIqFC1gX=1%< zpF?3yXS-fV2R=jJL}s{4+m6ZI`N*T7hLJ}QJFgfRFtHD@8^ZW=TJeo_@X_w4=w#5p zRyV^lIZApr-t2y7FgfX{%>}D&y=Je}tJcG;V?OSrE~m_1l?UpI_+biSE%We+86Khw3XUQ1WnxbV!r$CRq) zmR<_TEK$PM>Yc`cpDiOQ?w}OtB;xK-H1Lz|pihK5KQs;`5@%@ck7I!C6F($L{qxpN z^;!&ve)i`rYY@VOpO7x0B8HGE%_w3X)0;5v+@%?E!xB(Ar%nrNg#P!0R!Ic;Z%&O4G-dZ~MS^rFNove9UFRxo3` z)N0lrrV0_Ub%kWcm(8=OhAqAvk&F{CUWU>>EDmvt+d(`YJJ59oVW5j{h2_erE=kbV z`RgD$LxjZmZSp zbW0&yyHrfL5{ZE6{WARvBgT0o4}jcQFE&gRpe8v}R`<3QlVw8{GPj7PHx#h<$*Znz z{tF=aHRAbn|7r43bnjee@6)84zg+LsuSQ;c-a9^f1yI(yjYh9*!%HE}P9k3XwIcGF znpFcd?*b@FkyEIeSp(8#`hYaPm|kP=s7U=(w5Ue8VqVQumHyJbhD8e@GJhm*+yOuz zE=)sVW4H^JoN~s+IFfR;?1Wm*Sx}5aS7Rb?Ds^!i3j5zA{g#||(ok$M+^{m$-)AcR+VYhbW z53gQ!Pn)AVfAXmI?5n3EH62bT*+wC2b~>F}Ihoil1$+056NpM@{(ARxpHz(aMUvL#G55m8F=;TM~ZV0#ZQ_5Rc~aB%v>@y*xrI z1g2Z#M8-OIOD1%vo_SnnJoX{3*~=IeOV(t`Ir^d{T?ZlhLNuF5-IAx3evDQ-rBQ^^ zz6j#aH!Vu}j`GZKS)qsDI%e?bjP{>S-TMRkMb4-DyH`(fBb?lyIa&QRbKI!4%Pl3H zQVSTmK;|T2#F4ce(g5{jj98}Hzam^TQCbm!^tw$Xk14KvM?OE{xlP-yA~3BYvMDc4 zXfF@BUE5@ybis<}KNCH&K?@gYmLUNWw6O;Y!<|RSu^`7sqKif^f}f_D5_4Ixmzs1j zCKgOf3UT8pH;mNToOdXZqFoCClaJO*Tr>*ESXGMQX+24x^qfhiUTa5KSUjy%Zz`Hh 
zr+a(u^Tpg2A72hzmsfZDcQ@&1_sQLyB!m6N*VU;9qePdQe`Q|Wut3jEkJcG2kslM2FJV0Dy;>Crj1o^l>9GqFdS zla7ZJoRGkd1qZvLJdZCN-Pv#A7d?J&@B(D=Y`ed`&|BRclj=z_&LDF$Bj71-aGR_+ zSK;)NQT(5Jg=DYpgWDls2nc06fBQ zaQ$zdV{+=_G#q@W?>V9wU`18(n?`)KZA5$++8Xr%+__5VKk6&q%v+Z8bF4xyPt#Y3N z32Z+jd7Y&Hw#u+~_uNv>eL*~M{H_RzXZn-_^zm>FniDcCy1X1l#dJ20(=ZemqJD>= zcr?OG(3Q1`^}`2(M(GMEteF%sS73FOJIN*5ha%ma>g6Fk1eqcfn=mj=e#QlUa!j{V zh8rMzNmLCBtS$(~M%-pG$~F7PoH{M0T6MP24ytEM0CUAy)g zI9}F5zNwi$2%49z>7YM~KJ5+LC&TEf);_$-UOJnbk-Du~qq*YyOPPWlZGQ=p0;jI> z`7&BmifH$D^u&>cdv(jYvN8krPn?O-vRj;_(3zVAzL1xnn*42$81n%{AF`&a-8U z7C8#9Oqk9I@>XSO|s{GjoeI-?}Dx1BN6 zzG44mMg%#+=T@_#A#A3{{&f1ff~F_kDxlRh1~OgNen*1SLZ%mh=NHZKH`=IIEChb! z+l9%J>%<|zdKGTibSlsjq>$Y}XV`hLTSTTQ6CPXm0dtX)4+hX^{GbzS6{{c*eGg`m zAPP5HuZ^;e{87e6(~r2DMU2%lD14V{_g}$q?aW(i(OTB6zN%n7&o9zi&zr_acel2) zbFV9hr<~mhVAZd4!0U}gt29*ODrZm09n*uW7(LBriu?_E z)Ih5QY5)~$$ZI9MmUZnr69QDu!~j{)-;|&L>A1@)EV`u`Mxnn$@U%fD%|&s*6aJ}b zI-C{wInUZZQ5XWTlaB*~FX)Vj^rii&!*(4)b~ix+bM zW^i*+X)@((ot1-|Es)SmBse%nhZC}z<75R|ZPz2FY&L8gPa`ow&D(0Q&*M*-s_(vZ zj?a$zL3sU;wQ9%pVY2f|a9pc*>PS>9NYt%ua*z}`uF~oPZbwH==p;%C*QBSA7R}%L zd4j>_vCnZ+hG?kN#L}6=Y;ndEoDaJu?lyo(FZ`EwY1-!%4wchPrfrrkWM^9` z{GzVNFIw~2O<_%UWwz55OJ`NsFufa7_uw=DA37$DDroOnY7;MNXGC7@);&Llx18xa zFP9T~4xJ|y(n5(cKD%k8$AHhKRBn&(Z}n0IF4`ImzS?KYqcAlvch#ssg?`DqwIhEUyIgm*sw)C_x_76 zJag`pH4fBJ1iAxU_caO53&K#jBuJA8ZdOxs!=i9GNaK`8^|&onlO6RH&Sh!?NN$k> z7xturlp3)ta^&VbsUi+vXgjSe^{^@VvvJP$nIz+E>5Cmg`R0VQ#o0{@VpPm9or(jw zMi?25fxh`f9ZSZ(vzSmZ2P0+53?UCyw4A#wrjYqjc~gJq=%bZGZ!|)GQ>W%}?DTJ* z&y&-m>+9Q-{^4=!_W1Uu^KgE+Ic?qStXkx5y_^^op--y*MPrh#H|Sd0%GqRT2}*%Z zdV1u5KJr~*nt#Qzlb(-I(H#Nw1>Nj{tOFm?kz&dPX>SaU$V}HS$n6qCm35O3hlGP} zBl32ha@F(lGA=6=bv`SBmSs4xL7$SQa~&}KGL_Zce8`9QjHFKBkhKKdo0x=766y_M ze`!(w<6EPCvKSfEV{U{I_2~tOI6ZxE&U!!Y>fG5^dR_nIY7nH&OzhNp`?sgr*<<_Q z@n8@q!^UfkXpLU0T>tBqa(0(weyw}Bj4!KyuFt6=*wa9cGDbHj$4jw1ZG&W8WTsz* z16Uje2%&&Q)s4?=2Yo&r3auZh8IZ+U3|J1plfTJQu;|DNUpw-#l%g=FBfP2c;m~m} z9@ANHI&)91=Y#CJ^Rj<_*FU{~t>LwY<5J7iD`h6G1sT4@kRotnh62pm7yz;iXxcfB 
zTtb(^bTDFck)uPh5^gos1=ToF@FI+>l|Da2WAdU^N>~m(-!Nf7g!x|~&72o6i1Uar zU{KgPJ(*e{Q8Ox|^OmSBp@D&(chS&ES4RR-f>(0ZT%gS3mFznzb0iiQ;>9P+H^%Jr z>qhPEc?Cxv_U>NPb^q>g=1&i9o3+Nh)94>v&(qDwvW<4jGHhzE_VSx^DzM)xoJs1C z9IkR{hr94egU@|(H-UAea!Fz~l+M#Toi0sqb&6bVME?pZRzFjrk8k}^Ooa8?ZT>_G z;N+>Ed%)s?R)<;Hp;&^VD&({I2&3hZCFpLcnhf^hmFzAO$O;ByBYkrWVx4`JdVzO+(2UxnxP0!fjtAPlw~CFFCPcnpv3 zsoUAe&f};a_PCL5VJd7T?~teRhp9ehYuSjc2k!LTWvT&KsJna%!J~`F7p5a~&=P-j zIfFvxpNnC94^tNf38HavER44}>W3l8TCxKH03Vwn0A4K<4elOge{wO?fUK<(hwd&;>XSb9n zS+zU`X1c-jHSc3jsc<9MOA;L_5R-Y0%9;^FY9XBz=nTioNgkY^^1!@Y^v>!oTt1MC zU{a)y0TxKiYD1_CUZx_K$9gJEI+)f&_Jv2fO|t_})FDfPL~W78LR-G0!eMH98i?M$ zE=8bm>Q}W*v%D5|j{HyaPf0VrjJwBibZ~SrJ)GQaK2x;WYT9<${o1usOm{}GrD6>- zOvi!<@VkRhpHj4=PbT;hePK8nhcK2Vw9V#Bo`Lq&dTu5-M%6+_C zRnqAVcroL|%&}akZ7z$iqX0v_$aA`=vTjaWzrFvwI~mC2B=M4y`_r?DeMX^cdgw&S z?bZI~6m_@9)uUzGewkg@5=BZyws=pik6`x^2y&=`wl!FE92@GG!(4ED4^QSJxyL?s z5xU?ALiGQI=>Awtx-eHlE~RCh?MWmz!EIYi1fnBbx2YK(C<3l!H*>X0eudT`bvRFO zT7{4EE+RQ|cx(;tU2@^5qWg;#>UQKmbWv9F1v>#cAILm2D&_+wL92j2yz(P0f*07? 
zVLrF^UDxMhL-U9e51V`6z6$pCA8)dgi#+@^zukLjH14wV;J`lZo?NAy`Pc#MEkgnu z2jQ10f#f8uf&5}<^VpPJ2pdhv8BHh(Aex}ordz1{iT|P^J{+&eA`g+4^W-Y_MHeW~ zFB6{-?F#zzncJvc9D$^8&M)eQ#5UGKbgjcQ6X`VAAOi|3r#rQgWls6vLv~_lhcyqP zyBn=585L#648w2pDvx_2kHKl73TCrWTSU@>A%N*GT^_nNM46&uapzmnoSi!G_UwAm zsf;FtgeR;CBAg2ced0Xj`g4)a+snRl+itoySL!M1PU<($aoW79XUDzP$K6*tl+l6O z(OszjumkF_yT{-E{fAnuRqw3+{zu)m+nw*f|L}Xfe%vx0@}x9B zZV|diX;`}bEF**ij0;`T25X^RC1bVFrZrt4HPdH9!GLUDY_SGiJMwV=!k5>Y8)7w; zWM6zv?{iyQG``LQ{f!xNK(u-`kCDqKu>MTOf{)=9#aHRy@|hq#4kF{VGZo+S_h7cwEq{4?1iGAm^1fBHwW^%uWb|3Bxs z@BVhb0rmFZ|Dj;I^ZoaKevem~|D?I%ZNKdE%U(Zc|LtaLjsLAy=ezy?U0y$KwEl+{@b<2 z8vet-+TZp6_jnQJlPLvMF-J>ikREXzX`oN`>|Y&QrxaoR@fPK_OrL_ypDYf|-PJaz1{A1y6yHSdWJl9f7zvo$~SMm@8Tq} z^}TbQA5+~rhh3cOPBHN=rlVB!Z`PjT#?M@TfiqEYJ`5;&|AI<(y;kWoDxDV5r&npU zDqT%ByVhML)4%_E6l!)Fzm!6J>37Th|NRSYeu4XbqkH@u{-bbz?e||BB!A!S|L^f4 z6PSnb9jc0K0`Hn+Al%{djoNm-wOub@;oflIzv#Xm;*s4&*JEK&Hh+fY_c%SaZfff9 z?fQ1D-fh}2HUDTr{ENOc>;515{b!B;I%F=s$N#^>tKk1{75n4$bNIh)x7YYj=%e=C z{{J2?Q~keXYLTs5hF;Ci%6>k6Twb z0+N3k_0@};cC)1~9^j-F7wi7GrGH-xEpiAs@Zf$ZNnXQv(?X5jcGs@=>UCP^r~fQ! 
zj(NJ^PJEZx5yl|U+2VTQt9Z-{k+)4*87ItYc}zm ze}`rE`&M(i-LsqBreWDX+8TU+eSdv_eSdv_{onTBfCK;l06@_H*X;;Y000000D!9i LpCdWv0H6Q>S--1~ literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/assets/v4_v0.25.2_rubygems_with_settings.dump b/meilisearch-http/tests/assets/v4_v0.25.2_rubygems_with_settings.dump new file mode 100644 index 0000000000000000000000000000000000000000..1dfc22e8e8c63a7caed02c0f7ba957e71333b4cb GIT binary patch literal 7511 zcmV-d9jM|TiwFP!000001MFQ(liNsgp0j^NZ6CbS21%jt1|2^1O4iK8XlC2$o*8>* zC%8qRNR|koICyl6Rw%-;;j3@<#lOJ5*x{4cXaAP<_-8n?3Lxq!qXz3yvk)7A3FyM*tP8e`J1jqDazk69h=%N$lo+=7vyie z&xP05nalspMIj3o&mwu3#*#((#q`4UZU16)k+OutHxT-oXXE^iWyv^?*6<`RlZ0hk zc=)oMZ^`A=r(_{B@=UcP*VqzTMLZ8Pu_=U1p=I4D%s4AVx+I}Y3no(N5XK+`p5=t4 z5y^RWD?*-=GRIcUF(-t4nnGh3xNbHgsf^&?g5_(Hu#_!1Dk#h@BAArArs)On?G}dD zbkK=82`=9L*`bAd1`YNLXL-(x*-GZcER2O7wX<@4U#;sBTYmzL6+1)<3t}OzebQsa zVAq;#-?pjSqnZv-O-QUnx~bbgX!P$mpqhNbU=p3_tc~ZwrO`rr-P(Z(|4Uc?JJ(ibvcga z<9wTjttDh^%}F$$;)AJfH;pBzPmg34MLAK;m8S)}-tbJ>FDhEOsVq1zWX8eriaQSL z(Idj`L>3J>*tTi*6wew`JjZbxJ{gUPa4EU8$TOin6*U#*1&J6;u zvvh0NuiGwdu3Y2-P#2er*3#Se+Y6+oWjn6t2Z3SiJss7y+z2iPy~h&#prK4LE7Nc_ z2A;KXz+aalT!Yuv&j~(<7`@@4SP1aJBGtDh+oZ2s`(W)*r>hzFs5vAFc7wD=@@V6wtKNpSIH0Ec@i<^A`@V5>`QU`<=ciPsc=@6RStQ?iR{q1BB1JeZf7)8${jh zAu2~@yL~id4#8#`G{8X)p%S9JieCG*s5=703J`$DYEEd_BN=cVC`AFvnpgS*I3JuV zD06}x$OcgP9fU@*EGqy3C~FdM*vp=VnHE&}8gwN>{riojLsCmNC?EeOl87Xe@o zLfv=lAia0!ypz07gP@x;4F@$?xY1yLEMp)+EYsPhoW~+xb$I@weon6H=LC@W0rmZ9 zuIvgT07@bPw;sz4*^NpVcs|T+P`5$TF=uIbeUo+*Kl7eN1cnD-YQKs(XPBw8d0Q4Q zI8vqZfGbci4oQ;EtueLDHSEFJAWma49|OwC$+Th^K2`(3O;CbQ`=R3Z9Ap6rWD{UXTuA>xIN30%kSK>8~ml=yV`LYnPC`2pWuAzF2oS%3_{BmjJ5BXQ{l4Ip7wen4(Fmd2=9nRSc|I_XwpGp&vlKS>S+=m5d;? 
zt#{%Mv5v1|%qoTBYWE#9wcs*Aswq1RPq9We$a+sy*d&YlmfO%2!}BhV4c{&W z#cH6ZjhGcu#_hO=1Re*4?+|LI@3Y285@05>3bFfc)^!D;3vSeXH92nA%rXs&zM4B@eBclUTg zUB9ZdU|klhC48h^dkUDVDwdTS9hg}hqu6w5EROeevdc8eq=*nJiNs7k4b&MO-X&Xn%!|6x92E^0lf{3@*=Xryq7h@zOIKa^P;48!+v0+SK`y5K1etVygp^j7y}j;H~m!eYU( zj#8knP}&5bB1Ke}5vMR+Ux^mA|>uQun zmPu`+o42@wp2Tz;;A75V%4h~d&(xjf`oB!&Vj)z~P2vBi`%T}Ut8U;izkp7N>h3XZ z&whuxxDzyLTA}NXfg35yMVTDh7rtf-4k%we!IB1sSIs+Q;)aKP=^WLiqTHee?PIAy zuVffj!x+%#wSCfMEjiqQz(5Y6In9sjd3P3s*og$)z}S{216pFVlA1bZpxsC`dZ%-EAY zN!p{i0qsJ?;hf$z8;b-jk)6chO5Gx=j)?}z=9sc8uz=E4&?s$HI0-tw_y9EcgNCZ{ zkKMPep@OkznGKg0ZQ9=M+12%-Eh3efAW5n)C|c1H%Q7fTMJgjW!hj-c?ITxgtJ+={ zUuZkh zS)mBabi4Kx+><>!%}6RX<2+oYGRrQyX@xrbvC|w@+g78?p<_wM_Nx>xu)O~E{LrqK zV>Ut+XvS>KmXIs9-FPO{F^_zO0I0eTioruxf)Q3nrgD<;?@F++9F*lik-Z)T{WoyF z@es`k+!?^2I!#O)=}}~C;etEV^29zg@Si#WT_lDqmmffcKzO5Y3ZkCY}4h}fyEbA0k zE>pr;z8x2GjOvqh&JljT?}mpE`_}wU-6sSYbpR0WJ{&+Vq=? zPFbb9Pp_2*@SlPdKOlS$H88d}m%X)Ym4BTI@0o(^KfpPg|8S5^^c7s6# z4+>#KUcg*aq!~VgQwJyxNg{k^^)T?6zlIzo1#jP9hVF{f7jpBp8dJ@hZ&UEW33Bf3 z2zdv542vCY7t^$ie|0)8tw=CKgIQ?TW019R>lRn-qiufXM9T@ zsolgv@0X}2yOT+3H<71NU99Ma=rX;p#a+sfl; z|QYTo~qexbHp9@euwq_51&g-~X^Y+xy`E zPjP_@ER)SwyK>rBSc{G3Up4o$v36TIp=a}J@pq8fQo{ooQQx!;qjo8WpW*P&7-qwL zXJZBv-?mI2oz0KUh%>#k=>CX)|3~9L&v8HG|EIWW`MTh4=!9T;9zz@pbj#Vp?P+fBP1YvN|OW!1+z} zR&u1{5QQK#HvAjIpW4RMq7y4HJP+g0=*`9c*C~399XhpwiQ$4io4WCJ8HPN5suNAI>7`2cVKy^5}DQ!FtoPF{IeICX&HfWjL2+vV%vuA2B#5Ots|D#c8tDv z_98PN4R((anPp8p>bl;EtgO}%ouoHJ=AOIAfFqW3l*mkD;<$mY0NhDLrga3eskX?R zvlp4`n#NHgGrWm!-rKd@yYaKECZY(?d>Ia4JPLC3q;ZAT1%{vWZtaZfm zj5f&k&c6CV9m}$}WAyRFfJq(0JWcgurz56M+af#r>IeRki*uBfQGa6ifG$ra zGNmJ6XC0B9ef7ieY{zJo$B4|bCT@T~ z-F+&NDIKw`pcBi^y!v54vg+90Q6e+V3I4BQ!HGbAr6ZtG?adeO%&Q+DGS@z4^949a z)j<(z{vao;{=dDm>22eP!Ek?zPYqBkhU9QQbn7oD(4xnnP+|o&>ezv#G|9g&xgSn; ziY7d??15fg1bMxX_I-DT^9+Y$k!ucFru_m#N3=n{^}74Xa~uH(+`m> zrDh@d%==mC5}badIcX{x z`Z7MS=JwR$738lTDctv1xP+%46>~>JU&cCp)d@;*Y8W?EVl_oF?5n_^$6BFNEV)cAW&|GAV2GB^G?X<6y&cS!It+VpTpA+2^W(k{K%Sh 
zP^Dq)PT|Mu5$I_MKf==wxp~|Tib4gcm#SLW~Fm@`jNW43_(5}GRh#1*lXp>>Jb@L z?8A>7o_-`9 zO_Ar!)R#%joeQ<%CYgGKdL2l9j!i#QGOVSVp0bpt)^h1?;m6{UvAAE3O+U28ePt>J zV1-&BfSKK}@a7R5B1Vc$KUA_r+zh}9buNUE*^4o*9?2c{lP?sTeu!k1WGroH7;zpZ zF++07W86Ffc@L~8HvJIEN{4O%R(PN?K~33ft@P@VT=IUtjAGLdl}sxVm>R}89gB)z z!>Ar966Wu-g()`uP{|TKr6zt|GSsd`w`y^dZ1xDWeUMMF>4!)r@dR2D2~aDiv)}ApO3s&?Bdf9XF3Kw|$V0;pqpaTrJBeK>1NZYJN}I z>=Eii(RB<@KcMzxDeeac=rXeH=(MxalSjt2(ion807J8+>wvCmXgOnXYZzybv~dFw z3{O7*TPH*6DQHE@-g-eNEWCPzMv@QV=?7^2JVWX!98yyl(Y+SlZXQ{d+I%5dc>0kf zaJSUS7s>Keh}di8%jOXpf_w;1KN3?hY*MUu5VUS=C%1-i@kpXUvhegHVI@mnmXy-A z#>s2GZ1V{6K9K;z(+~Mx6%9k_q-lknIA^E6Z1qUyVfS_jPCuG-@Q^Io!b!n;(#=er z#$~HVavzpaKydmYlCkc+W05TPiE<7J-2$%+;tMz3+%>XBN< zh%q?*5XnGZrtl-@iLirL1@GA-wM!GQPwjfA~^0|7G5#T*|OnF z(j^U4dh<$R+$=(ApQ{s`d^GLc(K5Z6w4Ot4sb;rUaj^&uX{W)#hbUI5S#k)H?$;;J zV*$TsY_GM{o-%w1q8EIVH#?XsJ z^022tc+0OvymoEhcA}}n8tL%Rqv))LO|kjp?p0K0vk0dBGiG8QznrdD z`5BjA-`&b@*H13%C;I;O?jm>n)9Lod>-Bv1>izYkck1Th`1T~1xx9a?_g~)Ko-VJ? zckkxk@x#rZr}JfbcYVAb@8#!wc({K&-d=8A%0Hcs=hwd|gPv@I& h8AdPE%TtQ-chAT3@q9cV&&U6E`~_*5KbQdU004!xmbCx? 
literal 0 HcmV?d00001 diff --git a/meilisearch-http/tests/common/mod.rs b/meilisearch-http/tests/common/mod.rs index e734b3621..b076b0ea5 100644 --- a/meilisearch-http/tests/common/mod.rs +++ b/meilisearch-http/tests/common/mod.rs @@ -3,7 +3,7 @@ pub mod server; pub mod service; pub use index::{GetAllDocumentsOptions, GetDocumentOptions}; -pub use server::Server; +pub use server::{default_settings, Server}; /// Performs a search test on both post and get routes #[macro_export] diff --git a/meilisearch-http/tests/common/server.rs b/meilisearch-http/tests/common/server.rs index b439ec52e..884036228 100644 --- a/meilisearch-http/tests/common/server.rs +++ b/meilisearch-http/tests/common/server.rs @@ -79,9 +79,9 @@ impl Server { } } - pub async fn new_with_options(options: Opt) -> Self { - let meilisearch = setup_meilisearch(&options).unwrap(); - let auth = AuthController::new(&options.db_path, &options.master_key).unwrap(); + pub async fn new_with_options(options: Opt) -> Result { + let meilisearch = setup_meilisearch(&options)?; + let auth = AuthController::new(&options.db_path, &options.master_key)?; let service = Service { meilisearch, auth, @@ -89,10 +89,10 @@ impl Server { api_key: None, }; - Server { + Ok(Server { service, _dir: None, - } + }) } /// Returns a view to an index. There is no guarantee that the index exists. 
diff --git a/meilisearch-http/tests/dumps.rs b/meilisearch-http/tests/dumps.rs deleted file mode 100644 index 843347bde..000000000 --- a/meilisearch-http/tests/dumps.rs +++ /dev/null @@ -1,22 +0,0 @@ -#![allow(dead_code)] -mod common; - -use crate::common::Server; -use serde_json::json; - -#[actix_rt::test] -async fn get_unexisting_dump_status() { - let server = Server::new().await; - - let (response, code) = server.get_dump_status("foobar").await; - assert_eq!(code, 404); - - let expected_response = json!({ - "message": "Dump `foobar` not found.", - "code": "dump_not_found", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#dump_not_found" - }); - - assert_eq!(response, expected_response); -} diff --git a/meilisearch-http/tests/dumps/data.rs b/meilisearch-http/tests/dumps/data.rs new file mode 100644 index 000000000..a5d7f3426 --- /dev/null +++ b/meilisearch-http/tests/dumps/data.rs @@ -0,0 +1,68 @@ +use std::path::PathBuf; + +use manifest_dir_macros::exist_relative_path; + +pub enum GetDump { + MoviesRawV1, + MoviesWithSettingsV1, + RubyGemsWithSettingsV1, + + MoviesRawV2, + MoviesWithSettingsV2, + RubyGemsWithSettingsV2, + + MoviesRawV3, + MoviesWithSettingsV3, + RubyGemsWithSettingsV3, + + MoviesRawV4, + MoviesWithSettingsV4, + RubyGemsWithSettingsV4, +} + +impl GetDump { + pub fn path(&self) -> PathBuf { + match self { + GetDump::MoviesRawV1 => { + exist_relative_path!("tests/assets/v1_v0.20.0_movies.dump").into() + } + GetDump::MoviesWithSettingsV1 => { + exist_relative_path!("tests/assets/v1_v0.20.0_movies_with_settings.dump").into() + } + GetDump::RubyGemsWithSettingsV1 => { + exist_relative_path!("tests/assets/v1_v0.20.0_rubygems_with_settings.dump").into() + } + + GetDump::MoviesRawV2 => { + exist_relative_path!("tests/assets/v2_v0.21.1_movies.dump").into() + } + GetDump::MoviesWithSettingsV2 => { + exist_relative_path!("tests/assets/v2_v0.21.1_movies_with_settings.dump").into() + } + + GetDump::RubyGemsWithSettingsV2 => { + 
exist_relative_path!("tests/assets/v2_v0.21.1_rubygems_with_settings.dump").into() + } + + GetDump::MoviesRawV3 => { + exist_relative_path!("tests/assets/v3_v0.24.0_movies.dump").into() + } + GetDump::MoviesWithSettingsV3 => { + exist_relative_path!("tests/assets/v3_v0.24.0_movies_with_settings.dump").into() + } + GetDump::RubyGemsWithSettingsV3 => { + exist_relative_path!("tests/assets/v3_v0.24.0_rubygems_with_settings.dump").into() + } + + GetDump::MoviesRawV4 => { + exist_relative_path!("tests/assets/v4_v0.25.2_movies.dump").into() + } + GetDump::MoviesWithSettingsV4 => { + exist_relative_path!("tests/assets/v4_v0.25.2_movies_with_settings.dump").into() + } + GetDump::RubyGemsWithSettingsV4 => { + exist_relative_path!("tests/assets/v4_v0.25.2_rubygems_with_settings.dump").into() + } + } + } +} diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs new file mode 100644 index 000000000..8395ec3aa --- /dev/null +++ b/meilisearch-http/tests/dumps/mod.rs @@ -0,0 +1,645 @@ +mod data; + +use crate::common::{default_settings, Server}; +use meilisearch_http::Opt; +use serde_json::json; + +use self::data::GetDump; + +#[actix_rt::test] +async fn get_unexisting_dump_status() { + let server = Server::new().await; + + let (response, code) = server.get_dump_status("foobar").await; + assert_eq!(code, 404); + + let expected_response = json!({ + "message": "Dump `foobar` not found.", + "code": "dump_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#dump_not_found" + }); + + assert_eq!(response, expected_response); +} + +// all the following test are ignored on windows. 
See #2364 +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v1() { + let temp = tempfile::tempdir().unwrap(); + + for path in [ + GetDump::MoviesRawV1.path(), + GetDump::MoviesWithSettingsV1.path(), + GetDump::RubyGemsWithSettingsV1.path(), + ] { + let options = Opt { + import_dump: Some(path), + ..default_settings(temp.path()) + }; + let error = Server::new_with_options(options) + .await + .map(|_| ()) + .unwrap_err(); + + assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards."); + } +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v2_movie_raw() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::MoviesRawV2.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("indexUID")); + assert_eq!(indexes[0]["name"], json!("indexUID")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("indexUID"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, 
"minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks, + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(100, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000}) + ); + + let (document, code) = index.get_document(500, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. 
Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + ); + + let (document, code) = index.get_document(10006, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v2_movie_with_settings() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::MoviesWithSettingsV2.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("indexUID")); + assert_eq!(indexes[0]["name"], json!("indexUID")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("indexUID"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", 
"overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks, + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(100, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. 
To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) + ); + + let (document, code) = index.get_document(500, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + ); + + let (document, code) = index.get_document(10006, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. 
As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v2_rubygems_with_settings() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::RubyGemsWithSettingsV2.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("rubygems")); + assert_eq!(indexes[0]["name"], json!("rubygems")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("rubygems"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks["results"][0], + json!({"uid": 92, "indexUid": 
"rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + ); + assert_eq!( + tasks["results"][92], + json!({"uid": 0, "indexUid": "rubygems", "status": "succeeded", "type": "settingsUpdate", "details": {"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "rankingRules": ["typo", "words", "desc(fame)", "proximity", "attribute", "exactness", "desc(total_downloads)"]}, "duration": "PT0.008886S", "enqueuedAt": "2021-09-08T08:40:28.660188Z", "startedAt": "2021-09-08T08:40:28.660766Z", "finishedAt": "2021-09-08T08:40:28.669652Z"}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(188040, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "version": "0.15.2", "total_downloads": "7465"}) + ); + + let (document, code) = index.get_document(191940, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "version": "1.1.0", "total_downloads": "9394"}) + ); + + let (document, code) = index.get_document(159227, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. 
It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "version": "0.1.0", "total_downloads": "1007"}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v3_movie_raw() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::MoviesRawV3.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("indexUID")); + assert_eq!(indexes[0]["name"], json!("indexUID")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("indexUID"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks, + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", 
"startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(100, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000}) + ); + + let (document, code) = index.get_document(500, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + ); + + let (document, code) = index.get_document(10006, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. 
As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v3_movie_with_settings() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::MoviesWithSettingsV3.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("indexUID")); + assert_eq!(indexes[0]["name"], json!("indexUID")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("indexUID"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks, + json!({ "results": [{ "uid": 1, "indexUid": 
"indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(100, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) + ); + + let (document, code) = index.get_document(500, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. 
Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + ); + + let (document, code) = index.get_document(10006, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v3_rubygems_with_settings() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::RubyGemsWithSettingsV3.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("rubygems")); + assert_eq!(indexes[0]["name"], json!("rubygems")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("rubygems"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], 
"filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks["results"][0], + json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + ); + assert_eq!( + tasks["results"][92], + json!({"uid": 0, "indexUid": "rubygems", "status": "succeeded", "type": "settingsUpdate", "details": {"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "rankingRules": ["typo", "words", "desc(fame)", "proximity", "attribute", "exactness", "desc(total_downloads)"]}, "duration": "PT0.008886S", "enqueuedAt": "2021-09-08T08:40:28.660188Z", "startedAt": "2021-09-08T08:40:28.660766Z", "finishedAt": "2021-09-08T08:40:28.669652Z"}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(188040, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. 
See https://github.com/meilisearch/MeiliSearch", "version": "0.15.2", "total_downloads": "7465"}) + ); + + let (document, code) = index.get_document(191940, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "version": "1.1.0", "total_downloads": "9394"}) + ); + + let (document, code) = index.get_document(159227, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({"name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "version": "0.1.0", "total_downloads": "1007"}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v4_movie_raw() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::MoviesRawV4.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("indexUID")); + assert_eq!(indexes[0]["name"], json!("indexUID")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("indexUID"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({ 
"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks, + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(100, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000}) + ); + + let (document, code) = index.get_document(500, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. 
Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + ); + + let (document, code) = index.get_document(10006, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v4_movie_with_settings() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::MoviesWithSettingsV4.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("indexUID")); + assert_eq!(indexes[0]["name"], json!("indexUID")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("indexUID"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", 
"overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks, + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(100, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. 
To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 }) + ); + + let (document, code) = index.get_document(500, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000}) + ); + + let (document, code) = index.get_document(10006, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. 
As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600}) + ); +} + +#[actix_rt::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn import_dump_v4_rubygems_with_settings() { + let temp = tempfile::tempdir().unwrap(); + + let options = Opt { + import_dump: Some(GetDump::RubyGemsWithSettingsV4.path()), + ..default_settings(temp.path()) + }; + let server = Server::new_with_options(options).await.unwrap(); + + let (indexes, code) = server.list_indexes().await; + assert_eq!(code, 200); + + assert_eq!(indexes.as_array().unwrap().len(), 1); + assert_eq!(indexes[0]["uid"], json!("rubygems")); + assert_eq!(indexes[0]["name"], json!("rubygems")); + assert_eq!(indexes[0]["primaryKey"], json!("id")); + + let index = server.index("rubygems"); + + let (stats, code) = index.stats().await; + assert_eq!(code, 200); + assert_eq!( + stats, + json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }}) + ); + + let (settings, code) = index.settings().await; + assert_eq!(code, 200); + assert_eq!( + settings, + json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }}) + ); + + let (tasks, code) = index.list_tasks().await; + assert_eq!(code, 200); + assert_eq!( + tasks["results"][0], + json!({ "uid": 92, "indexUid": 
"rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + ); + assert_eq!( + tasks["results"][92], + json!({ "uid": 0, "indexUid": "rubygems", "status": "succeeded", "type": "settingsUpdate", "details": {"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "rankingRules": ["typo", "words", "desc(fame)", "proximity", "attribute", "exactness", "desc(total_downloads)"]}, "duration": "PT0.008886S", "enqueuedAt": "2021-09-08T08:40:28.660188Z", "startedAt": "2021-09-08T08:40:28.660766Z", "finishedAt": "2021-09-08T08:40:28.669652Z"}) + ); + + // finally we're just going to check that we can still get a few documents by id + let (document, code) = index.get_document(188040, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "version": "0.15.2", "total_downloads": "7465"}) + ); + + let (document, code) = index.get_document(191940, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "version": "1.1.0", "total_downloads": "9394"}) + ); + + let (document, code) = index.get_document(159227, None).await; + assert_eq!(code, 200); + assert_eq!( + document, + json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. 
It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "version": "0.1.0", "total_downloads": "1007"}) + ); +} diff --git a/meilisearch-http/tests/integration.rs b/meilisearch-http/tests/integration.rs index 45b632520..25b4e49b6 100644 --- a/meilisearch-http/tests/integration.rs +++ b/meilisearch-http/tests/integration.rs @@ -2,6 +2,7 @@ mod auth; mod common; mod dashboard; mod documents; +mod dumps; mod index; mod search; mod settings; diff --git a/meilisearch-http/tests/snapshot/mod.rs b/meilisearch-http/tests/snapshot/mod.rs index 5c626a888..a0645733e 100644 --- a/meilisearch-http/tests/snapshot/mod.rs +++ b/meilisearch-http/tests/snapshot/mod.rs @@ -41,7 +41,7 @@ async fn perform_snapshot() { ..default_settings(temp.path()) }; - let server = Server::new_with_options(options).await; + let server = Server::new_with_options(options).await.unwrap(); let index = server.index("test"); index @@ -67,7 +67,7 @@ async fn perform_snapshot() { ..default_settings(temp.path()) }; - let snapshot_server = Server::new_with_options(options).await; + let snapshot_server = Server::new_with_options(options).await.unwrap(); verify_snapshot!(server, snapshot_server, |server| => server.list_indexes(), diff --git a/meilisearch-lib/src/index_controller/dump_actor/mod.rs b/meilisearch-lib/src/index_controller/dump_actor/mod.rs index 16e328e3b..00be3a371 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/mod.rs +++ b/meilisearch-lib/src/index_controller/dump_actor/mod.rs @@ -256,13 +256,8 @@ fn extract_dump( .parent() .map(ToOwned::to_owned) .unwrap_or_else(|| ".".into()); - if cfg!(windows) { - std::env::set_var("TMP", temp_path); - } else { - std::env::set_var("TMPDIR", temp_path); - } - let tmp_src = tempfile::tempdir()?; + let tmp_src = tempfile::tempdir_in(temp_path)?; let tmp_src_path = tmp_src.path(); from_tar_gz(&src_path, tmp_src_path)?; From 
f65116b2085ad403f0d45c5818da20eb34de6949 Mon Sep 17 00:00:00 2001 From: Irevoire Date: Tue, 24 May 2022 15:03:11 +0200 Subject: [PATCH 014/185] chore(ci): uncomment clippy from the ci check The issue has been fixed in the latest release of rust. See https://github.com/rust-lang/rust-clippy/issues/8662 Fix #2305 --- bors.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bors.toml b/bors.toml index d24e6c09b..b357e8d61 100644 --- a/bors.toml +++ b/bors.toml @@ -2,7 +2,7 @@ status = [ 'Tests on ubuntu-18.04', 'Tests on macos-latest', 'Tests on windows-latest', - # 'Run Clippy', + 'Run Clippy', 'Run Rustfmt', 'Run tests in debug', ] From aa50acb0314dcf4cc8ee696e1d4088c41e4d907a Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 16 May 2022 19:50:45 +0200 Subject: [PATCH 015/185] make Task index_uid an option Not all task relate to an index. Tasks that don't have an index_uid set to None --- meilisearch-http/src/task.rs | 8 ++-- .../index_controller/dump_actor/compat/v3.rs | 2 +- meilisearch-lib/src/index_controller/mod.rs | 11 ++++-- meilisearch-lib/src/index_resolver/mod.rs | 19 +++++----- meilisearch-lib/src/tasks/scheduler.rs | 5 ++- meilisearch-lib/src/tasks/task.rs | 6 ++- meilisearch-lib/src/tasks/task_store/mod.rs | 26 +++++++++---- meilisearch-lib/src/tasks/task_store/store.rs | 37 +++++++++++++++---- 8 files changed, 79 insertions(+), 35 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 7179b10db..c8e269e56 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -137,7 +137,7 @@ fn serialize_duration( #[serde(rename_all = "camelCase")] pub struct TaskView { uid: TaskId, - index_uid: String, + index_uid: Option, status: TaskStatus, #[serde(rename = "type")] task_type: TaskType, @@ -313,7 +313,7 @@ impl From for TaskView { Self { uid: id, - index_uid: index_uid.into_inner(), + index_uid: index_uid.map(|u| u.into_inner()), status, task_type, details, @@ -342,7 +342,7 @@ impl 
From> for TaskListView { #[serde(rename_all = "camelCase")] pub struct SummarizedTaskView { uid: TaskId, - index_uid: String, + index_uid: Option, status: TaskStatus, #[serde(rename = "type")] task_type: TaskType, @@ -365,7 +365,7 @@ impl From for SummarizedTaskView { Self { uid: other.id, - index_uid: other.index_uid.to_string(), + index_uid: other.index_uid.map(|u| u.into_inner()), status: TaskStatus::Enqueued, task_type: other.content.into(), enqueued_at, diff --git a/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs b/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs index 7cd670bad..befd70963 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs +++ b/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs @@ -187,7 +187,7 @@ impl From<(UpdateStatus, String, TaskId)> for Task { // Dummy task let mut task = Task { id: task_id, - index_uid: IndexUid::new(uid).unwrap(), + index_uid: Some(IndexUid::new(uid).unwrap()), content: TaskContent::IndexDeletion, events: Vec::new(), }; diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index a302f12da..7ba91dfca 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -419,7 +419,7 @@ where Update::UpdateIndex { primary_key } => TaskContent::IndexUpdate { primary_key }, }; - let task = self.task_store.register(uid, content).await?; + let task = self.task_store.register(Some(uid), content).await?; self.scheduler.read().await.notify(); Ok(task) @@ -569,7 +569,12 @@ where // Check if the currently indexing update is from our index. 
let is_indexing = processing_tasks .first() - .map(|task| task.index_uid.as_str() == uid) + .map(|task| { + task.index_uid + .as_ref() + .map(|u| u.as_str() == uid) + .unwrap_or(false) + }) .unwrap_or_default(); let index = self.index_resolver.get_index(uid).await?; @@ -605,7 +610,7 @@ where // Check if the currently indexing update is from our index. stats.is_indexing = processing_tasks .first() - .map(|p| p.index_uid.as_str() == index_uid) + .and_then(|p| p.index_uid.as_ref().map(|u| u.as_str() == index_uid)) .or(Some(false)); indexes.insert(index_uid, stats); diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 8ca3efdc6..9db808d3f 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -204,7 +204,7 @@ where match batch.tasks.first() { Some(Task { - index_uid, + index_uid: Some(ref index_uid), id, content: TaskContent::DocumentAddition { @@ -285,7 +285,7 @@ where TaskContent::DocumentAddition { .. } => panic!("updates should be handled by batch"), TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => { let ids = ids.clone(); - let index = self.get_index(index_uid.into_inner()).await?; + let index = self.get_index(index_uid.unwrap().into_inner()).await?; let DocumentDeletionResult { deleted_documents, .. @@ -294,7 +294,7 @@ where Ok(TaskResult::DocumentDeletion { deleted_documents }) } TaskContent::DocumentDeletion(DocumentDeletion::Clear) => { - let index = self.get_index(index_uid.into_inner()).await?; + let index = self.get_index(index_uid.unwrap().into_inner()).await?; let deleted_documents = spawn_blocking(move || -> IndexResult { let number_documents = index.stats()?.number_of_documents; index.clear_documents()?; @@ -310,9 +310,10 @@ where allow_index_creation, } => { let index = if *is_deletion || !*allow_index_creation { - self.get_index(index_uid.into_inner()).await? + self.get_index(index_uid.unwrap().into_inner()).await? 
} else { - self.get_or_create_index(index_uid, task.id).await? + self.get_or_create_index(index_uid.unwrap(), task.id) + .await? }; let settings = settings.clone(); @@ -321,7 +322,7 @@ where Ok(TaskResult::Other) } TaskContent::IndexDeletion => { - let index = self.delete_index(index_uid.into_inner()).await?; + let index = self.delete_index(index_uid.unwrap().into_inner()).await?; let deleted_documents = spawn_blocking(move || -> IndexResult { Ok(index.stats()?.number_of_documents) @@ -331,7 +332,7 @@ where Ok(TaskResult::ClearAll { deleted_documents }) } TaskContent::IndexCreation { primary_key } => { - let index = self.create_index(index_uid, task.id).await?; + let index = self.create_index(index_uid.unwrap(), task.id).await?; if let Some(primary_key) = primary_key { let primary_key = primary_key.clone(); @@ -341,7 +342,7 @@ where Ok(TaskResult::Other) } TaskContent::IndexUpdate { primary_key } => { - let index = self.get_index(index_uid.into_inner()).await?; + let index = self.get_index(index_uid.unwrap().into_inner()).await?; if let Some(primary_key) = primary_key { let primary_key = primary_key.clone(); @@ -503,7 +504,7 @@ mod test { proptest! 
{ #[test] fn test_process_task( - task in any::(), + task in any::().prop_filter("uid must be Some", |t| t.index_uid.is_some()), index_exists in any::(), index_op_fails in any::(), any_int in any::(), diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 0e540a646..94de2a5fd 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -125,7 +125,8 @@ struct TaskQueue { impl TaskQueue { fn insert(&mut self, task: Task) { - let uid = task.index_uid.into_inner(); + // TODO(marin): The index uid should be remaped to a task queue identifier here + let uid = task.index_uid.unwrap().into_inner(); let id = task.id; let kind = match task.content { TaskContent::DocumentAddition { @@ -443,7 +444,7 @@ mod test { fn gen_task(id: TaskId, index_uid: &str, content: TaskContent) -> Task { Task { id, - index_uid: IndexUid::new_unchecked(index_uid), + index_uid: Some(IndexUid::new_unchecked(index_uid)), content, events: vec![], } diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index ecbd4ca62..d7a73a2ae 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -74,7 +74,11 @@ pub enum TaskEvent { #[cfg_attr(test, derive(proptest_derive::Arbitrary))] pub struct Task { pub id: TaskId, - pub index_uid: IndexUid, + /// The name of the index the task is targeting. If it isn't targeting any idex (i.e Dump task) + /// then this is None + // TODO: when next forward breaking dumps, it would be a good idea to move this field inside of + // the TaskContent. 
+ pub index_uid: Option, pub content: TaskContent, pub events: Vec, } diff --git a/meilisearch-lib/src/tasks/task_store/mod.rs b/meilisearch-lib/src/tasks/task_store/mod.rs index bdcd13f37..bde0f6360 100644 --- a/meilisearch-lib/src/tasks/task_store/mod.rs +++ b/meilisearch-lib/src/tasks/task_store/mod.rs @@ -30,10 +30,14 @@ pub struct TaskFilter { impl TaskFilter { fn pass(&self, task: &Task) -> bool { - self.indexes - .as_ref() - .map(|indexes| indexes.contains(&*task.index_uid)) - .unwrap_or(true) + match task.index_uid { + Some(ref index_uid) => self + .indexes + .as_ref() + .map(|indexes| indexes.contains(index_uid.as_str())) + .unwrap_or(true), + None => false, + } } /// Adds an index to the filter, so the filter must match this index. @@ -66,7 +70,11 @@ impl TaskStore { Ok(Self { store }) } - pub async fn register(&self, index_uid: IndexUid, content: TaskContent) -> Result { + pub async fn register( + &self, + index_uid: Option, + content: TaskContent, + ) -> Result { debug!("registering update: {:?}", content); let store = self.store.clone(); let task = tokio::task::spawn_blocking(move || -> Result { @@ -305,7 +313,11 @@ pub mod test { } } - pub async fn register(&self, index_uid: IndexUid, content: TaskContent) -> Result { + pub async fn register( + &self, + index_uid: Option, + content: TaskContent, + ) -> Result { match self { Self::Real(s) => s.register(index_uid, content).await, Self::Mock(_m) => todo!(), @@ -335,7 +347,7 @@ pub mod test { let gen_task = |id: TaskId| Task { id, - index_uid: IndexUid::new_unchecked("test"), + index_uid: Some(IndexUid::new_unchecked("test")), content: TaskContent::IndexCreation { primary_key: None }, events: Vec::new(), }; diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index 4ff986d8b..912047d1e 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -109,7 +109,8 @@ impl Store { pub fn put(&self, txn: 
&mut RwTxn, task: &Task) -> Result<()> { self.tasks.put(txn, &BEU64::new(task.id), task)?; self.uids_task_ids - .put(txn, &(&task.index_uid, task.id), &())?; + // TODO(marin): The index uid should be remaped to a task queue identifier here + .put(txn, &(&task.index_uid.as_ref().unwrap(), task.id), &())?; Ok(()) } @@ -325,7 +326,7 @@ pub mod test { let tasks = (0..100) .map(|_| Task { id: rand::random(), - index_uid: IndexUid::new_unchecked("test"), + index_uid: Some(IndexUid::new_unchecked("test")), content: TaskContent::IndexDeletion, events: vec![], }) @@ -356,14 +357,14 @@ pub mod test { let task_1 = Task { id: 1, - index_uid: IndexUid::new_unchecked("test"), + index_uid: Some(IndexUid::new_unchecked("test")), content: TaskContent::IndexDeletion, events: vec![], }; let task_2 = Task { id: 0, - index_uid: IndexUid::new_unchecked("test1"), + index_uid: Some(IndexUid::new_unchecked("test1")), content: TaskContent::IndexDeletion, events: vec![], }; @@ -379,18 +380,28 @@ pub mod test { txn.abort().unwrap(); assert_eq!(tasks.len(), 1); - assert_eq!(&*tasks.first().unwrap().index_uid, "test"); + assert_eq!( + tasks + .first() + .as_ref() + .unwrap() + .index_uid + .as_ref() + .unwrap() + .as_str(), + "test" + ); // same thing but invert the ids let task_1 = Task { id: 0, - index_uid: IndexUid::new_unchecked("test"), + index_uid: Some(IndexUid::new_unchecked("test")), content: TaskContent::IndexDeletion, events: vec![], }; let task_2 = Task { id: 1, - index_uid: IndexUid::new_unchecked("test1"), + index_uid: Some(IndexUid::new_unchecked("test1")), content: TaskContent::IndexDeletion, events: vec![], }; @@ -405,7 +416,17 @@ pub mod test { let tasks = store.list_tasks(&txn, None, Some(filter), None).unwrap(); assert_eq!(tasks.len(), 1); - assert_eq!(&*tasks.first().unwrap().index_uid, "test"); + assert_eq!( + &*tasks + .first() + .as_ref() + .unwrap() + .index_uid + .as_ref() + .unwrap() + .as_str(), + "test" + ); } proptest! 
{ From 5a5066023baa514a6779fc77a6330cfef28ac4af Mon Sep 17 00:00:00 2001 From: ad hoc Date: Mon, 16 May 2022 20:16:23 +0200 Subject: [PATCH 016/185] introduce TaskListIdentifier --- meilisearch-http/src/task.rs | 1 + meilisearch-lib/src/index_resolver/mod.rs | 7 ++- meilisearch-lib/src/tasks/scheduler.rs | 54 ++++++++++++++++------- meilisearch-lib/src/tasks/task.rs | 10 +++++ 4 files changed, 54 insertions(+), 18 deletions(-) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index c8e269e56..56a181d29 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -216,6 +216,7 @@ impl From for TaskView { TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }), ), + TaskContent::Dump { path: _ } => todo!(), }; // An event always has at least one event: "Created" diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 9db808d3f..3b8bdd631 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -351,6 +351,7 @@ where Ok(TaskResult::Other) } + TaskContent::Dump { path: _ } => Ok(TaskResult::Other), } } @@ -504,7 +505,7 @@ mod test { proptest! { #[test] fn test_process_task( - task in any::().prop_filter("uid must be Some", |t| t.index_uid.is_some()), + task in any::().prop_filter("IndexUid should be Some", |s| s.index_uid.is_some()), index_exists in any::(), index_op_fails in any::(), any_int in any::(), @@ -580,6 +581,7 @@ mod test { .then(move |_| result()); } } + TaskContent::Dump { path: _ } => { } } mocker.when::<(), IndexResult>("stats") @@ -608,6 +610,7 @@ mod test { } // if index already exists, create index will return an error TaskContent::IndexCreation { .. } if index_exists => (), + TaskContent::Dump { .. 
} => (), // The index exists and get should be called _ if index_exists => { index_store @@ -648,7 +651,7 @@ mod test { // Test for some expected output scenarios: // Index creation and deletion cannot fail because of a failed index op, since they // don't perform index ops. - if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None }) + if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. }) || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. })) || (!index_exists && matches!(task.content, TaskContent::IndexDeletion | TaskContent::DocumentDeletion(_) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 94de2a5fd..67aa6d8e5 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -21,8 +21,13 @@ use super::{TaskFilter, TaskPerformer, TaskStore}; #[derive(Eq, Debug, Clone, Copy)] enum TaskType { - DocumentAddition { number: usize }, - DocumentUpdate { number: usize }, + DocumentAddition { + number: usize, + }, + DocumentUpdate { + number: usize, + }, + /// Any other kind of task, including Dumps Other, } @@ -63,7 +68,7 @@ impl Ord for PendingTask { #[derive(Debug)] struct TaskList { - index: String, + id: TaskListIdentifier, tasks: BinaryHeap, } @@ -82,9 +87,9 @@ impl DerefMut for TaskList { } impl TaskList { - fn new(index: String) -> Self { + fn new(id: TaskListIdentifier) -> Self { Self { - index, + id, tasks: Default::default(), } } @@ -92,7 +97,7 @@ impl TaskList { impl PartialEq for TaskList { fn eq(&self, other: &Self) -> bool { - self.index == other.index + self.id == other.id } } @@ -100,11 +105,20 @@ impl Eq for TaskList {} impl Ord for TaskList { fn cmp(&self, other: &Self) -> Ordering { - 
match (self.peek(), other.peek()) { - (None, None) => Ordering::Equal, - (None, Some(_)) => Ordering::Less, - (Some(_), None) => Ordering::Greater, - (Some(lhs), Some(rhs)) => lhs.cmp(rhs), + match (&self.id, &other.id) { + (TaskListIdentifier::Index(_), TaskListIdentifier::Index(_)) => { + match (self.peek(), other.peek()) { + (None, None) => Ordering::Equal, + (None, Some(_)) => Ordering::Less, + (Some(_), None) => Ordering::Greater, + (Some(lhs), Some(rhs)) => lhs.cmp(rhs), + } + } + (TaskListIdentifier::Index(_), TaskListIdentifier::Dump) => Ordering::Greater, + (TaskListIdentifier::Dump, TaskListIdentifier::Index(_)) => Ordering::Less, + (TaskListIdentifier::Dump, TaskListIdentifier::Dump) => { + unreachable!("There should be only one Dump task list") + } } } } @@ -115,19 +129,27 @@ impl PartialOrd for TaskList { } } +#[derive(PartialEq, Eq, Hash, Debug, Clone)] +enum TaskListIdentifier { + Index(String), + Dump, +} + #[derive(Default)] struct TaskQueue { /// Maps index uids to their TaskList, for quick access - index_tasks: HashMap>>, + index_tasks: HashMap>>, /// A queue that orders TaskList by the priority of their fist update queue: BinaryHeap>>, } impl TaskQueue { fn insert(&mut self, task: Task) { - // TODO(marin): The index uid should be remaped to a task queue identifier here - let uid = task.index_uid.unwrap().into_inner(); let id = task.id; + let uid = match task.index_uid { + Some(uid) => TaskListIdentifier::Index(uid.into_inner()), + None => unreachable!(), + }; let kind = match task.content { TaskContent::DocumentAddition { documents_count, @@ -161,7 +183,7 @@ impl TaskQueue { list.push(task); } Entry::Vacant(entry) => { - let mut task_list = TaskList::new(entry.key().to_owned()); + let mut task_list = TaskList::new(entry.key().clone()); task_list.push(task); let task_list = Arc::new(AtomicRefCell::new(task_list)); entry.insert(task_list.clone()); @@ -182,7 +204,7 @@ impl TaskQueue { // After being mutated, the head is reinserted to the correct 
position. self.queue.push(head); } else { - self.index_tasks.remove(&head.borrow().index); + self.index_tasks.remove(&head.borrow().id); } Some(result) diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index d7a73a2ae..c20d2151b 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -78,6 +78,12 @@ pub struct Task { /// then this is None // TODO: when next forward breaking dumps, it would be a good idea to move this field inside of // the TaskContent. + #[cfg_attr( + test, + proptest( + strategy = "proptest::option::weighted(proptest::option::Probability::new(0.99), IndexUid::arbitrary())" + ) + )] pub index_uid: Option, pub content: TaskContent, pub events: Vec, @@ -165,6 +171,10 @@ pub enum TaskContent { IndexUpdate { primary_key: Option, }, + Dump { + #[cfg_attr(test, proptest(value = "PathBuf::from(\".\")"))] + path: PathBuf, + }, } #[cfg(test)] From 737b891a41ac2deb0954da88554bfda398afa001 Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 17 May 2022 10:58:15 +0200 Subject: [PATCH 017/185] introduce Dump TaskListIdentifier variant --- meilisearch-lib/src/index_controller/mod.rs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 7ba91dfca..4be90489a 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -76,6 +76,7 @@ pub struct IndexController { index_resolver: Arc>, scheduler: Arc>, task_store: TaskStore, + dump_path: PathBuf, dump_handle: dump_actor::DumpActorHandleImpl, update_file_store: UpdateFileStore, } @@ -89,6 +90,7 @@ impl Clone for IndexController { dump_handle: self.dump_handle.clone(), update_file_store: self.update_file_store.clone(), task_store: self.task_store.clone(), + dump_path: self.dump_path.clone(), } } } @@ -234,7 +236,7 @@ impl IndexControllerBuilder { receiver, update_file_store.clone(), 
scheduler.clone(), - dump_path, + dump_path.clone(), analytics_path, index_size, task_store_size, @@ -269,6 +271,7 @@ impl IndexControllerBuilder { index_resolver, scheduler, dump_handle, + dump_path, update_file_store, task_store, }) @@ -425,6 +428,15 @@ where Ok(task) } + pub async fn register_dump_task(&self) -> Result { + let content = TaskContent::Dump { + path: self.dump_path.clone(), + }; + let task = self.task_store.register(None, content).await?; + self.scheduler.read().await.notify(); + Ok(task) + } + pub async fn get_task(&self, id: TaskId, filter: Option) -> Result { let task = self.scheduler.read().await.get_task(id, filter).await?; Ok(task) From 2f0625a984815e259e4953dc924e7c5d49e781fc Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 17 May 2022 12:15:37 +0200 Subject: [PATCH 018/185] register and insert dump task in scheduler --- meilisearch-lib/src/tasks/scheduler.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 67aa6d8e5..1f76f179a 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -148,7 +148,8 @@ impl TaskQueue { let id = task.id; let uid = match task.index_uid { Some(uid) => TaskListIdentifier::Index(uid.into_inner()), - None => unreachable!(), + None if matches!(task.content, TaskContent::Dump { .. 
}) => TaskListIdentifier::Dump, + None => unreachable!("invalid task state"), }; let kind = match task.content { TaskContent::DocumentAddition { From 7fa3eb1003a7df4fda090d5f6dc0b3648a60e37b Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 17 May 2022 17:40:59 +0200 Subject: [PATCH 019/185] register dump tasks --- meilisearch-http/src/routes/dump.rs | 3 ++- meilisearch-http/src/task.rs | 4 +++- meilisearch-lib/src/tasks/task_store/store.rs | 7 ++++--- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/meilisearch-http/src/routes/dump.rs b/meilisearch-http/src/routes/dump.rs index 65cd7521f..b58552f27 100644 --- a/meilisearch-http/src/routes/dump.rs +++ b/meilisearch-http/src/routes/dump.rs @@ -8,6 +8,7 @@ use serde_json::json; use crate::analytics::Analytics; use crate::extractors::authentication::{policies::*, GuardedData}; use crate::extractors::sequential_extractor::SeqHandler; +use crate::task::SummarizedTaskView; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump)))) @@ -23,7 +24,7 @@ pub async fn create_dump( ) -> Result { analytics.publish("Dump Created".to_string(), json!({}), Some(&req)); - let res = meilisearch.create_dump().await?; + let res: SummarizedTaskView = meilisearch.register_dump_task().await?.into(); debug!("returns: {:?}", res); Ok(HttpResponse::Accepted().json(res)) diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 56a181d29..c2399f141 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -24,6 +24,7 @@ enum TaskType { DocumentDeletion, SettingsUpdate, ClearAll, + Dump, } impl From for TaskType { @@ -43,6 +44,7 @@ impl From for TaskType { TaskContent::IndexDeletion => TaskType::IndexDeletion, TaskContent::IndexCreation { .. } => TaskType::IndexCreation, TaskContent::IndexUpdate { .. 
} => TaskType::IndexUpdate, + TaskContent::Dump { path } => TaskType::Dump, _ => unreachable!("unexpected task type"), } } @@ -216,7 +218,7 @@ impl From for TaskView { TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }), ), - TaskContent::Dump { path: _ } => todo!(), + TaskContent::Dump { path: _ } => (TaskType::Dump, None), }; // An event always has at least one event: "Created" diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index 912047d1e..902f80560 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -108,9 +108,10 @@ impl Store { pub fn put(&self, txn: &mut RwTxn, task: &Task) -> Result<()> { self.tasks.put(txn, &BEU64::new(task.id), task)?; - self.uids_task_ids - // TODO(marin): The index uid should be remaped to a task queue identifier here - .put(txn, &(&task.index_uid.as_ref().unwrap(), task.id), &())?; + // only add the task to the indexes index if it has an index_uid + if let Some(ref index_uid) = task.index_uid { + self.uids_task_ids.put(txn, &(&index_uid, task.id), &())?; + } Ok(()) } From 6a0231cb283997ee3715a6beae7b70227b271f2a Mon Sep 17 00:00:00 2001 From: ad hoc Date: Tue, 17 May 2022 17:55:47 +0200 Subject: [PATCH 020/185] perform dump method --- meilisearch-lib/src/index_resolver/mod.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 3b8bdd631..33be749b1 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -3,7 +3,7 @@ pub mod index_store; pub mod meta_store; use std::convert::{TryFrom, TryInto}; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::sync::Arc; use error::{IndexResolverError, Result}; @@ -351,10 +351,14 @@ where Ok(TaskResult::Other) } - TaskContent::Dump { path: _ } => Ok(TaskResult::Other), + TaskContent::Dump { 
path } => self.perform_dump(path).await, } } + async fn perform_dump(&self, path: &PathBuf) -> Result { + todo!() + } + async fn process_job(&self, job: Job) { match job { Job::Dump { ret, path } => { From 46cdc1770168ae8250b804b53cff33d845b8958e Mon Sep 17 00:00:00 2001 From: ad hoc Date: Thu, 19 May 2022 12:43:46 +0200 Subject: [PATCH 021/185] make scheduler accept multiple batch handlers --- meilisearch-http/src/task.rs | 2 +- .../dump_actor => dump}/actor.rs | 0 .../dump_actor => dump}/compat/mod.rs | 0 .../dump_actor => dump}/compat/v2.rs | 0 .../dump_actor => dump}/compat/v3.rs | 0 .../dump_actor => dump}/error.rs | 0 .../dump_actor => dump}/handle_impl.rs | 0 .../dump_actor => dump}/loaders/mod.rs | 0 .../dump_actor => dump}/loaders/v1.rs | 0 .../dump_actor => dump}/loaders/v2.rs | 4 +- .../dump_actor => dump}/loaders/v3.rs | 4 +- .../dump_actor => dump}/loaders/v4.rs | 2 +- .../dump_actor => dump}/message.rs | 0 .../dump_actor => dump}/mod.rs | 140 +++++----- meilisearch-lib/src/index_controller/error.rs | 2 +- meilisearch-lib/src/index_controller/mod.rs | 17 +- meilisearch-lib/src/index_resolver/mod.rs | 112 +------- meilisearch-lib/src/lib.rs | 1 + meilisearch-lib/src/snapshot.rs | 4 +- meilisearch-lib/src/tasks/batch.rs | 63 ++++- .../src/tasks/batch_handlers/empty_handler.rs | 20 ++ .../batch_handlers/index_resolver_handler.rs | 58 ++++ .../src/tasks/batch_handlers/mod.rs | 2 + meilisearch-lib/src/tasks/mod.rs | 11 +- meilisearch-lib/src/tasks/scheduler.rs | 259 ++++++++++++------ meilisearch-lib/src/tasks/task.rs | 31 +-- meilisearch-lib/src/tasks/task_store/mod.rs | 55 +++- meilisearch-lib/src/tasks/update_loop.rs | 71 +++-- 28 files changed, 484 insertions(+), 374 deletions(-) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/actor.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/compat/mod.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/compat/v2.rs (100%) rename 
meilisearch-lib/src/{index_controller/dump_actor => dump}/compat/v3.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/error.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/handle_impl.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/loaders/mod.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/loaders/v1.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/loaders/v2.rs (98%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/loaders/v3.rs (97%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/loaders/v4.rs (95%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/message.rs (100%) rename meilisearch-lib/src/{index_controller/dump_actor => dump}/mod.rs (81%) create mode 100644 meilisearch-lib/src/tasks/batch_handlers/empty_handler.rs create mode 100644 meilisearch-lib/src/tasks/batch_handlers/index_resolver_handler.rs create mode 100644 meilisearch-lib/src/tasks/batch_handlers/mod.rs diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index c2399f141..5a8542ff8 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -44,7 +44,7 @@ impl From for TaskType { TaskContent::IndexDeletion => TaskType::IndexDeletion, TaskContent::IndexCreation { .. } => TaskType::IndexCreation, TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate, - TaskContent::Dump { path } => TaskType::Dump, + TaskContent::Dump { .. 
} => TaskType::Dump, _ => unreachable!("unexpected task type"), } } diff --git a/meilisearch-lib/src/index_controller/dump_actor/actor.rs b/meilisearch-lib/src/dump/actor.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/actor.rs rename to meilisearch-lib/src/dump/actor.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/compat/mod.rs b/meilisearch-lib/src/dump/compat/mod.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/compat/mod.rs rename to meilisearch-lib/src/dump/compat/mod.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/compat/v2.rs b/meilisearch-lib/src/dump/compat/v2.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/compat/v2.rs rename to meilisearch-lib/src/dump/compat/v2.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs b/meilisearch-lib/src/dump/compat/v3.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs rename to meilisearch-lib/src/dump/compat/v3.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/error.rs b/meilisearch-lib/src/dump/error.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/error.rs rename to meilisearch-lib/src/dump/error.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/handle_impl.rs b/meilisearch-lib/src/dump/handle_impl.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/handle_impl.rs rename to meilisearch-lib/src/dump/handle_impl.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/loaders/mod.rs b/meilisearch-lib/src/dump/loaders/mod.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/loaders/mod.rs rename to meilisearch-lib/src/dump/loaders/mod.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/loaders/v1.rs b/meilisearch-lib/src/dump/loaders/v1.rs similarity index 100% rename 
from meilisearch-lib/src/index_controller/dump_actor/loaders/v1.rs rename to meilisearch-lib/src/dump/loaders/v1.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/loaders/v2.rs b/meilisearch-lib/src/dump/loaders/v2.rs similarity index 98% rename from meilisearch-lib/src/index_controller/dump_actor/loaders/v2.rs rename to meilisearch-lib/src/dump/loaders/v2.rs index e2445913e..5926de931 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/loaders/v2.rs +++ b/meilisearch-lib/src/dump/loaders/v2.rs @@ -5,8 +5,8 @@ use std::path::{Path, PathBuf}; use serde_json::{Deserializer, Value}; use tempfile::NamedTempFile; -use crate::index_controller::dump_actor::compat::{self, v2, v3}; -use crate::index_controller::dump_actor::Metadata; +use crate::dump::compat::{self, v2, v3}; +use crate::dump::Metadata; use crate::options::IndexerOpts; /// The dump v2 reads the dump folder and patches all the needed file to make it compatible with a diff --git a/meilisearch-lib/src/index_controller/dump_actor/loaders/v3.rs b/meilisearch-lib/src/dump/loaders/v3.rs similarity index 97% rename from meilisearch-lib/src/index_controller/dump_actor/loaders/v3.rs rename to meilisearch-lib/src/dump/loaders/v3.rs index 902691511..0a2ea438b 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/loaders/v3.rs +++ b/meilisearch-lib/src/dump/loaders/v3.rs @@ -9,8 +9,8 @@ use log::info; use tempfile::tempdir; use uuid::Uuid; -use crate::index_controller::dump_actor::compat::v3; -use crate::index_controller::dump_actor::Metadata; +use crate::dump::compat::v3; +use crate::dump::Metadata; use crate::index_resolver::meta_store::{DumpEntry, IndexMeta}; use crate::options::IndexerOpts; use crate::tasks::task::{Task, TaskId}; diff --git a/meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs b/meilisearch-lib/src/dump/loaders/v4.rs similarity index 95% rename from meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs rename to meilisearch-lib/src/dump/loaders/v4.rs 
index 38d61f146..c898f83b1 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs +++ b/meilisearch-lib/src/dump/loaders/v4.rs @@ -6,7 +6,7 @@ use meilisearch_auth::AuthController; use milli::heed::EnvOpenOptions; use crate::analytics; -use crate::index_controller::dump_actor::Metadata; +use crate::dump::Metadata; use crate::index_resolver::IndexResolver; use crate::options::IndexerOpts; use crate::tasks::TaskStore; diff --git a/meilisearch-lib/src/index_controller/dump_actor/message.rs b/meilisearch-lib/src/dump/message.rs similarity index 100% rename from meilisearch-lib/src/index_controller/dump_actor/message.rs rename to meilisearch-lib/src/dump/message.rs diff --git a/meilisearch-lib/src/index_controller/dump_actor/mod.rs b/meilisearch-lib/src/dump/mod.rs similarity index 81% rename from meilisearch-lib/src/index_controller/dump_actor/mod.rs rename to meilisearch-lib/src/dump/mod.rs index 00be3a371..bc717b35e 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/mod.rs +++ b/meilisearch-lib/src/dump/mod.rs @@ -3,28 +3,24 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use anyhow::bail; -use log::{info, trace}; +use log::info; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; pub use actor::DumpActor; pub use handle_impl::*; -use meilisearch_auth::AuthController; pub use message::DumpMsg; use tempfile::TempDir; -use tokio::fs::create_dir_all; -use tokio::sync::{oneshot, RwLock}; +use tokio::sync::RwLock; -use crate::analytics; -use crate::compression::{from_tar_gz, to_tar_gz}; -use crate::index_controller::dump_actor::error::DumpActorError; -use crate::index_controller::dump_actor::loaders::{v2, v3, v4}; +use crate::compression::from_tar_gz; use crate::options::IndexerOpts; -use crate::tasks::task::Job; use crate::tasks::Scheduler; use crate::update_file_store::UpdateFileStore; use error::Result; +use self::loaders::{v2, v3, v4}; + mod actor; mod compat; pub mod error; @@ -316,7 +312,7 @@ fn persist_dump(dst_path: 
impl AsRef, tmp_dst: TempDir) -> anyhow::Result< Ok(()) } -struct DumpJob { +pub struct DumpJob { dump_path: PathBuf, db_path: PathBuf, update_file_store: UpdateFileStore, @@ -328,65 +324,65 @@ struct DumpJob { impl DumpJob { async fn run(self) -> Result<()> { - trace!("Performing dump."); - - create_dir_all(&self.dump_path).await?; - - let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; - let temp_dump_path = temp_dump_dir.path().to_owned(); - - let meta = MetadataVersion::new_v4(self.index_db_size, self.update_db_size); - let meta_path = temp_dump_path.join(META_FILE_NAME); - let mut meta_file = File::create(&meta_path)?; - serde_json::to_writer(&mut meta_file, &meta)?; - analytics::copy_user_id(&self.db_path, &temp_dump_path); - - create_dir_all(&temp_dump_path.join("indexes")).await?; - - let (sender, receiver) = oneshot::channel(); - - self.scheduler - .write() - .await - .schedule_job(Job::Dump { - ret: sender, - path: temp_dump_path.clone(), - }) - .await; - - // wait until the job has started performing before finishing the dump process - let sender = receiver.await??; - - AuthController::dump(&self.db_path, &temp_dump_path)?; - - //TODO(marin): this is not right, the scheduler should dump itself, not do it here... - self.scheduler - .read() - .await - .dump(&temp_dump_path, self.update_file_store.clone()) - .await?; - - let dump_path = tokio::task::spawn_blocking(move || -> Result { - // for now we simply copy the updates/updates_files - // FIXME: We may copy more files than necessary, if new files are added while we are - // performing the dump. We need a way to filter them out. 
- - let temp_dump_file = tempfile::NamedTempFile::new_in(&self.dump_path)?; - to_tar_gz(temp_dump_path, temp_dump_file.path()) - .map_err(|e| DumpActorError::Internal(e.into()))?; - - let dump_path = self.dump_path.join(self.uid).with_extension("dump"); - temp_dump_file.persist(&dump_path)?; - - Ok(dump_path) - }) - .await??; - - // notify the update loop that we are finished performing the dump. - let _ = sender.send(()); - - info!("Created dump in {:?}.", dump_path); - + // trace!("Performing dump."); + // + // create_dir_all(&self.dump_path).await?; + // + // let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??; + // let temp_dump_path = temp_dump_dir.path().to_owned(); + // + // let meta = MetadataVersion::new_v4(self.index_db_size, self.update_db_size); + // let meta_path = temp_dump_path.join(META_FILE_NAME); + // let mut meta_file = File::create(&meta_path)?; + // serde_json::to_writer(&mut meta_file, &meta)?; + // analytics::copy_user_id(&self.db_path, &temp_dump_path); + // + // create_dir_all(&temp_dump_path.join("indexes")).await?; + // + // let (sender, receiver) = oneshot::channel(); + // + // self.scheduler + // .write() + // .await + // .schedule_job(Job::Dump { + // ret: sender, + // path: temp_dump_path.clone(), + // }) + // .await; + // + // // wait until the job has started performing before finishing the dump process + // let sender = receiver.await??; + // + // AuthController::dump(&self.db_path, &temp_dump_path)?; + // + // //TODO(marin): this is not right, the scheduler should dump itself, not do it here... + // self.scheduler + // .read() + // .await + // .dump(&temp_dump_path, self.update_file_store.clone()) + // .await?; + // + // let dump_path = tokio::task::spawn_blocking(move || -> Result { + // // for now we simply copy the updates/updates_files + // // FIXME: We may copy more files than necessary, if new files are added while we are + // // performing the dump. We need a way to filter them out. 
+ // + // let temp_dump_file = tempfile::NamedTempFile::new_in(&self.dump_path)?; + // to_tar_gz(temp_dump_path, temp_dump_file.path()) + // .map_err(|e| DumpActorError::Internal(e.into()))?; + // + // let dump_path = self.dump_path.join(self.uid).with_extension("dump"); + // temp_dump_file.persist(&dump_path)?; + // + // Ok(dump_path) + // }) + // .await??; + // + // // notify the update loop that we are finished performing the dump. + // let _ = sender.send(()); + // + // info!("Created dump in {:?}.", dump_path); + // Ok(()) } } @@ -401,7 +397,7 @@ mod test { use crate::options::SchedulerConfig; use crate::tasks::error::Result as TaskResult; use crate::tasks::task::{Task, TaskId}; - use crate::tasks::{MockTaskPerformer, TaskFilter, TaskStore}; + use crate::tasks::{BatchHandler, TaskFilter, TaskStore}; use crate::update_file_store::UpdateFileStore; fn setup() { @@ -426,7 +422,7 @@ mod test { let mocker = Mocker::default(); let update_file_store = UpdateFileStore::mock(mocker); - let mut performer = MockTaskPerformer::new(); + let mut performer = BatchHandler::new(); performer .expect_process_job() .once() @@ -480,7 +476,7 @@ mod test { ) .then(|_| Ok(Vec::new())); let task_store = TaskStore::mock(mocker); - let mut performer = MockTaskPerformer::new(); + let mut performer = BatchHandler::new(); performer .expect_process_job() .once() diff --git a/meilisearch-lib/src/index_controller/error.rs b/meilisearch-lib/src/index_controller/error.rs index 85af76623..11ef03d73 100644 --- a/meilisearch-lib/src/index_controller/error.rs +++ b/meilisearch-lib/src/index_controller/error.rs @@ -6,11 +6,11 @@ use tokio::task::JoinError; use super::DocumentAdditionFormat; use crate::document_formats::DocumentFormatError; +use crate::dump::error::DumpActorError; use crate::index::error::IndexError; use crate::tasks::error::TaskError; use crate::update_file_store::UpdateFileStoreError; -use super::dump_actor::error::DumpActorError; use 
crate::index_resolver::error::IndexResolverError; pub type Result = std::result::Result; diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 4be90489a..b73402d56 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -19,25 +19,23 @@ use tokio::time::sleep; use uuid::Uuid; use crate::document_formats::{read_csv, read_json, read_ndjson}; +use crate::dump::{self, load_dump, DumpActor, DumpActorHandle, DumpActorHandleImpl, DumpInfo}; use crate::index::{ Checked, Document, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings, Unchecked, }; -use crate::index_controller::dump_actor::{load_dump, DumpActor, DumpActorHandleImpl}; use crate::options::{IndexerOpts, SchedulerConfig}; use crate::snapshot::{load_snapshot, SnapshotService}; use crate::tasks::error::TaskError; use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskId}; -use crate::tasks::{Scheduler, TaskFilter, TaskStore}; +use crate::tasks::{BatchHandler, EmptyBatchHandler, Scheduler, TaskFilter, TaskStore}; use error::Result; -use self::dump_actor::{DumpActorHandle, DumpInfo}; use self::error::IndexControllerError; use crate::index_resolver::index_store::{IndexStore, MapIndexStore}; use crate::index_resolver::meta_store::{HeedMetaStore, IndexMetaStore}; use crate::index_resolver::{create_index_resolver, IndexResolver, IndexUid}; use crate::update_file_store::UpdateFileStore; -mod dump_actor; pub mod error; pub mod versioning; @@ -73,12 +71,12 @@ pub struct IndexSettings { } pub struct IndexController { - index_resolver: Arc>, + pub index_resolver: Arc>, scheduler: Arc>, task_store: TaskStore, dump_path: PathBuf, - dump_handle: dump_actor::DumpActorHandleImpl, - update_file_store: UpdateFileStore, + dump_handle: dump::DumpActorHandleImpl, + pub update_file_store: UpdateFileStore, } /// Need a custom implementation for clone because deriving require that U and I are clone. 
@@ -223,8 +221,9 @@ impl IndexControllerBuilder { )?); let task_store = TaskStore::new(meta_env)?; - let scheduler = - Scheduler::new(task_store.clone(), index_resolver.clone(), scheduler_config)?; + let handlers: Vec> = + vec![index_resolver.clone(), Arc::new(EmptyBatchHandler)]; + let scheduler = Scheduler::new(task_store.clone(), handlers, scheduler_config)?; let dump_path = self .dump_dst diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 33be749b1..f463cd24d 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -3,7 +3,7 @@ pub mod index_store; pub mod meta_store; use std::convert::{TryFrom, TryInto}; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::sync::Arc; use error::{IndexResolverError, Result}; @@ -14,15 +14,12 @@ use milli::heed::Env; use milli::update::{DocumentDeletionResult, IndexerConfig}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; -use tokio::sync::oneshot; use tokio::task::spawn_blocking; use uuid::Uuid; use crate::index::{error::Result as IndexResult, Index}; use crate::options::IndexerOpts; -use crate::tasks::batch::Batch; -use crate::tasks::task::{DocumentDeletion, Job, Task, TaskContent, TaskEvent, TaskId, TaskResult}; -use crate::tasks::TaskPerformer; +use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult}; use crate::update_file_store::UpdateFileStore; use self::meta_store::IndexMeta; @@ -91,69 +88,10 @@ impl TryInto for String { } } -#[async_trait::async_trait] -impl TaskPerformer for IndexResolver -where - U: IndexMetaStore + Send + Sync + 'static, - I: IndexStore + Send + Sync + 'static, -{ - async fn process_batch(&self, mut batch: Batch) -> Batch { - // If a batch contains multiple tasks, then it must be a document addition batch - if let Some(Task { - content: TaskContent::DocumentAddition { .. }, - .. 
- }) = batch.tasks.first() - { - debug_assert!(batch.tasks.iter().all(|t| matches!( - t, - Task { - content: TaskContent::DocumentAddition { .. }, - .. - } - ))); - - self.process_document_addition_batch(batch).await - } else { - if let Some(task) = batch.tasks.first_mut() { - task.events - .push(TaskEvent::Processing(OffsetDateTime::now_utc())); - - match self.process_task(task).await { - Ok(success) => { - task.events.push(TaskEvent::Succeded { - result: success, - timestamp: OffsetDateTime::now_utc(), - }); - } - Err(err) => task.events.push(TaskEvent::Failed { - error: err.into(), - timestamp: OffsetDateTime::now_utc(), - }), - } - } - batch - } - } - - async fn process_job(&self, job: Job) { - self.process_job(job).await; - } - - async fn finish(&self, batch: &Batch) { - for task in &batch.tasks { - if let Some(content_uuid) = task.get_content_uuid() { - if let Err(e) = self.file_store.delete(content_uuid).await { - log::error!("error deleting update file: {}", e); - } - } - } - } -} - pub struct IndexResolver { index_uuid_store: U, index_store: I, - file_store: UpdateFileStore, + pub file_store: UpdateFileStore, } impl IndexResolver { @@ -189,7 +127,7 @@ where } } - async fn process_document_addition_batch(&self, mut batch: Batch) -> Batch { + pub async fn process_document_addition_batch(&self, mut tasks: Vec) -> Vec { fn get_content_uuid(task: &Task) -> Uuid { match task { Task { @@ -200,9 +138,9 @@ where } } - let content_uuids = batch.tasks.iter().map(get_content_uuid).collect::>(); + let content_uuids = tasks.iter().map(get_content_uuid).collect::>(); - match batch.tasks.first() { + match tasks.first() { Some(Task { index_uid: Some(ref index_uid), id, @@ -231,13 +169,13 @@ where Ok(index) => index, Err(e) => { let error = ResponseError::from(e); - for task in batch.tasks.iter_mut() { + for task in tasks.iter_mut() { task.events.push(TaskEvent::Failed { error: error.clone(), timestamp: now, }); } - return batch; + return tasks; } }; @@ -269,17 +207,17 @@ 
where }, }; - for task in batch.tasks.iter_mut() { + for task in tasks.iter_mut() { task.events.push(event.clone()); } - batch + tasks } _ => panic!("invalid batch!"), } } - async fn process_task(&self, task: &Task) -> Result { + pub async fn process_task(&self, task: &Task) -> Result { let index_uid = task.index_uid.clone(); match &task.content { TaskContent::DocumentAddition { .. } => panic!("updates should be handled by batch"), @@ -351,33 +289,7 @@ where Ok(TaskResult::Other) } - TaskContent::Dump { path } => self.perform_dump(path).await, - } - } - - async fn perform_dump(&self, path: &PathBuf) -> Result { - todo!() - } - - async fn process_job(&self, job: Job) { - match job { - Job::Dump { ret, path } => { - log::trace!("The Dump task is getting executed"); - - let (sender, receiver) = oneshot::channel(); - if ret.send(self.dump(path).await.map(|_| sender)).is_err() { - log::error!("The dump actor died."); - } - - // wait until the dump has finished performing. - let _ = receiver.await; - } - Job::Empty => log::error!("Tried to process an empty task."), - Job::Snapshot(job) => { - if let Err(e) = job.run().await { - log::error!("Error performing snapshot: {}", e); - } - } + _ => unreachable!("Invalid task for index resolver"), } } diff --git a/meilisearch-lib/src/lib.rs b/meilisearch-lib/src/lib.rs index 1161340ba..3d3d5e860 100644 --- a/meilisearch-lib/src/lib.rs +++ b/meilisearch-lib/src/lib.rs @@ -3,6 +3,7 @@ pub mod error; pub mod options; mod analytics; +mod dump; pub mod index; pub mod index_controller; mod index_resolver; diff --git a/meilisearch-lib/src/snapshot.rs b/meilisearch-lib/src/snapshot.rs index 6c27ad2f0..6dda0f3e8 100644 --- a/meilisearch-lib/src/snapshot.rs +++ b/meilisearch-lib/src/snapshot.rs @@ -14,7 +14,6 @@ use walkdir::WalkDir; use crate::compression::from_tar_gz; use crate::index_controller::open_meta_env; use crate::index_controller::versioning::VERSION_FILE_NAME; -use crate::tasks::task::Job; use crate::tasks::Scheduler; pub 
struct SnapshotService { @@ -39,8 +38,7 @@ impl SnapshotService { meta_env_size: self.meta_env_size, index_size: self.index_size, }; - let job = Job::Snapshot(snapshot_job); - self.scheduler.write().await.schedule_job(job).await; + self.scheduler.write().await.register_snapshot(snapshot_job); sleep(self.snapshot_period).await; } } diff --git a/meilisearch-lib/src/tasks/batch.rs b/meilisearch-lib/src/tasks/batch.rs index 4a8cf7907..88c73e3de 100644 --- a/meilisearch-lib/src/tasks/batch.rs +++ b/meilisearch-lib/src/tasks/batch.rs @@ -1,22 +1,75 @@ use time::OffsetDateTime; -use super::task::Task; +use crate::snapshot::SnapshotJob; + +use super::task::{Task, TaskEvent}; pub type BatchId = u64; +#[derive(Debug)] +pub enum BatchContent { + DocumentAddtitionBatch(Vec), + IndexUpdate(Task), + Dump(Task), + Snapshot(SnapshotJob), + // Symbolizes a empty batch. This can occur when we were woken, but there wasn't any work to do. + Empty, +} + +impl BatchContent { + pub fn first(&self) -> Option<&Task> { + match self { + BatchContent::DocumentAddtitionBatch(ts) => ts.first(), + BatchContent::Dump(t) | BatchContent::IndexUpdate(t) => Some(t), + BatchContent::Snapshot(_) | BatchContent::Empty => None, + } + } + + pub fn push_event(&mut self, event: TaskEvent) { + match self { + BatchContent::DocumentAddtitionBatch(ts) => { + ts.iter_mut().for_each(|t| t.events.push(event.clone())) + } + BatchContent::IndexUpdate(t) | BatchContent::Dump(t) => t.events.push(event), + BatchContent::Snapshot(_) | BatchContent::Empty => (), + } + } +} + #[derive(Debug)] pub struct Batch { - pub id: BatchId, + // Only batches that contains a persistant tasks are given an id. Snapshot batches don't have + // an id. 
+ pub id: Option, pub created_at: OffsetDateTime, - pub tasks: Vec, + pub content: BatchContent, } impl Batch { + pub fn new(id: Option, content: BatchContent) -> Self { + Self { + id, + created_at: OffsetDateTime::now_utc(), + content, + } + } pub fn len(&self) -> usize { - self.tasks.len() + match self.content { + BatchContent::DocumentAddtitionBatch(ref ts) => ts.len(), + BatchContent::IndexUpdate(_) | BatchContent::Dump(_) | BatchContent::Snapshot(_) => 1, + BatchContent::Empty => 0, + } } pub fn is_empty(&self) -> bool { - self.tasks.is_empty() + self.len() == 0 + } + + pub fn empty() -> Self { + Self { + id: None, + created_at: OffsetDateTime::now_utc(), + content: BatchContent::Empty, + } } } diff --git a/meilisearch-lib/src/tasks/batch_handlers/empty_handler.rs b/meilisearch-lib/src/tasks/batch_handlers/empty_handler.rs new file mode 100644 index 000000000..5d6aa2275 --- /dev/null +++ b/meilisearch-lib/src/tasks/batch_handlers/empty_handler.rs @@ -0,0 +1,20 @@ +use crate::tasks::batch::{Batch, BatchContent}; +use crate::tasks::BatchHandler; + +/// A sink handler for empty tasks. 
+pub struct EmptyBatchHandler; + +#[async_trait::async_trait] +impl BatchHandler for EmptyBatchHandler { + fn accept(&self, batch: &Batch) -> bool { + matches!(batch.content, BatchContent::Empty) + } + + async fn process_batch(&self, batch: Batch) -> Batch { + batch + } + + async fn finish(&self, _: &Batch) { + () + } +} diff --git a/meilisearch-lib/src/tasks/batch_handlers/index_resolver_handler.rs b/meilisearch-lib/src/tasks/batch_handlers/index_resolver_handler.rs new file mode 100644 index 000000000..41a78a22b --- /dev/null +++ b/meilisearch-lib/src/tasks/batch_handlers/index_resolver_handler.rs @@ -0,0 +1,58 @@ +use time::OffsetDateTime; + +use crate::index_resolver::IndexResolver; +use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore}; +use crate::tasks::batch::{Batch, BatchContent}; +use crate::tasks::task::TaskEvent; +use crate::tasks::BatchHandler; + +#[async_trait::async_trait] +impl BatchHandler for IndexResolver +where + U: IndexMetaStore + Send + Sync + 'static, + I: IndexStore + Send + Sync + 'static, +{ + fn accept(&self, batch: &Batch) -> bool { + match batch.content { + BatchContent::DocumentAddtitionBatch(_) | BatchContent::IndexUpdate(_) => true, + _ => false, + } + } + + async fn process_batch(&self, mut batch: Batch) -> Batch { + match batch.content { + BatchContent::DocumentAddtitionBatch(ref mut tasks) => { + *tasks = self + .process_document_addition_batch(std::mem::take(tasks)) + .await; + } + BatchContent::IndexUpdate(ref mut task) => match self.process_task(&task).await { + Ok(success) => { + task.events.push(TaskEvent::Succeded { + result: success, + timestamp: OffsetDateTime::now_utc(), + }); + } + Err(err) => task.events.push(TaskEvent::Failed { + error: err.into(), + timestamp: OffsetDateTime::now_utc(), + }), + }, + _ => unreachable!(), + } + + batch + } + + async fn finish(&self, batch: &Batch) { + if let BatchContent::DocumentAddtitionBatch(ref tasks) = batch.content { + for task in tasks { + if let 
Some(content_uuid) = task.get_content_uuid() { + if let Err(e) = self.file_store.delete(content_uuid).await { + log::error!("error deleting update file: {}", e); + } + } + } + } + } +} diff --git a/meilisearch-lib/src/tasks/batch_handlers/mod.rs b/meilisearch-lib/src/tasks/batch_handlers/mod.rs new file mode 100644 index 000000000..0e94c76f1 --- /dev/null +++ b/meilisearch-lib/src/tasks/batch_handlers/mod.rs @@ -0,0 +1,2 @@ +pub mod empty_handler; +mod index_resolver_handler; diff --git a/meilisearch-lib/src/tasks/mod.rs b/meilisearch-lib/src/tasks/mod.rs index b56dfaf9d..bc01c4901 100644 --- a/meilisearch-lib/src/tasks/mod.rs +++ b/meilisearch-lib/src/tasks/mod.rs @@ -1,5 +1,6 @@ use async_trait::async_trait; +pub use batch_handlers::empty_handler::EmptyBatchHandler; pub use scheduler::Scheduler; pub use task_store::TaskFilter; @@ -11,9 +12,8 @@ pub use task_store::TaskStore; use batch::Batch; use error::Result; -use self::task::Job; - pub mod batch; +mod batch_handlers; pub mod error; mod scheduler; pub mod task; @@ -22,12 +22,13 @@ pub mod update_loop; #[cfg_attr(test, mockall::automock(type Error=test::DebugError;))] #[async_trait] -pub trait TaskPerformer: Sync + Send + 'static { +pub trait BatchHandler: Sync + Send + 'static { + /// return whether this handler can accept this batch + fn accept(&self, batch: &Batch) -> bool; + /// Processes the `Task` batch returning the batch with the `Task` updated. async fn process_batch(&self, batch: Batch) -> Batch; - async fn process_job(&self, job: Job); - /// `finish` is called when the result of `process` has been commited to the task store. This /// method can be used to perform cleanup after the update has been completed for example. 
async fn finish(&self, batch: &Batch); diff --git a/meilisearch-lib/src/tasks/scheduler.rs b/meilisearch-lib/src/tasks/scheduler.rs index 1f76f179a..f3018b782 100644 --- a/meilisearch-lib/src/tasks/scheduler.rs +++ b/meilisearch-lib/src/tasks/scheduler.rs @@ -2,6 +2,7 @@ use std::cmp::Ordering; use std::collections::{hash_map::Entry, BinaryHeap, HashMap, VecDeque}; use std::ops::{Deref, DerefMut}; use std::path::Path; +use std::slice; use std::sync::Arc; use std::time::Duration; @@ -11,24 +12,21 @@ use time::OffsetDateTime; use tokio::sync::{watch, RwLock}; use crate::options::SchedulerConfig; +use crate::snapshot::SnapshotJob; use crate::update_file_store::UpdateFileStore; -use super::batch::Batch; +use super::batch::{Batch, BatchContent}; use super::error::Result; -use super::task::{Job, Task, TaskContent, TaskEvent, TaskId}; +use super::task::{Task, TaskContent, TaskEvent, TaskId}; use super::update_loop::UpdateLoop; -use super::{TaskFilter, TaskPerformer, TaskStore}; +use super::{BatchHandler, TaskFilter, TaskStore}; #[derive(Eq, Debug, Clone, Copy)] enum TaskType { - DocumentAddition { - number: usize, - }, - DocumentUpdate { - number: usize, - }, - /// Any other kind of task, including Dumps - Other, + DocumentAddition { number: usize }, + DocumentUpdate { number: usize }, + IndexUpdate, + Dump, } /// Two tasks are equal if they have the same type. @@ -166,7 +164,13 @@ impl TaskQueue { } => TaskType::DocumentUpdate { number: documents_count, }, - _ => TaskType::Other, + TaskContent::Dump { .. } => TaskType::Dump, + TaskContent::DocumentDeletion(_) + | TaskContent::SettingsUpdate { .. } + | TaskContent::IndexDeletion + | TaskContent::IndexCreation { .. } + | TaskContent::IndexUpdate { .. 
} => TaskType::IndexUpdate, + _ => unreachable!("unhandled task type"), }; let task = PendingTask { kind, id }; @@ -217,11 +221,12 @@ impl TaskQueue { } pub struct Scheduler { - jobs: VecDeque, + // TODO: currently snapshots are non persistent tasks, and are treated differently. + snapshots: VecDeque, tasks: TaskQueue, store: TaskStore, - processing: Vec, + processing: Processing, next_fetched_task_id: TaskId, config: SchedulerConfig, /// Notifies the update loop that a new task was received @@ -229,14 +234,11 @@ pub struct Scheduler { } impl Scheduler { - pub fn new