mirror of https://github.com/meilisearch/MeiliSearch
synced 2024-11-12 07:58:54 +01:00

commit cde62fcb5b
Merge remote-tracking branch 'origin/release-v1.0.0' into import-milli

.github/workflows/latest-git-tag.yml (vendored, 3 changes)
@@ -3,7 +3,7 @@ name: Update latest git tag
 on:
   workflow_dispatch:
   release:
-    types: [published]
+    types: [released]

 jobs:
   check-version:
@@ -17,6 +17,7 @@ jobs:

   update-latest-tag:
     runs-on: ubuntu-latest
+    needs: check-version
     steps:
       - uses: actions/checkout@v3
       - uses: rickstaa/action-create-tag@v1
.github/workflows/publish-deb-brew-pkg.yml (vendored, 2 changes)

@@ -2,7 +2,7 @@ name: Publish to APT repository & Homebrew

 on:
   release:
-    types: [published]
+    types: [released]

 jobs:
   check-version:
Cargo.lock (generated, 124 changes)
@@ -46,7 +46,7 @@ dependencies = [
  "actix-tls",
  "actix-utils",
  "ahash",
- "base64",
+ "base64 0.13.1",
  "bitflags",
  "brotli",
  "bytes",
@@ -337,9 +337,9 @@ dependencies = [

 [[package]]
 name = "async-trait"
-version = "0.1.60"
+version = "0.1.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677d1d8ab452a3936018a687b20e6f7cf5363d713b732b8884001317b0e48aa3"
+checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282"
 dependencies = [
  "proc-macro2 1.0.49",
  "quote 1.0.23",
@@ -383,7 +383,7 @@ dependencies = [
  "cfg-if",
  "libc",
  "miniz_oxide",
- "object 0.30.1",
+ "object 0.30.2",
  "rustc-demangle",
 ]

@@ -393,6 +393,12 @@ version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"

+[[package]]
+name = "base64"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
+
 [[package]]
 name = "base64ct"
 version = "1.5.3"
@@ -1404,8 +1410,8 @@ dependencies = [

 [[package]]
 name = "filter-parser"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
  "nom",
  "nom_locate",
@@ -1433,8 +1439,8 @@ dependencies = [

 [[package]]
 name = "flatten-serde-json"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
  "serde_json",
 ]
@@ -1679,9 +1685,9 @@ dependencies = [

 [[package]]
 name = "glob"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
+checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"

 [[package]]
 name = "grenad"
@@ -1958,9 +1964,9 @@ dependencies = [

 [[package]]
 name = "insta"
-version = "1.24.1"
+version = "1.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb5686bd8e9239eabe90bb30a0c341bffd6fdc177fb556708f2cb792bf00352d"
+checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff"
 dependencies = [
  "console",
  "lazy_static",
@@ -1993,9 +1999,9 @@ dependencies = [

 [[package]]
 name = "ipnet"
-version = "2.7.0"
+version = "2.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11b0d96e660696543b251e58030cf9787df56da39dab19ad60eae7353040917e"
+checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146"

 [[package]]
 name = "is-terminal"
@@ -2065,8 +2071,8 @@ dependencies = [

 [[package]]
 name = "json-depth-checker"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
  "serde_json",
 ]
@@ -2085,7 +2091,7 @@ version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09f4f04699947111ec1733e71778d763555737579e44b85844cae8e1940a1828"
 dependencies = [
- "base64",
+ "base64 0.13.1",
  "pem",
  "ring",
  "serde",
@@ -2122,9 +2128,9 @@ checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"

 [[package]]
 name = "libgit2-sys"
-version = "0.14.0+1.5.0"
+version = "0.14.1+1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "47a00859c70c8a4f7218e6d1cc32875c4b55f6799445b842b0d8ed5e4c3d959b"
+checksum = "4a07fb2692bc3593bda59de45a502bb3071659f2c515e28c71e728306b038e17"
 dependencies = [
  "cc",
  "libc",
@@ -2140,9 +2146,9 @@ checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"

 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.28"
+version = "0.1.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04d1c67deb83e6b75fa4fe3309e09cfeade12e7721d95322af500d3814ea60c9"
+checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174"
 dependencies = [
  "cc",
  "libc",
@@ -2551,7 +2557,7 @@ dependencies = [
 name = "meilisearch-auth"
 version = "1.0.0"
 dependencies = [
- "base64",
+ "base64 0.13.1",
  "enum-iterator",
  "hmac",
  "meilisearch-types",
@@ -2582,7 +2588,7 @@ dependencies = [
  "insta",
  "meili-snap",
  "memmap2",
- "milli 0.38.0",
+ "milli 0.39.0",
  "proptest",
  "proptest-derive",
  "roaring",
@@ -2622,8 +2628,8 @@ dependencies = [

 [[package]]
 name = "milli"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
  "bimap",
  "bincode",
@@ -2633,16 +2639,17 @@ dependencies = [
  "concat-arrays",
  "crossbeam-channel",
  "csv",
+ "deserr",
  "either",
- "filter-parser 0.38.0",
+ "filter-parser 0.39.0",
- "flatten-serde-json 0.38.0",
+ "flatten-serde-json 0.39.0",
  "fst",
  "fxhash",
  "geoutils",
  "grenad",
  "heed",
  "itertools",
- "json-depth-checker 0.38.0",
+ "json-depth-checker 0.39.0",
  "levenshtein_automata",
  "log",
  "logging_timer",
@@ -2718,9 +2725,9 @@ dependencies = [

 [[package]]
 name = "mimalloc"
-version = "0.1.32"
+version = "0.1.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b2374e2999959a7b583e1811a1ddbf1d3a4b9496eceb9746f1192a59d871eca"
+checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1"
 dependencies = [
  "libmimalloc-sys",
 ]
@@ -2865,9 +2872,9 @@ dependencies = [

 [[package]]
 name = "object"
-version = "0.30.1"
+version = "0.30.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d864c91689fdc196779b98dba0aceac6118594c2df6ee5d943eb6a8df4d107a"
+checksum = "2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83"
 dependencies = [
  "memchr",
 ]
@@ -3010,7 +3017,7 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "03c64931a1a212348ec4f3b4362585eca7159d0d09cbdf4a7f74f02173596fd4"
 dependencies = [
- "base64",
+ "base64 0.13.1",
 ]

 [[package]]
@@ -3029,9 +3036,9 @@ dependencies = [

 [[package]]
 name = "pest"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f6e86fb9e7026527a0d46bc308b841d73170ef8f443e1807f6ef88526a816d4"
+checksum = "4257b4a04d91f7e9e6290be5d3da4804dd5784fafde3a497d73eb2b4a158c30a"
 dependencies = [
  "thiserror",
  "ucd-trie",
@@ -3039,9 +3046,9 @@ dependencies = [

 [[package]]
 name = "pest_derive"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96504449aa860c8dcde14f9fba5c58dc6658688ca1fe363589d6327b8662c603"
+checksum = "241cda393b0cdd65e62e07e12454f1f25d57017dcc514b1514cd3c4645e3a0a6"
 dependencies = [
  "pest",
  "pest_generator",
@@ -3049,9 +3056,9 @@ dependencies = [

 [[package]]
 name = "pest_generator"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "798e0220d1111ae63d66cb66a5dcb3fc2d986d520b98e49e1852bfdb11d7c5e7"
+checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c"
 dependencies = [
  "pest",
  "pest_meta",
@@ -3062,13 +3069,13 @@ dependencies = [

 [[package]]
 name = "pest_meta"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "984298b75898e30a843e278a9f2452c31e349a073a0ce6fd950a12a74464e065"
+checksum = "0ef4f1332a8d4678b41966bb4cc1d0676880e84183a1ecc3f4b69f03e99c7a51"
 dependencies = [
  "once_cell",
  "pest",
- "sha1",
+ "sha2",
 ]

 [[package]]
@@ -3398,9 +3405,9 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.7.0"
+version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -3434,7 +3441,7 @@ version = "0.11.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68cc60575865c7831548863cc02356512e3f1dc2f3f82cb837d7fc4cc8f3c97c"
 dependencies = [
- "base64",
+ "base64 0.13.1",
  "bytes",
  "encoding_rs",
  "futures-core",
@@ -3555,11 +3562,11 @@ dependencies = [

 [[package]]
 name = "rustls-pemfile"
-version = "1.0.1"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55"
+checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b"
 dependencies = [
- "base64",
+ "base64 0.21.0",
 ]

 [[package]]
@@ -3613,9 +3620,9 @@ dependencies = [

 [[package]]
 name = "segment"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24fc91c898e0487ff3e471d0849bbaf7d38a00ff5e3531009d386b0bab9b6b12"
+checksum = "2bb93f3f738322ce8f33c4e80c251fb1560ca81f3a241355271fcb912eeb48e3"
 dependencies = [
  "async-trait",
  "reqwest",
@@ -3897,9 +3904,9 @@ dependencies = [

 [[package]]
 name = "sysinfo"
-version = "0.26.8"
+version = "0.26.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29ddf41e393a9133c81d5f0974195366bd57082deac6e0eb02ed39b8341c2bb6"
+checksum = "5c18a6156d1f27a9592ee18c1a846ca8dd5c258b7179fc193ae87c74ebb666f5"
 dependencies = [
  "cfg-if",
  "core-foundation-sys",
@@ -4033,9 +4040,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"

 [[package]]
 name = "tokio"
-version = "1.24.0"
+version = "1.24.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7125661431c26622a80ca5051a2f936c9a678318e0351007b0cc313143024e5c"
+checksum = "1d9f76183f91ecfb55e1d7d5602bd1d979e38a3a522fe900241cf195624d67ae"
 dependencies = [
  "autocfg",
  "bytes",
@@ -4136,9 +4143,9 @@ dependencies = [

 [[package]]
 name = "try-lock"
-version = "0.2.3"
+version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
+checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"

 [[package]]
 name = "typenum"
@@ -4627,10 +4634,11 @@ dependencies = [

 [[package]]
 name = "zstd-sys"
-version = "2.0.4+zstd.1.5.2"
+version = "2.0.5+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fa202f2ef00074143e219d15b62ffc317d17cc33909feac471c044087cad7b0"
+checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596"
 dependencies = [
  "cc",
  "libc",
+ "pkg-config",
 ]
@@ -249,17 +249,17 @@ pub(crate) mod test {

     pub fn create_test_settings() -> Settings<Checked> {
         let settings = Settings {
-            displayed_attributes: Setting::Set(vec![S("race"), S("name")]).into(),
-            searchable_attributes: Setting::Set(vec![S("name"), S("race")]).into(),
-            filterable_attributes: Setting::Set(btreeset! { S("race"), S("age") }).into(),
-            sortable_attributes: Setting::Set(btreeset! { S("age") }).into(),
-            ranking_rules: Setting::NotSet.into(),
-            stop_words: Setting::NotSet.into(),
-            synonyms: Setting::NotSet.into(),
-            distinct_attribute: Setting::NotSet.into(),
-            typo_tolerance: Setting::NotSet.into(),
-            faceting: Setting::NotSet.into(),
-            pagination: Setting::NotSet.into(),
+            displayed_attributes: Setting::Set(vec![S("race"), S("name")]),
+            searchable_attributes: Setting::Set(vec![S("name"), S("race")]),
+            filterable_attributes: Setting::Set(btreeset! { S("race"), S("age") }),
+            sortable_attributes: Setting::Set(btreeset! { S("age") }),
+            ranking_rules: Setting::NotSet,
+            stop_words: Setting::NotSet,
+            synonyms: Setting::NotSet,
+            distinct_attribute: Setting::NotSet,
+            typo_tolerance: Setting::NotSet,
+            faceting: Setting::NotSet,
+            pagination: Setting::NotSet,
             _kind: std::marker::PhantomData,
         };
         settings.check()
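For context on why the `.into()` calls above could be dropped: with the milli import, these test settings fields hold milli's three-state `Setting<T>` directly. The following is an illustrative sketch of that shape only, assuming the same variant names as upstream; it is not the milli definition itself.

// Illustrative sketch (not the milli type): a three-state update value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Setting<T> {
    Set(T),  // an explicit value was provided
    Reset,   // the field should be cleared back to its default
    NotSet,  // the field was not mentioned in the update at all
}

impl<T> Default for Setting<T> {
    fn default() -> Self {
        Setting::NotSet
    }
}

Because the fields are typed as `Setting<T>` themselves, a literal like `Setting::NotSet` can be written as-is, which is exactly what the `+` side of the hunk does.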
@@ -1,3 +1,5 @@
+use std::str::FromStr;
+
 use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
 use crate::reader::{v5, v6, Document, UpdateFile};
 use crate::Result;
@@ -254,51 +256,50 @@ impl<T> From<v5::Setting<T>> for v6::Setting<T> {
 impl From<v5::ResponseError> for v6::ResponseError {
     fn from(error: v5::ResponseError) -> Self {
         let code = match error.error_code.as_ref() {
-            "index_creation_failed" => v6::Code::CreateIndex,
+            "index_creation_failed" => v6::Code::IndexCreationFailed,
             "index_already_exists" => v6::Code::IndexAlreadyExists,
             "index_not_found" => v6::Code::IndexNotFound,
             "invalid_index_uid" => v6::Code::InvalidIndexUid,
             "invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo,
             "invalid_state" => v6::Code::InvalidState,
-            "primary_key_inference_failed" => v6::Code::NoPrimaryKeyCandidateFound,
-            "index_primary_key_already_exists" => v6::Code::PrimaryKeyAlreadyPresent,
+            "primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound,
+            "index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists,
             "max_fields_limit_exceeded" => v6::Code::MaxFieldsLimitExceeded,
             "missing_document_id" => v6::Code::MissingDocumentId,
             "invalid_document_id" => v6::Code::InvalidDocumentId,
-            "invalid_filter" => v6::Code::Filter,
-            "invalid_sort" => v6::Code::Sort,
+            "invalid_filter" => v6::Code::InvalidSettingsFilterableAttributes,
+            "invalid_sort" => v6::Code::InvalidSettingsSortableAttributes,
             "bad_parameter" => v6::Code::BadParameter,
             "bad_request" => v6::Code::BadRequest,
             "database_size_limit_reached" => v6::Code::DatabaseSizeLimitReached,
             "document_not_found" => v6::Code::DocumentNotFound,
             "internal" => v6::Code::Internal,
             "invalid_geo_field" => v6::Code::InvalidDocumentGeoField,
-            "invalid_ranking_rule" => v6::Code::InvalidRankingRule,
-            "invalid_store_file" => v6::Code::InvalidStore,
-            "invalid_api_key" => v6::Code::InvalidToken,
+            "invalid_ranking_rule" => v6::Code::InvalidSettingsRankingRules,
+            "invalid_store_file" => v6::Code::InvalidStoreFile,
+            "invalid_api_key" => v6::Code::InvalidApiKey,
             "missing_authorization_header" => v6::Code::MissingAuthorizationHeader,
             "no_space_left_on_device" => v6::Code::NoSpaceLeftOnDevice,
             "dump_not_found" => v6::Code::DumpNotFound,
             "task_not_found" => v6::Code::TaskNotFound,
             "payload_too_large" => v6::Code::PayloadTooLarge,
-            "unretrievable_document" => v6::Code::RetrieveDocument,
-            "search_error" => v6::Code::SearchDocuments,
+            "unretrievable_document" => v6::Code::UnretrievableDocument,
             "unsupported_media_type" => v6::Code::UnsupportedMediaType,
-            "dump_already_processing" => v6::Code::DumpAlreadyInProgress,
+            "dump_already_processing" => v6::Code::DumpAlreadyProcessing,
             "dump_process_failed" => v6::Code::DumpProcessFailed,
             "invalid_content_type" => v6::Code::InvalidContentType,
             "missing_content_type" => v6::Code::MissingContentType,
             "malformed_payload" => v6::Code::MalformedPayload,
             "missing_payload" => v6::Code::MissingPayload,
             "api_key_not_found" => v6::Code::ApiKeyNotFound,
-            "missing_parameter" => v6::Code::UnretrievableErrorCode,
+            "missing_parameter" => v6::Code::BadRequest,
             "invalid_api_key_actions" => v6::Code::InvalidApiKeyActions,
             "invalid_api_key_indexes" => v6::Code::InvalidApiKeyIndexes,
             "invalid_api_key_expires_at" => v6::Code::InvalidApiKeyExpiresAt,
             "invalid_api_key_description" => v6::Code::InvalidApiKeyDescription,
             "invalid_api_key_name" => v6::Code::InvalidApiKeyName,
             "invalid_api_key_uid" => v6::Code::InvalidApiKeyUid,
-            "immutable_field" => v6::Code::ImmutableField,
+            "immutable_field" => v6::Code::BadRequest,
             "api_key_already_exists" => v6::Code::ApiKeyAlreadyExists,
             other => {
                 log::warn!("Unknown error code {}", other);
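The `other` arm above only shows the warning it logs for codes it does not recognize; the rest of that arm is outside the hunk. As a standalone sketch of the mapping style used here, with invented variant names rather than the real meilisearch-types `Code` enum, the idea looks like this: renamed codes map directly, retired codes are downgraded, and in this sketch unknown strings fall back to a generic code instead of failing the import.

// Hypothetical, simplified illustration; not the actual dump-import code.
#[derive(Debug, PartialEq, Eq)]
enum Code {
    IndexCreationFailed,
    IndexNotFound,
    BadRequest,
    Internal,
}

fn map_legacy_code(code: &str) -> Code {
    match code {
        "index_creation_failed" => Code::IndexCreationFailed,
        "index_not_found" => Code::IndexNotFound,
        // retired in v6: downgraded rather than rejected
        "missing_parameter" | "immutable_field" => Code::BadRequest,
        other => {
            eprintln!("Unknown error code {other}");
            Code::Internal
        }
    }
}

fn main() {
    assert_eq!(map_legacy_code("index_not_found"), Code::IndexNotFound);
    assert_eq!(map_legacy_code("immutable_field"), Code::BadRequest);
}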
@@ -316,7 +317,26 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
             searchable_attributes: settings.searchable_attributes.into(),
             filterable_attributes: settings.filterable_attributes.into(),
             sortable_attributes: settings.sortable_attributes.into(),
-            ranking_rules: settings.ranking_rules.into(),
+            ranking_rules: {
+                match settings.ranking_rules {
+                    v5::settings::Setting::Set(ranking_rules) => {
+                        let mut new_ranking_rules = vec![];
+                        for rule in ranking_rules {
+                            match v6::RankingRuleView::from_str(&rule) {
+                                Ok(new_rule) => {
+                                    new_ranking_rules.push(new_rule);
+                                }
+                                Err(_) => {
+                                    log::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
+                                }
+                            }
+                        }
+                        v6::Setting::Set(new_ranking_rules)
+                    }
+                    v5::settings::Setting::Reset => v6::Setting::Reset,
+                    v5::settings::Setting::NotSet => v6::Setting::NotSet,
+                }
+            },
             stop_words: settings.stop_words.into(),
             synonyms: settings.synonyms.into(),
             distinct_attribute: settings.distinct_attribute.into(),
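The hunk above parses each legacy ranking rule with `FromStr` and skips, with a warning, any rule that no longer exists. A small self-contained sketch of that "parse what you can, warn on the rest" strategy, where `Rule` is an invented stand-in for `v6::RankingRuleView`:

use std::str::FromStr;

// Stand-in type for illustration only; not the real RankingRuleView.
#[derive(Debug)]
enum Rule {
    Words,
    Typo,
    Sort,
}

impl FromStr for Rule {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "words" => Ok(Rule::Words),
            "typo" => Ok(Rule::Typo),
            "sort" => Ok(Rule::Sort),
            _ => Err(()),
        }
    }
}

fn convert(rules: Vec<String>) -> Vec<Rule> {
    rules
        .into_iter()
        .filter_map(|r| match Rule::from_str(&r) {
            Ok(rule) => Some(rule),
            Err(_) => {
                eprintln!("The ranking rule `{r}` does not exist anymore; skipping it.");
                None
            }
        })
        .collect()
}

fn main() {
    let rules = vec!["words".to_string(), "desc(rank)".to_string(), "typo".to_string()];
    println!("{:?}", convert(rules)); // keeps Words and Typo, skips the unknown rule
}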
@@ -26,7 +26,7 @@ pub type Kind = crate::KindDump;
 pub type Details = meilisearch_types::tasks::Details;

 // everything related to the settings
-pub type Setting<T> = meilisearch_types::settings::Setting<T>;
+pub type Setting<T> = meilisearch_types::milli::update::Setting<T>;
 pub type TypoTolerance = meilisearch_types::settings::TypoSettings;
 pub type MinWordSizeForTypos = meilisearch_types::settings::MinWordSizeTyposSetting;
 pub type FacetingSettings = meilisearch_types::settings::FacetingSettings;
@@ -40,6 +40,7 @@ pub type IndexUid = meilisearch_types::index_uid::IndexUid;
 // everything related to the errors
 pub type ResponseError = meilisearch_types::error::ResponseError;
 pub type Code = meilisearch_types::error::Code;
+pub type RankingRuleView = meilisearch_types::settings::RankingRuleView;

 pub struct V6Reader {
     dump: TempDir,
@@ -139,8 +139,8 @@ impl ErrorCode for Error {
         match self {
             Error::IndexNotFound(_) => Code::IndexNotFound,
             Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
-            Error::SwapDuplicateIndexesFound(_) => Code::InvalidDuplicateIndexesFound,
-            Error::SwapDuplicateIndexFound(_) => Code::InvalidDuplicateIndexesFound,
+            Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
+            Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
             Error::SwapIndexNotFound(_) => Code::InvalidSwapIndexes,
             Error::SwapIndexesNotFound(_) => Code::InvalidSwapIndexes,
             Error::InvalidTaskDate { field, .. } => (*field).into(),
@@ -150,8 +150,8 @@ impl ErrorCode for Error {
             Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledBy,
             Error::InvalidIndexUid { .. } => Code::InvalidIndexUid,
             Error::TaskNotFound(_) => Code::TaskNotFound,
-            Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery,
-            Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery,
+            Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
+            Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
             Error::Dump(e) => e.error_code(),
             Error::Milli(e) => e.error_code(),
             Error::ProcessBatchPanicked => Code::Internal,
@@ -1,7 +1,7 @@
 use std::error::Error;

 use meilisearch_types::error::{Code, ErrorCode};
-use meilisearch_types::{internal_error, keys};
+use meilisearch_types::internal_error;

 pub type Result<T> = std::result::Result<T, AuthControllerError>;

@@ -11,8 +11,6 @@ pub enum AuthControllerError {
     ApiKeyNotFound(String),
     #[error("`uid` field value `{0}` is already an existing API key.")]
     ApiKeyAlreadyExists(String),
-    #[error(transparent)]
-    ApiKey(#[from] keys::Error),
     #[error("Internal error: {0}")]
     Internal(Box<dyn Error + Send + Sync + 'static>),
 }
@@ -27,7 +25,6 @@ internal_error!(
 impl ErrorCode for AuthControllerError {
     fn error_code(&self) -> Code {
         match self {
-            Self::ApiKey(e) => e.error_code(),
             Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
             Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
             Self::Internal(_) => Code::Internal,
@@ -8,10 +8,9 @@ use std::path::Path;
 use std::sync::Arc;

 use error::{AuthControllerError, Result};
-use meilisearch_types::keys::{Action, Key};
+use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
 use meilisearch_types::star_or::StarOr;
 use serde::{Deserialize, Serialize};
-use serde_json::Value;
 pub use store::open_auth_store_env;
 use store::{generate_key_as_hexa, HeedAuthStore};
 use time::OffsetDateTime;
@@ -34,17 +33,18 @@ impl AuthController {
         Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
     }

-    pub fn create_key(&self, value: Value) -> Result<Key> {
-        let key = Key::create_from_value(value)?;
-        match self.store.get_api_key(key.uid)? {
-            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(key.uid.to_string())),
-            None => self.store.put_api_key(key),
+    pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
+        match self.store.get_api_key(create_key.uid)? {
+            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
+            None => self.store.put_api_key(create_key.to_key()),
         }
     }

-    pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
+    pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result<Key> {
         let mut key = self.get_key(uid)?;
-        key.update_from_value(value)?;
+        key.description = patch.description;
+        key.name = patch.name;
+        key.updated_at = OffsetDateTime::now_utc();
         self.store.put_api_key(key)
     }

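The hunk above replaces the old `serde_json::Value`-based key creation and patching with dedicated payload types, so the controller no longer validates raw JSON itself. The following is a hedged, self-contained sketch of that shape only; the field names, the store, and the error handling are invented stand-ins, not the meilisearch-auth API.

use std::collections::HashMap;

// Invented stand-ins for the real payload and key types.
struct CreateApiKey { uid: String, description: Option<String> }
struct PatchApiKey { name: Option<String>, description: Option<String> }
struct Key { uid: String, name: Option<String>, description: Option<String> }

impl CreateApiKey {
    fn to_key(self) -> Key {
        Key { uid: self.uid, name: None, description: self.description }
    }
}

#[derive(Default)]
struct Controller { store: HashMap<String, Key> }

impl Controller {
    // Uniqueness is the only runtime check left; the payload itself was
    // already validated while it was being deserialized.
    fn create_key(&mut self, create_key: CreateApiKey) -> Result<(), String> {
        if self.store.contains_key(&create_key.uid) {
            return Err(format!("`uid` field value `{}` is already an existing API key.", create_key.uid));
        }
        self.store.insert(create_key.uid.clone(), create_key.to_key());
        Ok(())
    }

    // Mirrors the shape of the change above: the patch fields replace the
    // stored ones wholesale (the real types are richer than these stand-ins).
    fn update_key(&mut self, uid: &str, patch: PatchApiKey) -> Result<(), String> {
        let key = self.store.get_mut(uid).ok_or_else(|| format!("API key `{uid}` not found."))?;
        key.name = patch.name;
        key.description = patch.description;
        Ok(())
    }
}

fn main() {
    let mut auth = Controller::default();
    auth.create_key(CreateApiKey { uid: "k1".into(), description: None }).unwrap();
    auth.update_key("k1", PatchApiKey { name: Some("search".into()), description: None }).unwrap();
}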
@@ -9,14 +9,14 @@ actix-web = { version = "4.2.1", default-features = false }
 anyhow = "1.0.65"
 convert_case = "0.6.0"
 csv = "1.1.6"
-deserr = { version = "0.1.2", features = ["serde-json"] }
+deserr = "0.1.4"
 either = { version = "1.6.1", features = ["serde"] }
 enum-iterator = "1.1.3"
 file-store = { path = "../file-store" }
 flate2 = "1.0.24"
 fst = "0.4.7"
 memmap2 = "0.5.7"
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.38.0", default-features = false }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.39.0", default-features = false }
 proptest = { version = "1.0.0", optional = true }
 proptest-derive = { version = "0.3.0", optional = true }
 roaring = { version = "0.10.0", features = ["serde"] }
@@ -1,12 +1,17 @@
+use std::convert::Infallible;
+use std::marker::PhantomData;
 use std::{fmt, io};

 use actix_web::http::StatusCode;
 use actix_web::{self as aweb, HttpResponseBuilder};
 use aweb::rt::task::JoinError;
 use convert_case::Casing;
+use deserr::{DeserializeError, IntoValue, MergeWithError, ValuePointerRef};
 use milli::heed::{Error as HeedError, MdbError};
 use serde::{Deserialize, Serialize};

+use self::deserr_codes::MissingIndexUid;
+
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 #[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
@@ -31,7 +36,7 @@ impl ResponseError {
         Self {
             code: code.http(),
             message,
-            error_code: code.err_code().error_name.to_string(),
+            error_code: code.err_code().error_name,
             error_type: code.type_(),
             error_link: code.url(),
         }
@@ -48,7 +53,7 @@ impl std::error::Error for ResponseError {}

 impl<T> From<T> for ResponseError
 where
-    T: ErrorCode,
+    T: std::error::Error + ErrorCode,
 {
     fn from(other: T) -> Self {
         Self::from_msg(other.to_string(), other.error_code())
@@ -66,7 +71,7 @@ impl aweb::error::ResponseError for ResponseError {
     }
 }

-pub trait ErrorCode: std::error::Error {
+pub trait ErrorCode {
     fn error_code(&self) -> Code;

     /// returns the HTTP status code associated with the error
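The two hunks above move the `std::error::Error` requirement off the `ErrorCode` trait and onto the blanket `From<T> for ResponseError` conversion. A hedged, simplified sketch of why that matters (local stand-in types, not the meilisearch-types definitions): zero-sized marker types, like the ones the generated `deserr_codes` module provides, can then implement `ErrorCode` without implementing `Error`, while real error types still convert into an HTTP response.

// Simplified illustration; field and trait shapes are stand-ins.
trait ErrorCode {
    fn error_code(&self) -> &'static str;
}

// A marker type with no Display/Error impl can still carry a code.
#[derive(Default)]
struct MissingIndexUid;

impl ErrorCode for MissingIndexUid {
    fn error_code(&self) -> &'static str {
        "missing_index_uid"
    }
}

struct ResponseError {
    message: String,
    code: &'static str,
}

// Only the conversion into a response needs the full error machinery.
impl<T> From<T> for ResponseError
where
    T: std::error::Error + ErrorCode,
{
    fn from(other: T) -> Self {
        ResponseError { message: other.to_string(), code: other.error_code() }
    }
}

fn main() {
    assert_eq!(MissingIndexUid.error_code(), "missing_index_uid");
}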
@ -111,433 +116,23 @@ impl fmt::Display for ErrorType {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
macro_rules! make_error_codes {
|
||||||
|
($($code_ident:ident, $err_type:ident, $status:ident);*) => {
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
pub enum Code {
|
pub enum Code {
|
||||||
// error related to your setup
|
$($code_ident),*
|
||||||
IoError,
|
|
||||||
NoSpaceLeftOnDevice,
|
|
||||||
TooManyOpenFiles,
|
|
||||||
|
|
||||||
// index related error
|
|
||||||
CreateIndex,
|
|
||||||
IndexAlreadyExists,
|
|
||||||
InvalidIndexPrimaryKey,
|
|
||||||
IndexNotFound,
|
|
||||||
InvalidIndexUid,
|
|
||||||
MissingIndexUid,
|
|
||||||
InvalidMinWordLengthForTypo,
|
|
||||||
InvalidIndexLimit,
|
|
||||||
InvalidIndexOffset,
|
|
||||||
|
|
||||||
DuplicateIndexFound,
|
|
||||||
|
|
||||||
// invalid state error
|
|
||||||
InvalidState,
|
|
||||||
NoPrimaryKeyCandidateFound,
|
|
||||||
MultiplePrimaryKeyCandidatesFound,
|
|
||||||
PrimaryKeyAlreadyPresent,
|
|
||||||
|
|
||||||
MaxFieldsLimitExceeded,
|
|
||||||
MissingDocumentId,
|
|
||||||
InvalidDocumentId,
|
|
||||||
|
|
||||||
Filter,
|
|
||||||
Sort,
|
|
||||||
|
|
||||||
// Invalid swap-indexes
|
|
||||||
InvalidSwapIndexes,
|
|
||||||
InvalidDuplicateIndexesFound,
|
|
||||||
|
|
||||||
// Invalid settings update request
|
|
||||||
InvalidSettingsDisplayedAttributes,
|
|
||||||
InvalidSettingsSearchableAttributes,
|
|
||||||
InvalidSettingsFilterableAttributes,
|
|
||||||
InvalidSettingsSortableAttributes,
|
|
||||||
InvalidSettingsRankingRules,
|
|
||||||
InvalidSettingsStopWords,
|
|
||||||
InvalidSettingsSynonyms,
|
|
||||||
InvalidSettingsDistinctAttribute,
|
|
||||||
InvalidSettingsTypoTolerance,
|
|
||||||
InvalidSettingsFaceting,
|
|
||||||
InvalidSettingsPagination,
|
|
||||||
|
|
||||||
// Invalid search request
|
|
||||||
InvalidSearchQ,
|
|
||||||
InvalidSearchOffset,
|
|
||||||
InvalidSearchLimit,
|
|
||||||
InvalidSearchPage,
|
|
||||||
InvalidSearchHitsPerPage,
|
|
||||||
InvalidSearchAttributesToRetrieve,
|
|
||||||
InvalidSearchAttributesToCrop,
|
|
||||||
InvalidSearchCropLength,
|
|
||||||
InvalidSearchAttributesToHighlight,
|
|
||||||
InvalidSearchShowMatchesPosition,
|
|
||||||
InvalidSearchFilter,
|
|
||||||
InvalidSearchSort,
|
|
||||||
InvalidSearchFacets,
|
|
||||||
InvalidSearchHighlightPreTag,
|
|
||||||
InvalidSearchHighlightPostTag,
|
|
||||||
InvalidSearchCropMarker,
|
|
||||||
InvalidSearchMatchingStrategy,
|
|
||||||
|
|
||||||
// Related to the tasks
|
|
||||||
InvalidTaskUids,
|
|
||||||
InvalidTaskTypes,
|
|
||||||
InvalidTaskStatuses,
|
|
||||||
InvalidTaskCanceledBy,
|
|
||||||
InvalidTaskLimit,
|
|
||||||
InvalidTaskFrom,
|
|
||||||
InvalidTaskBeforeEnqueuedAt,
|
|
||||||
InvalidTaskAfterEnqueuedAt,
|
|
||||||
InvalidTaskBeforeStartedAt,
|
|
||||||
InvalidTaskAfterStartedAt,
|
|
||||||
InvalidTaskBeforeFinishedAt,
|
|
||||||
InvalidTaskAfterFinishedAt,
|
|
||||||
|
|
||||||
// Documents API
|
|
||||||
InvalidDocumentFields,
|
|
||||||
InvalidDocumentLimit,
|
|
||||||
InvalidDocumentOffset,
|
|
||||||
|
|
||||||
BadParameter,
|
|
||||||
BadRequest,
|
|
||||||
DatabaseSizeLimitReached,
|
|
||||||
DocumentNotFound,
|
|
||||||
Internal,
|
|
||||||
InvalidDocumentGeoField,
|
|
||||||
InvalidRankingRule,
|
|
||||||
InvalidStore,
|
|
||||||
InvalidToken,
|
|
||||||
MissingAuthorizationHeader,
|
|
||||||
MissingMasterKey,
|
|
||||||
DumpNotFound,
|
|
||||||
TaskNotFound,
|
|
||||||
TaskDeletionWithEmptyQuery,
|
|
||||||
TaskCancelationWithEmptyQuery,
|
|
||||||
PayloadTooLarge,
|
|
||||||
RetrieveDocument,
|
|
||||||
SearchDocuments,
|
|
||||||
UnsupportedMediaType,
|
|
||||||
|
|
||||||
DumpAlreadyInProgress,
|
|
||||||
DumpProcessFailed,
|
|
||||||
// Only used when importing a dump
|
|
||||||
UnretrievableErrorCode,
|
|
||||||
|
|
||||||
InvalidContentType,
|
|
||||||
MissingContentType,
|
|
||||||
MalformedPayload,
|
|
||||||
MissingPayload,
|
|
||||||
|
|
||||||
ApiKeyNotFound,
|
|
||||||
|
|
||||||
MissingApiKeyActions,
|
|
||||||
MissingApiKeyExpiresAt,
|
|
||||||
MissingApiKeyIndexes,
|
|
||||||
|
|
||||||
InvalidApiKeyOffset,
|
|
||||||
InvalidApiKeyLimit,
|
|
||||||
InvalidApiKeyActions,
|
|
||||||
InvalidApiKeyIndexes,
|
|
||||||
InvalidApiKeyExpiresAt,
|
|
||||||
InvalidApiKeyDescription,
|
|
||||||
InvalidApiKeyName,
|
|
||||||
InvalidApiKeyUid,
|
|
||||||
ImmutableField,
|
|
||||||
ApiKeyAlreadyExists,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Code {
|
impl Code {
|
||||||
/// associate a `Code` variant to the actual ErrCode
|
/// associate a `Code` variant to the actual ErrCode
|
||||||
fn err_code(&self) -> ErrCode {
|
fn err_code(&self) -> ErrCode {
|
||||||
use Code::*;
|
|
||||||
|
|
||||||
match self {
|
match self {
|
||||||
// related to the setup
|
$(
|
||||||
IoError => ErrCode::system("io_error", StatusCode::UNPROCESSABLE_ENTITY),
|
Code::$code_ident => {
|
||||||
TooManyOpenFiles => {
|
ErrCode::$err_type( stringify!($code_ident).to_case(convert_case::Case::Snake), StatusCode::$status)
|
||||||
ErrCode::system("too_many_open_files", StatusCode::UNPROCESSABLE_ENTITY)
|
|
||||||
}
|
}
|
||||||
NoSpaceLeftOnDevice => {
|
)*
|
||||||
ErrCode::system("no_space_left_on_device", StatusCode::UNPROCESSABLE_ENTITY)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// index related errors
|
|
||||||
// create index is thrown on internal error while creating an index.
|
|
||||||
CreateIndex => {
|
|
||||||
ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
}
|
||||||
IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
|
|
||||||
// thrown when requesting an unexisting index
|
|
||||||
IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
|
|
||||||
InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
|
|
||||||
MissingIndexUid => ErrCode::invalid("missing_index_uid", StatusCode::BAD_REQUEST),
|
|
||||||
InvalidIndexPrimaryKey => {
|
|
||||||
ErrCode::invalid("invalid_index_primary_key", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidIndexLimit => ErrCode::invalid("invalid_index_limit", StatusCode::BAD_REQUEST),
|
|
||||||
InvalidIndexOffset => ErrCode::invalid("invalid_index_offset", StatusCode::BAD_REQUEST),
|
|
||||||
|
|
||||||
// invalid state error
|
|
||||||
InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
|
|
||||||
// thrown when no primary key has been set
|
|
||||||
NoPrimaryKeyCandidateFound => {
|
|
||||||
ErrCode::invalid("index_primary_key_no_candidate_found", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
MultiplePrimaryKeyCandidatesFound => ErrCode::invalid(
|
|
||||||
"index_primary_key_multiple_candidates_found",
|
|
||||||
StatusCode::BAD_REQUEST,
|
|
||||||
),
|
|
||||||
// error thrown when trying to set an already existing primary key
|
|
||||||
PrimaryKeyAlreadyPresent => {
|
|
||||||
ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
// invalid ranking rule
|
|
||||||
InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),
|
|
||||||
|
|
||||||
// invalid database
|
|
||||||
InvalidStore => {
|
|
||||||
ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
|
|
||||||
// invalid document
|
|
||||||
MaxFieldsLimitExceeded => {
|
|
||||||
ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
|
|
||||||
InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),
|
|
||||||
|
|
||||||
// error related to filters
|
|
||||||
Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
|
|
||||||
// error related to sorts
|
|
||||||
Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST),
|
|
||||||
|
|
||||||
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
|
|
||||||
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
|
|
||||||
DatabaseSizeLimitReached => {
|
|
||||||
ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
|
|
||||||
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
|
|
||||||
InvalidDocumentGeoField => {
|
|
||||||
ErrCode::invalid("invalid_document_geo_field", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
|
|
||||||
MissingAuthorizationHeader => {
|
|
||||||
ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
|
|
||||||
}
|
|
||||||
MissingMasterKey => {
|
|
||||||
ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED)
|
|
||||||
}
|
|
||||||
TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
|
|
||||||
TaskDeletionWithEmptyQuery => {
|
|
||||||
ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
TaskCancelationWithEmptyQuery => {
|
|
||||||
ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
|
|
||||||
PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
|
|
||||||
RetrieveDocument => {
|
|
||||||
ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
|
|
||||||
UnsupportedMediaType => {
|
|
||||||
ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
|
|
||||||
}
|
|
||||||
|
|
||||||
// error related to dump
|
|
||||||
DumpAlreadyInProgress => {
|
|
||||||
ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
|
|
||||||
}
|
|
||||||
DumpProcessFailed => {
|
|
||||||
ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
MissingContentType => {
|
|
||||||
ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
|
|
||||||
}
|
|
||||||
MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
|
|
||||||
InvalidContentType => {
|
|
||||||
ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
|
|
||||||
}
|
|
||||||
MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
|
|
||||||
// This one can only happen when importing a dump and encountering an unknown code in the task queue.
|
|
||||||
UnretrievableErrorCode => {
|
|
||||||
ErrCode::invalid("unretrievable_error_code", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
|
|
||||||
// error related to keys
|
|
||||||
ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
|
|
||||||
|
|
||||||
MissingApiKeyExpiresAt => {
|
|
||||||
ErrCode::invalid("missing_api_key_expires_at", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
|
|
||||||
MissingApiKeyActions => {
|
|
||||||
ErrCode::invalid("missing_api_key_actions", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
|
|
||||||
MissingApiKeyIndexes => {
|
|
||||||
ErrCode::invalid("missing_api_key_indexes", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
|
|
||||||
InvalidApiKeyOffset => {
|
|
||||||
ErrCode::invalid("invalid_api_key_offset", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidApiKeyLimit => {
|
|
||||||
ErrCode::invalid("invalid_api_key_limit", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidApiKeyActions => {
|
|
||||||
ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidApiKeyIndexes => {
|
|
||||||
ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidApiKeyExpiresAt => {
|
|
||||||
ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidApiKeyDescription => {
|
|
||||||
ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST),
|
|
||||||
InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST),
|
|
||||||
ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT),
|
|
||||||
ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST),
|
|
||||||
InvalidMinWordLengthForTypo => {
|
|
||||||
ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
DuplicateIndexFound => {
|
|
||||||
ErrCode::invalid("duplicate_index_found", StatusCode::BAD_REQUEST)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Swap indexes error
|
|
||||||
-            InvalidSwapIndexes => ErrCode::invalid("invalid_swap_indexes", StatusCode::BAD_REQUEST),
-            InvalidDuplicateIndexesFound => ErrCode::invalid("invalid_swap_duplicate_index_found", StatusCode::BAD_REQUEST),
-
-            // Invalid settings
-            InvalidSettingsDisplayedAttributes => ErrCode::invalid("invalid_settings_displayed_attributes", StatusCode::BAD_REQUEST),
-            InvalidSettingsSearchableAttributes => ErrCode::invalid("invalid_settings_searchable_attributes", StatusCode::BAD_REQUEST),
-            InvalidSettingsFilterableAttributes => ErrCode::invalid("invalid_settings_filterable_attributes", StatusCode::BAD_REQUEST),
-            InvalidSettingsSortableAttributes => ErrCode::invalid("invalid_settings_sortable_attributes", StatusCode::BAD_REQUEST),
-            InvalidSettingsRankingRules => ErrCode::invalid("invalid_settings_ranking_rules", StatusCode::BAD_REQUEST),
-            InvalidSettingsStopWords => ErrCode::invalid("invalid_settings_stop_words", StatusCode::BAD_REQUEST),
-            InvalidSettingsSynonyms => ErrCode::invalid("invalid_settings_synonyms", StatusCode::BAD_REQUEST),
-            InvalidSettingsDistinctAttribute => ErrCode::invalid("invalid_settings_distinct_attribute", StatusCode::BAD_REQUEST),
-            InvalidSettingsTypoTolerance => ErrCode::invalid("invalid_settings_typo_tolerance", StatusCode::BAD_REQUEST),
-            InvalidSettingsFaceting => ErrCode::invalid("invalid_settings_faceting", StatusCode::BAD_REQUEST),
-            InvalidSettingsPagination => ErrCode::invalid("invalid_settings_pagination", StatusCode::BAD_REQUEST),
-
-            // Invalid search
-            InvalidSearchQ => ErrCode::invalid("invalid_search_q", StatusCode::BAD_REQUEST),
-            InvalidSearchOffset => ErrCode::invalid("invalid_search_offset", StatusCode::BAD_REQUEST),
-            InvalidSearchLimit => ErrCode::invalid("invalid_search_limit", StatusCode::BAD_REQUEST),
-            InvalidSearchPage => ErrCode::invalid("invalid_search_page", StatusCode::BAD_REQUEST),
-            InvalidSearchHitsPerPage => ErrCode::invalid("invalid_search_hits_per_page", StatusCode::BAD_REQUEST),
-            InvalidSearchAttributesToRetrieve => ErrCode::invalid("invalid_search_attributes_to_retrieve", StatusCode::BAD_REQUEST),
-            InvalidSearchAttributesToCrop => ErrCode::invalid("invalid_search_attributes_to_crop", StatusCode::BAD_REQUEST),
-            InvalidSearchCropLength => ErrCode::invalid("invalid_search_crop_length", StatusCode::BAD_REQUEST),
-            InvalidSearchAttributesToHighlight => ErrCode::invalid("invalid_search_attributes_to_highlight", StatusCode::BAD_REQUEST),
-            InvalidSearchShowMatchesPosition => ErrCode::invalid("invalid_search_show_matches_position", StatusCode::BAD_REQUEST),
-            InvalidSearchFilter => ErrCode::invalid("invalid_search_filter", StatusCode::BAD_REQUEST),
-            InvalidSearchSort => ErrCode::invalid("invalid_search_sort", StatusCode::BAD_REQUEST),
-            InvalidSearchFacets => ErrCode::invalid("invalid_search_facets", StatusCode::BAD_REQUEST),
-            InvalidSearchHighlightPreTag => ErrCode::invalid("invalid_search_highlight_pre_tag", StatusCode::BAD_REQUEST),
-            InvalidSearchHighlightPostTag => ErrCode::invalid("invalid_search_highlight_post_tag", StatusCode::BAD_REQUEST),
-            InvalidSearchCropMarker => ErrCode::invalid("invalid_search_crop_marker", StatusCode::BAD_REQUEST),
-            InvalidSearchMatchingStrategy => ErrCode::invalid("invalid_search_matching_strategy", StatusCode::BAD_REQUEST),
-
-            // Related to the tasks
-            InvalidTaskUids => ErrCode::invalid("invalid_task_uids", StatusCode::BAD_REQUEST),
-            InvalidTaskTypes => ErrCode::invalid("invalid_task_types", StatusCode::BAD_REQUEST),
-            InvalidTaskStatuses => ErrCode::invalid("invalid_task_statuses", StatusCode::BAD_REQUEST),
-            InvalidTaskCanceledBy => ErrCode::invalid("invalid_task_canceled_by", StatusCode::BAD_REQUEST),
-            InvalidTaskLimit => ErrCode::invalid("invalid_task_limit", StatusCode::BAD_REQUEST),
-            InvalidTaskFrom => ErrCode::invalid("invalid_task_from", StatusCode::BAD_REQUEST),
-            InvalidTaskBeforeEnqueuedAt => ErrCode::invalid("invalid_task_before_enqueued_at", StatusCode::BAD_REQUEST),
-            InvalidTaskAfterEnqueuedAt => ErrCode::invalid("invalid_task_after_enqueued_at", StatusCode::BAD_REQUEST),
-            InvalidTaskBeforeStartedAt => ErrCode::invalid("invalid_task_before_started_at", StatusCode::BAD_REQUEST),
-            InvalidTaskAfterStartedAt => ErrCode::invalid("invalid_task_after_started_at", StatusCode::BAD_REQUEST),
-            InvalidTaskBeforeFinishedAt => ErrCode::invalid("invalid_task_before_finished_at", StatusCode::BAD_REQUEST),
-            InvalidTaskAfterFinishedAt => ErrCode::invalid("invalid_task_after_finished_at", StatusCode::BAD_REQUEST),
-
-            InvalidDocumentFields => ErrCode::invalid("invalid_document_fields", StatusCode::BAD_REQUEST),
-            InvalidDocumentLimit => ErrCode::invalid("invalid_document_limit", StatusCode::BAD_REQUEST),
-            InvalidDocumentOffset => ErrCode::invalid("invalid_document_offset", StatusCode::BAD_REQUEST),
-        }
-    }

    /// return the HTTP status code associated with the `Code`
    fn http(&self) -> StatusCode {
        self.err_code().status_code
@@ -561,28 +156,158 @@ impl Code {
        )
    }
}
+    pub mod deserr_codes {
+        use super::{Code, ErrorCode};
+        $(
+            #[derive(Default)]
+            pub struct $code_ident;
+            impl ErrorCode for $code_ident {
+                fn error_code(&self) -> Code {
+                    Code::$code_ident
+                }
+            }
+        )*
+        }
+    }
+}

+make_error_codes! {
+    ApiKeyAlreadyExists, invalid, CONFLICT;
+    ApiKeyNotFound, invalid, NOT_FOUND;
+    BadParameter, invalid, BAD_REQUEST;
+    BadRequest, invalid, BAD_REQUEST;
+    DatabaseSizeLimitReached, internal, INTERNAL_SERVER_ERROR;
+    DocumentNotFound, invalid, NOT_FOUND;
+    DumpAlreadyProcessing, invalid, CONFLICT;
+    DumpNotFound, invalid, NOT_FOUND;
+    DumpProcessFailed, internal, INTERNAL_SERVER_ERROR;
+    DuplicateIndexFound, invalid, BAD_REQUEST;

+    ImmutableApiKeyUid, invalid, BAD_REQUEST;
+    ImmutableApiKeyKey, invalid, BAD_REQUEST;
+    ImmutableApiKeyActions, invalid, BAD_REQUEST;
+    ImmutableApiKeyIndexes, invalid, BAD_REQUEST;
+    ImmutableApiKeyExpiresAt, invalid, BAD_REQUEST;
+    ImmutableApiKeyCreatedAt, invalid, BAD_REQUEST;
+    ImmutableApiKeyUpdatedAt, invalid, BAD_REQUEST;

+    ImmutableIndexUid, invalid, BAD_REQUEST;
+    ImmutableIndexCreatedAt, invalid, BAD_REQUEST;
+    ImmutableIndexUpdatedAt, invalid, BAD_REQUEST;

+    IndexAlreadyExists, invalid, CONFLICT;
+    IndexCreationFailed, internal, INTERNAL_SERVER_ERROR;
+    IndexNotFound, invalid, NOT_FOUND;
+    IndexPrimaryKeyAlreadyExists, invalid, BAD_REQUEST;
+    IndexPrimaryKeyNoCandidateFound, invalid, BAD_REQUEST;
+    IndexPrimaryKeyMultipleCandidatesFound, invalid, BAD_REQUEST;
+    Internal, internal, INTERNAL_SERVER_ERROR;
+    InvalidApiKeyActions, invalid, BAD_REQUEST;
+    InvalidApiKeyDescription, invalid, BAD_REQUEST;
+    InvalidApiKeyExpiresAt, invalid, BAD_REQUEST;
+    InvalidApiKeyIndexes, invalid, BAD_REQUEST;
+    InvalidApiKeyLimit, invalid, BAD_REQUEST;
+    InvalidApiKeyName, invalid, BAD_REQUEST;
+    InvalidApiKeyOffset, invalid, BAD_REQUEST;
+    InvalidApiKeyUid, invalid, BAD_REQUEST;
+    InvalidApiKey, authentication, FORBIDDEN;
+    InvalidContentType, invalid, UNSUPPORTED_MEDIA_TYPE;
+    InvalidDocumentFields, invalid, BAD_REQUEST;
+    InvalidDocumentGeoField, invalid, BAD_REQUEST;
+    InvalidDocumentId, invalid, BAD_REQUEST;
+    InvalidDocumentLimit, invalid, BAD_REQUEST;
+    InvalidDocumentOffset, invalid, BAD_REQUEST;
+    InvalidIndexLimit, invalid, BAD_REQUEST;
+    InvalidIndexOffset, invalid, BAD_REQUEST;
+    InvalidIndexPrimaryKey, invalid, BAD_REQUEST;
+    InvalidIndexUid, invalid, BAD_REQUEST;
+    InvalidMinWordLengthForTypo, invalid, BAD_REQUEST;
+    InvalidSearchAttributesToCrop, invalid, BAD_REQUEST;
+    InvalidSearchAttributesToHighlight, invalid, BAD_REQUEST;
+    InvalidSearchAttributesToRetrieve, invalid, BAD_REQUEST;
+    InvalidSearchCropLength, invalid, BAD_REQUEST;
+    InvalidSearchCropMarker, invalid, BAD_REQUEST;
+    InvalidSearchFacets, invalid, BAD_REQUEST;
+    InvalidSearchFilter, invalid, BAD_REQUEST;
+    InvalidSearchHighlightPostTag, invalid, BAD_REQUEST;
+    InvalidSearchHighlightPreTag, invalid, BAD_REQUEST;
+    InvalidSearchHitsPerPage, invalid, BAD_REQUEST;
+    InvalidSearchLimit, invalid, BAD_REQUEST;
+    InvalidSearchMatchingStrategy, invalid, BAD_REQUEST;
+    InvalidSearchOffset, invalid, BAD_REQUEST;
+    InvalidSearchPage, invalid, BAD_REQUEST;
+    InvalidSearchQ, invalid, BAD_REQUEST;
+    InvalidSearchShowMatchesPosition, invalid, BAD_REQUEST;
+    InvalidSearchSort, invalid, BAD_REQUEST;
+    InvalidSettingsDisplayedAttributes, invalid, BAD_REQUEST;
+    InvalidSettingsDistinctAttribute, invalid, BAD_REQUEST;
+    InvalidSettingsFaceting, invalid, BAD_REQUEST;
+    InvalidSettingsFilterableAttributes, invalid, BAD_REQUEST;
+    InvalidSettingsPagination, invalid, BAD_REQUEST;
+    InvalidSettingsRankingRules, invalid, BAD_REQUEST;
+    InvalidSettingsSearchableAttributes, invalid, BAD_REQUEST;
+    InvalidSettingsSortableAttributes, invalid, BAD_REQUEST;
+    InvalidSettingsStopWords, invalid, BAD_REQUEST;
+    InvalidSettingsSynonyms, invalid, BAD_REQUEST;
+    InvalidSettingsTypoTolerance, invalid, BAD_REQUEST;
+    InvalidState, internal, INTERNAL_SERVER_ERROR;
+    InvalidStoreFile, internal, INTERNAL_SERVER_ERROR;
+    InvalidSwapDuplicateIndexFound, invalid, BAD_REQUEST;
+    InvalidSwapIndexes, invalid, BAD_REQUEST;
+    InvalidTaskAfterEnqueuedAt, invalid, BAD_REQUEST;
+    InvalidTaskAfterFinishedAt, invalid, BAD_REQUEST;
+    InvalidTaskAfterStartedAt, invalid, BAD_REQUEST;
+    InvalidTaskBeforeEnqueuedAt, invalid, BAD_REQUEST;
+    InvalidTaskBeforeFinishedAt, invalid, BAD_REQUEST;
+    InvalidTaskBeforeStartedAt, invalid, BAD_REQUEST;
+    InvalidTaskCanceledBy, invalid, BAD_REQUEST;
+    InvalidTaskFrom, invalid, BAD_REQUEST;
+    InvalidTaskLimit, invalid, BAD_REQUEST;
+    InvalidTaskStatuses, invalid, BAD_REQUEST;
+    InvalidTaskTypes, invalid, BAD_REQUEST;
+    InvalidTaskUids, invalid, BAD_REQUEST;
+    IoError, system, UNPROCESSABLE_ENTITY;
+    MalformedPayload, invalid, BAD_REQUEST;
+    MaxFieldsLimitExceeded, invalid, BAD_REQUEST;
+    MissingApiKeyActions, invalid, BAD_REQUEST;
+    MissingApiKeyExpiresAt, invalid, BAD_REQUEST;
+    MissingApiKeyIndexes, invalid, BAD_REQUEST;
+    MissingAuthorizationHeader, authentication, UNAUTHORIZED;
+    MissingContentType, invalid, UNSUPPORTED_MEDIA_TYPE;
+    MissingDocumentId, invalid, BAD_REQUEST;
+    MissingIndexUid, invalid, BAD_REQUEST;
+    MissingMasterKey, authentication, UNAUTHORIZED;
+    MissingPayload, invalid, BAD_REQUEST;
+    MissingTaskFilters, invalid, BAD_REQUEST;
+    NoSpaceLeftOnDevice, system, UNPROCESSABLE_ENTITY;
+    PayloadTooLarge, invalid, PAYLOAD_TOO_LARGE;
+    TaskNotFound, invalid, NOT_FOUND;
+    TooManyOpenFiles, system, UNPROCESSABLE_ENTITY;
+    UnretrievableDocument, internal, BAD_REQUEST;
+    UnretrievableErrorCode, invalid, BAD_REQUEST;
+    UnsupportedMediaType, invalid, UNSUPPORTED_MEDIA_TYPE
+}
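Illustrative note (not part of the commit): for a single row of the invocation above, such as `InvalidSearchQ, invalid, BAD_REQUEST`, the `deserr_codes` arm of the macro shown earlier expands to roughly the following; the matching `Code::InvalidSearchQ` variant and its `ErrCode::invalid(...)` arm are produced by parts of the macro that sit outside this hunk, so that side is an assumption.

    // Hand-substituted expansion of the `$( ... )*` block for one $code_ident:
    #[derive(Default)]
    pub struct InvalidSearchQ;

    impl ErrorCode for InvalidSearchQ {
        fn error_code(&self) -> Code {
            Code::InvalidSearchQ
        }
    }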
/// Internal structure providing a convenient way to create error codes
struct ErrCode {
    status_code: StatusCode,
    error_type: ErrorType,
-    error_name: &'static str,
+    error_name: String,
}

impl ErrCode {
-    fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn authentication(error_name: String, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
    }

-    fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn internal(error_name: String, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
    }

-    fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn invalid(error_name: String, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
    }

-    fn system(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn system(error_name: String, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::System }
    }
}
@@ -608,26 +333,26 @@ impl ErrorCode for milli::Error {
                | UserError::DocumentLimitReached
                | UserError::AccessingSoftDeletedDocument { .. }
                | UserError::UnknownInternalDocumentId { .. } => Code::Internal,
-                UserError::InvalidStoreFile => Code::InvalidStore,
+                UserError::InvalidStoreFile => Code::InvalidStoreFile,
                UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
                UserError::MaxDatabaseSizeReached => Code::DatabaseSizeLimitReached,
                UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
-                UserError::InvalidFilter(_) => Code::Filter,
+                UserError::InvalidFilter(_) => Code::InvalidSearchFilter,
                UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
                UserError::InvalidDocumentId { .. } | UserError::TooManyDocumentIds { .. } => {
                    Code::InvalidDocumentId
                }
-                UserError::NoPrimaryKeyCandidateFound => Code::NoPrimaryKeyCandidateFound,
+                UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
                UserError::MultiplePrimaryKeyCandidatesFound { .. } => {
-                    Code::MultiplePrimaryKeyCandidatesFound
+                    Code::IndexPrimaryKeyMultipleCandidatesFound
                }
-                UserError::PrimaryKeyCannotBeChanged(_) => Code::PrimaryKeyAlreadyPresent,
+                UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists,
-                UserError::SortRankingRuleMissing => Code::Sort,
+                UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
                UserError::InvalidFacetsDistribution { .. } => Code::BadRequest,
-                UserError::InvalidSortableAttribute { .. } => Code::Sort,
+                UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
-                UserError::CriterionError(_) => Code::InvalidRankingRule,
+                UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
                UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
-                UserError::SortError(_) => Code::Sort,
+                UserError::SortError(_) => Code::InvalidSearchSort,
                UserError::InvalidMinTypoWordLenSetting(_, _) => {
                    Code::InvalidMinWordLengthForTypo
                }
@@ -656,7 +381,7 @@ impl ErrorCode for HeedError {
    fn error_code(&self) -> Code {
        match self {
            HeedError::Mdb(MdbError::MapFull) => Code::DatabaseSizeLimitReached,
-            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStore,
+            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStoreFile,
            HeedError::Io(e) => e.error_code(),
            HeedError::Mdb(_)
            | HeedError::Encoding
@@ -697,6 +422,82 @@ mod strategy {
    }
}
+pub struct DeserrError<C: ErrorCode = deserr_codes::BadRequest> {
+    pub msg: String,
+    pub code: Code,
+    _phantom: PhantomData<C>,
+}
+impl<C: ErrorCode> std::fmt::Debug for DeserrError<C> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
+    }
+}

+impl<C: ErrorCode> std::fmt::Display for DeserrError<C> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.msg)
+    }
+}

+impl<C: ErrorCode> std::error::Error for DeserrError<C> {}
+impl<C: ErrorCode> ErrorCode for DeserrError<C> {
+    fn error_code(&self) -> Code {
+        self.code
+    }
+}

+impl<C1: ErrorCode, C2: ErrorCode> MergeWithError<DeserrError<C2>> for DeserrError<C1> {
+    fn merge(
+        _self_: Option<Self>,
+        other: DeserrError<C2>,
+        _merge_location: ValuePointerRef,
+    ) -> Result<Self, Self> {
+        Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
+    }
+}

+impl DeserrError<MissingIndexUid> {
+    pub fn missing_index_uid(field: &str, location: ValuePointerRef) -> Self {
+        let x = unwrap_any(Self::error::<Infallible>(
+            None,
+            deserr::ErrorKind::MissingField { field },
+            location,
+        ));
+        Self { msg: x.msg, code: MissingIndexUid.error_code(), _phantom: PhantomData }
+    }
+}

+impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrError<C> {
+    fn error<V: IntoValue>(
+        _self_: Option<Self>,
+        error: deserr::ErrorKind<V>,
+        location: ValuePointerRef,
+    ) -> Result<Self, Self> {
+        let msg = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
+
+        Err(DeserrError { msg, code: C::default().error_code(), _phantom: PhantomData })
+    }
+}

+pub struct TakeErrorMessage<T>(pub T);

+impl<C: Default + ErrorCode, T> MergeWithError<TakeErrorMessage<T>> for DeserrError<C>
+where
+    T: std::error::Error,
+{
+    fn merge(
+        _self_: Option<Self>,
+        other: TakeErrorMessage<T>,
+        merge_location: ValuePointerRef,
+    ) -> Result<Self, Self> {
+        DeserrError::error::<Infallible>(
+            None,
+            deserr::ErrorKind::Unexpected { msg: other.0.to_string() },
+            merge_location,
+        )
+    }
+}
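Usage sketch (assumption, not code from the repository): `TakeErrorMessage` is meant to wrap any fallible conversion so that only the wrapped error's Display message is kept, while the `DeserrError<C>` marker of the field that uses it decides which error code is reported. A hypothetical helper in the same spirit as `parse_uuid_from_str` further down would look like this:

    // Hypothetical helper: the ParseIntError only contributes its message,
    // the error code comes from the DeserrError<C> chosen on the field that uses it.
    fn parse_u32_from_str(s: &str) -> Result<u32, TakeErrorMessage<std::num::ParseIntError>> {
        s.parse::<u32>().map_err(TakeErrorMessage)
    }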
#[macro_export]
macro_rules! internal_error {
    ($target:ty : $($other:path), *) => {
@@ -1,22 +1,105 @@
+use std::convert::Infallible;
+use std::fmt::Display;
use std::hash::Hash;
-use std::str::FromStr;

+use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValuePointerRef};
use enum_iterator::Sequence;
use serde::{Deserialize, Serialize};
-use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;

-use crate::error::{Code, ErrorCode};
+use crate::error::deserr_codes::*;
+use crate::error::{unwrap_any, Code, DeserrError, ErrorCode, TakeErrorMessage};
use crate::index_uid::{IndexUid, IndexUidFormatError};
use crate::star_or::StarOr;

-type Result<T> = std::result::Result<T, Error>;
-
pub type KeyId = Uuid;

+impl<C: Default + ErrorCode> MergeWithError<IndexUidFormatError> for DeserrError<C> {
+    fn merge(
+        _self_: Option<Self>,
+        other: IndexUidFormatError,
+        merge_location: deserr::ValuePointerRef,
+    ) -> std::result::Result<Self, Self> {
+        DeserrError::error::<Infallible>(
+            None,
+            deserr::ErrorKind::Unexpected { msg: other.to_string() },
+            merge_location,
+        )
+    }
+}

+fn parse_uuid_from_str(s: &str) -> Result<Uuid, TakeErrorMessage<uuid::Error>> {
+    Uuid::parse_str(s).map_err(TakeErrorMessage)
+}

+#[derive(Debug, DeserializeFromValue)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+pub struct CreateApiKey {
+    #[deserr(error = DeserrError<InvalidApiKeyDescription>)]
+    pub description: Option<String>,
+    #[deserr(error = DeserrError<InvalidApiKeyName>)]
+    pub name: Option<String>,
+    #[deserr(default = Uuid::new_v4(), error = DeserrError<InvalidApiKeyUid>, from(&String) = parse_uuid_from_str -> TakeErrorMessage<uuid::Error>)]
+    pub uid: KeyId,
+    #[deserr(error = DeserrError<InvalidApiKeyActions>)]
+    pub actions: Vec<Action>,
+    #[deserr(error = DeserrError<InvalidApiKeyIndexes>)]
+    pub indexes: Vec<StarOr<IndexUid>>,
+    #[deserr(error = DeserrError<InvalidApiKeyExpiresAt>, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage<ParseOffsetDateTimeError>)]
+    pub expires_at: Option<OffsetDateTime>,
+}
+impl CreateApiKey {
+    pub fn to_key(self) -> Key {
+        let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self;
+        let now = OffsetDateTime::now_utc();
+        Key {
+            description,
+            name,
+            uid,
+            actions,
+            indexes,
+            expires_at,
+            created_at: now,
+            updated_at: now,
+        }
+    }
+}
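For orientation, a sketch of a key-creation payload shaped like `CreateApiKey` (values invented; the camelCase names and the rejection of unknown fields follow the attributes above, and a malformed `uid`, for instance, is reported under the `InvalidApiKeyUid` code):

    let _payload = serde_json::json!({
        // Hypothetical example values only:
        "description": "Indexing key for the movies index.",
        "name": "indexing-key",
        "actions": ["documents.add", "search"],
        "indexes": ["movies"],
        "expiresAt": "2042-04-02T00:42:42Z"
    });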
+fn deny_immutable_fields_api_key(
+    field: &str,
+    accepted: &[&str],
+    location: ValuePointerRef,
+) -> DeserrError {
+    let mut error = unwrap_any(DeserrError::<BadRequest>::error::<Infallible>(
+        None,
+        deserr::ErrorKind::UnknownKey { key: field, accepted },
+        location,
+    ));
+
+    error.code = match field {
+        "uid" => Code::ImmutableApiKeyUid,
+        "actions" => Code::ImmutableApiKeyActions,
+        "indexes" => Code::ImmutableApiKeyIndexes,
+        "expiresAt" => Code::ImmutableApiKeyExpiresAt,
+        "createdAt" => Code::ImmutableApiKeyCreatedAt,
+        "updatedAt" => Code::ImmutableApiKeyUpdatedAt,
+        _ => Code::BadRequest,
+    };
+    error
+}

+#[derive(Debug, DeserializeFromValue)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
+pub struct PatchApiKey {
+    #[deserr(error = DeserrError<InvalidApiKeyDescription>)]
+    pub description: Option<String>,
+    #[deserr(error = DeserrError<InvalidApiKeyName>)]
+    pub name: Option<String>,
+}
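A short illustration of the hook above (sketch, not from the diff): when a PATCH body contains a key that `PatchApiKey` does not declare, `deny_immutable_fields_api_key` replaces the generic unknown-field code with a field-specific immutable one:

    // Field name passed in   -> code stored on the returned DeserrError:
    //   "expiresAt"           -> Code::ImmutableApiKeyExpiresAt
    //   "actions"             -> Code::ImmutableApiKeyActions
    //   any other unknown key -> Code::BadRequest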
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct Key {
    #[serde(skip_serializing_if = "Option::is_none")]
@@ -35,100 +118,6 @@ pub struct Key {
}
impl Key {
-    pub fn create_from_value(value: Value) -> Result<Self> {
-        let name = match value.get("name") {
-            None | Some(Value::Null) => None,
-            Some(des) => from_value(des.clone())
-                .map(Some)
-                .map_err(|_| Error::InvalidApiKeyName(des.clone()))?,
-        };
-
-        let description = match value.get("description") {
-            None | Some(Value::Null) => None,
-            Some(des) => from_value(des.clone())
-                .map(Some)
-                .map_err(|_| Error::InvalidApiKeyDescription(des.clone()))?,
-        };
-
-        let uid = value.get("uid").map_or_else(
-            || Ok(Uuid::new_v4()),
-            |uid| from_value(uid.clone()).map_err(|_| Error::InvalidApiKeyUid(uid.clone())),
-        )?;
-
-        let actions = value
-            .get("actions")
-            .map(|act| {
-                from_value(act.clone()).map_err(|_| Error::InvalidApiKeyActions(act.clone()))
-            })
-            .ok_or(Error::MissingApiKeyActions)??;
-
-        let indexes = value
-            .get("indexes")
-            .map(|ind| {
-                from_value::<Vec<String>>(ind.clone())
-                    // If it's not a vec of string, return an API key parsing error.
-                    .map_err(|_| Error::InvalidApiKeyIndexes(ind.clone()))
-                    .and_then(|ind| {
-                        ind.into_iter()
-                            // If it's not a valid Index uid, return an Index Uid parsing error.
-                            .map(|i| StarOr::<IndexUid>::from_str(&i).map_err(Error::from))
-                            .collect()
-                    })
-            })
-            .ok_or(Error::MissingApiKeyIndexes)??;
-
-        let expires_at = value
-            .get("expiresAt")
-            .map(parse_expiration_date)
-            .ok_or(Error::MissingApiKeyExpiresAt)??;
-
-        let created_at = OffsetDateTime::now_utc();
-        let updated_at = created_at;
-
-        Ok(Self { name, description, uid, actions, indexes, expires_at, created_at, updated_at })
-    }
-
-    pub fn update_from_value(&mut self, value: Value) -> Result<()> {
-        if let Some(des) = value.get("description") {
-            let des =
-                from_value(des.clone()).map_err(|_| Error::InvalidApiKeyDescription(des.clone()));
-            self.description = des?;
-        }
-
-        if let Some(des) = value.get("name") {
-            let des = from_value(des.clone()).map_err(|_| Error::InvalidApiKeyName(des.clone()));
-            self.name = des?;
-        }
-
-        if value.get("uid").is_some() {
-            return Err(Error::ImmutableField("uid".to_string()));
-        }
-
-        if value.get("actions").is_some() {
-            return Err(Error::ImmutableField("actions".to_string()));
-        }
-
-        if value.get("indexes").is_some() {
-            return Err(Error::ImmutableField("indexes".to_string()));
-        }
-
-        if value.get("expiresAt").is_some() {
-            return Err(Error::ImmutableField("expiresAt".to_string()));
-        }
-
-        if value.get("createdAt").is_some() {
-            return Err(Error::ImmutableField("createdAt".to_string()));
-        }
-
-        if value.get("updatedAt").is_some() {
-            return Err(Error::ImmutableField("updatedAt".to_string()));
-        }
-
-        self.updated_at = OffsetDateTime::now_utc();
-
-        Ok(())
-    }
-
    pub fn default_admin() -> Self {
        let now = OffsetDateTime::now_utc();
        let uid = Uuid::new_v4();
@@ -160,107 +149,143 @@ impl Key {
    }
}

-fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
-    match value {
-        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
-            .or_else(|_| {
-                PrimitiveDateTime::parse(
-                    string,
-                    format_description!(
-                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
-                    ),
-                ).map(|datetime| datetime.assume_utc())
-            })
-            .or_else(|_| {
-                PrimitiveDateTime::parse(
-                    string,
-                    format_description!(
-                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
-                    ),
-                ).map(|datetime| datetime.assume_utc())
-            })
-            .or_else(|_| {
-                Date::parse(string, format_description!(
-                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
-                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
-            })
-            .map_err(|_| Error::InvalidApiKeyExpiresAt(value.clone()))
-            // check if the key is already expired.
-            .and_then(|d| {
-                if d > OffsetDateTime::now_utc() {
-                    Ok(d)
-                } else {
-                    Err(Error::InvalidApiKeyExpiresAt(value.clone()))
-                }
-            })
-            .map(Option::Some),
-        Value::Null => Ok(None),
-        _otherwise => Err(Error::InvalidApiKeyExpiresAt(value.clone())),
+#[derive(Debug)]
+pub struct ParseOffsetDateTimeError(String);
+impl Display for ParseOffsetDateTimeError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0)
+    }
+}
+impl std::error::Error for ParseOffsetDateTimeError {}
+
+fn parse_expiration_date(
+    string: &str,
+) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<ParseOffsetDateTimeError>> {
+    let datetime = if let Ok(datetime) = OffsetDateTime::parse(string, &Rfc3339) {
+        datetime
+    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
+        string,
+        format_description!(
+            "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
+        ),
+    ) {
+        primitive_datetime.assume_utc()
+    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
+        string,
+        format_description!(
+            "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
+        ),
+    ) {
+        primitive_datetime.assume_utc()
+    } else if let Ok(date) = Date::parse(
+        string,
+        format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"),
+    ) {
+        PrimitiveDateTime::new(date, time!(00:00)).assume_utc()
+    } else {
+        return Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned())));
+    };
+    if datetime > OffsetDateTime::now_utc() {
+        Ok(Some(datetime))
+    } else {
+        Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned())))
    }
}
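Illustrative inputs (not taken from the repository) matching the four branches of the new `parse_expiration_date`, in the order they are tried:

    let _examples = [
        "2024-01-01T00:00:00Z", // RFC 3339, parsed as an OffsetDateTime
        "2024-01-01T00:00:00",  // primitive datetime with a 'T' separator, assumed UTC
        "2024-01-01 00:00:00",  // primitive datetime with a space separator, assumed UTC
        "2024-01-01",           // bare date, expanded to midnight UTC
    ];

Anything else, or a datetime that is not in the future, is rejected with `TakeErrorMessage(ParseOffsetDateTimeError(..))`.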
-#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence)]
+#[derive(
+    Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue,
+)]
#[repr(u8)]
pub enum Action {
    #[serde(rename = "*")]
+    #[deserr(rename = "*")]
    All = 0,
    #[serde(rename = "search")]
+    #[deserr(rename = "search")]
    Search,
    #[serde(rename = "documents.*")]
+    #[deserr(rename = "documents.*")]
    DocumentsAll,
    #[serde(rename = "documents.add")]
+    #[deserr(rename = "documents.add")]
    DocumentsAdd,
    #[serde(rename = "documents.get")]
+    #[deserr(rename = "documents.get")]
    DocumentsGet,
    #[serde(rename = "documents.delete")]
+    #[deserr(rename = "documents.delete")]
    DocumentsDelete,
    #[serde(rename = "indexes.*")]
+    #[deserr(rename = "indexes.*")]
    IndexesAll,
    #[serde(rename = "indexes.create")]
+    #[deserr(rename = "indexes.create")]
    IndexesAdd,
    #[serde(rename = "indexes.get")]
+    #[deserr(rename = "indexes.get")]
    IndexesGet,
    #[serde(rename = "indexes.update")]
+    #[deserr(rename = "indexes.update")]
    IndexesUpdate,
    #[serde(rename = "indexes.delete")]
+    #[deserr(rename = "indexes.delete")]
    IndexesDelete,
    #[serde(rename = "indexes.swap")]
+    #[deserr(rename = "indexes.swap")]
    IndexesSwap,
    #[serde(rename = "tasks.*")]
+    #[deserr(rename = "tasks.*")]
    TasksAll,
    #[serde(rename = "tasks.cancel")]
+    #[deserr(rename = "tasks.cancel")]
    TasksCancel,
    #[serde(rename = "tasks.delete")]
+    #[deserr(rename = "tasks.delete")]
    TasksDelete,
    #[serde(rename = "tasks.get")]
+    #[deserr(rename = "tasks.get")]
    TasksGet,
    #[serde(rename = "settings.*")]
+    #[deserr(rename = "settings.*")]
    SettingsAll,
    #[serde(rename = "settings.get")]
+    #[deserr(rename = "settings.get")]
    SettingsGet,
    #[serde(rename = "settings.update")]
+    #[deserr(rename = "settings.update")]
    SettingsUpdate,
    #[serde(rename = "stats.*")]
+    #[deserr(rename = "stats.*")]
    StatsAll,
    #[serde(rename = "stats.get")]
+    #[deserr(rename = "stats.get")]
    StatsGet,
    #[serde(rename = "metrics.*")]
+    #[deserr(rename = "metrics.*")]
    MetricsAll,
    #[serde(rename = "metrics.get")]
+    #[deserr(rename = "metrics.get")]
    MetricsGet,
    #[serde(rename = "dumps.*")]
+    #[deserr(rename = "dumps.*")]
    DumpsAll,
    #[serde(rename = "dumps.create")]
+    #[deserr(rename = "dumps.create")]
    DumpsCreate,
    #[serde(rename = "version")]
+    #[deserr(rename = "version")]
    Version,
    #[serde(rename = "keys.create")]
+    #[deserr(rename = "keys.create")]
    KeysAdd,
    #[serde(rename = "keys.get")]
+    #[deserr(rename = "keys.get")]
    KeysGet,
    #[serde(rename = "keys.update")]
+    #[deserr(rename = "keys.update")]
    KeysUpdate,
    #[serde(rename = "keys.delete")]
+    #[deserr(rename = "keys.delete")]
    KeysDelete,
}

@@ -341,56 +366,3 @@ pub mod actions {
    pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
    pub const KEYS_DELETE: u8 = KeysDelete.repr();
}
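Side note (sketch, not part of the diff): the added `DeserializeFromValue` derive plus the `#[deserr(rename = ...)]` attributes mirror the existing serde renames, so the same wire names resolve to the same variants whichever deserializer handles them, e.g.:

    // Illustrative round-trip through serde; deserr follows the same renames.
    let action: Action = serde_json::from_value(serde_json::json!("documents.add"))
        .expect("known action name");
    assert_eq!(action, Action::DocumentsAdd);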
-#[derive(Debug, thiserror::Error)]
-pub enum Error {
-    #[error("`expiresAt` field is mandatory.")]
-    MissingApiKeyExpiresAt,
-    #[error("`indexes` field is mandatory.")]
-    MissingApiKeyIndexes,
-    #[error("`actions` field is mandatory.")]
-    MissingApiKeyActions,
-    #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
-    InvalidApiKeyActions(Value),
-    #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
-    InvalidApiKeyIndexes(Value),
-    #[error("{0}")]
-    InvalidApiKeyIndexUid(IndexUidFormatError),
-    #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
-    InvalidApiKeyExpiresAt(Value),
-    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
-    InvalidApiKeyDescription(Value),
-    #[error(
-        "`name` field value `{0}` is invalid. It should be a string or specified as a null value."
-    )]
-    InvalidApiKeyName(Value),
-    #[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
-    InvalidApiKeyUid(Value),
-    #[error("The `{0}` field cannot be modified for the given resource.")]
-    ImmutableField(String),
-}
-
-impl From<IndexUidFormatError> for Error {
-    fn from(e: IndexUidFormatError) -> Self {
-        Self::InvalidApiKeyIndexUid(e)
-    }
-}
-
-impl ErrorCode for Error {
-    fn error_code(&self) -> Code {
-        match self {
-            Self::MissingApiKeyExpiresAt => Code::MissingApiKeyExpiresAt,
-            Self::MissingApiKeyIndexes => Code::MissingApiKeyIndexes,
-            Self::MissingApiKeyActions => Code::MissingApiKeyActions,
-            Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions,
-            Self::InvalidApiKeyIndexes(_) | Self::InvalidApiKeyIndexUid(_) => {
-                Code::InvalidApiKeyIndexes
-            }
-            Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
-            Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
-            Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
-            Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
-            Self::ImmutableField(_) => Code::ImmutableField,
-        }
-    }
-}
@@ -1,11 +1,18 @@
use std::collections::{BTreeMap, BTreeSet};
+use std::convert::Infallible;
+use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
+use std::str::FromStr;

-use deserr::{DeserializeError, DeserializeFromValue};
+use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
-use milli::{Index, DEFAULT_VALUES_PER_FACET};
-use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use milli::update::Setting;
+use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
+use serde::{Deserialize, Serialize, Serializer};
+
+use crate::error::deserr_codes::*;
+use crate::error::{unwrap_any, DeserrError};

/// The maximum number of results that the engine
/// will be able to return in one search call.
@@ -27,112 +34,6 @@ where
        .serialize(s)
}
-#[derive(Debug, Clone, PartialEq, Eq, Copy)]
-pub enum Setting<T> {
-    Set(T),
-    Reset,
-    NotSet,
-}
-
-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
-impl<T> From<Setting<T>> for milli::update::Setting<T> {
-    fn from(value: Setting<T>) -> Self {
-        match value {
-            Setting::Set(x) => milli::update::Setting::Set(x),
-            Setting::Reset => milli::update::Setting::Reset,
-            Setting::NotSet => milli::update::Setting::NotSet,
-        }
-    }
-}
-impl<T> From<milli::update::Setting<T>> for Setting<T> {
-    fn from(value: milli::update::Setting<T>) -> Self {
-        match value {
-            milli::update::Setting::Set(x) => Setting::Set(x),
-            milli::update::Setting::Reset => Setting::Reset,
-            milli::update::Setting::NotSet => Setting::NotSet,
-        }
-    }
-}
-
-impl<T> Setting<T> {
-    pub fn set(self) -> Option<T> {
-        match self {
-            Self::Set(value) => Some(value),
-            _ => None,
-        }
-    }
-
-    pub const fn as_ref(&self) -> Setting<&T> {
-        match *self {
-            Self::Set(ref value) => Setting::Set(value),
-            Self::Reset => Setting::Reset,
-            Self::NotSet => Setting::NotSet,
-        }
-    }
-
-    pub const fn is_not_set(&self) -> bool {
-        matches!(self, Self::NotSet)
-    }
-
-    /// If `Self` is `Reset`, then map self to `Set` with the provided `val`.
-    pub fn or_reset(self, val: T) -> Self {
-        match self {
-            Self::Reset => Self::Set(val),
-            otherwise => otherwise,
-        }
-    }
-}
-
-impl<T: Serialize> Serialize for Setting<T> {
-    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        match self {
-            Self::Set(value) => Some(value),
-            // Usually not_set isn't serialized by setting skip_serializing_if field attribute
-            Self::NotSet | Self::Reset => None,
-        }
-        .serialize(serializer)
-    }
-}
-
-impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        Deserialize::deserialize(deserializer).map(|x| match x {
-            Some(x) => Self::Set(x),
-            None => Self::Reset, // Reset is forced by sending null value
-        })
-    }
-}
-
-impl<T, E> DeserializeFromValue<E> for Setting<T>
-where
-    T: DeserializeFromValue<E>,
-    E: DeserializeError,
-{
-    fn deserialize_from_value<V: deserr::IntoValue>(
-        value: deserr::Value<V>,
-        location: deserr::ValuePointerRef,
-    ) -> Result<Self, E> {
-        match value {
-            deserr::Value::Null => Ok(Setting::Reset),
-            _ => T::deserialize_from_value(value, location).map(Setting::Set),
-        }
-    }
-    fn default() -> Option<Self> {
-        Some(Self::NotSet)
-    }
-}
#[derive(Clone, Default, Debug, Serialize, PartialEq, Eq)]
pub struct Checked;

@@ -151,78 +52,90 @@ where
    }
}

-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
+fn validate_min_word_size_for_typo_setting<E: DeserializeError>(
+    s: MinWordSizeTyposSetting,
+    location: ValuePointerRef,
+) -> Result<MinWordSizeTyposSetting, E> {
+    if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
+        if one > two {
+            return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
+        }
+    }
+    Ok(s)
+}
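To make the validator above concrete (payloads invented): a `minWordSizeForTypos` object where `twoTypos` is smaller than `oneTypo` is rejected with the message built in `validate_min_word_size_for_typo_setting`, surfaced under the `InvalidMinWordLengthForTypo` code wired onto the structs below.

    let _rejected = serde_json::json!({ "oneTypo": 6, "twoTypos": 4 }); // oneTypo > twoTypos
    let _accepted = serde_json::json!({ "oneTypo": 2, "twoTypos": 5 }); // 0 <= oneTypo <= twoTypos <= 255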
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
+#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrError<InvalidMinWordLengthForTypo>)]
pub struct MinWordSizeTyposSetting {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub one_typo: Setting<u8>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub two_typos: Setting<u8>,
}

-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
+#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrError<InvalidMinWordLengthForTypo>>)]
pub struct TypoSettings {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub enabled: Setting<bool>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(error = DeserrError<InvalidMinWordLengthForTypo>)]
    pub min_word_size_for_typos: Setting<MinWordSizeTyposSetting>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub disable_on_words: Setting<BTreeSet<String>>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub disable_on_attributes: Setting<BTreeSet<String>>,
}

-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct FacetingSettings {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub max_values_per_facet: Setting<usize>,
}

-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct PaginationSettings {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub max_total_hits: Setting<usize>,
}

+impl MergeWithError<milli::CriterionError> for DeserrError<InvalidSettingsRankingRules> {
+    fn merge(
+        _self_: Option<Self>,
+        other: milli::CriterionError,
+        merge_location: ValuePointerRef,
+    ) -> Result<Self, Self> {
+        Self::error::<Infallible>(
+            None,
+            ErrorKind::Unexpected { msg: other.to_string() },
+            merge_location,
+        )
+    }
+}
/// Holds all the settings for an index. `T` can either be `Checked` if they represents settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the later case, a
/// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
-#[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
+#[serde(
+    deny_unknown_fields,
+    rename_all = "camelCase",
+    bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
+)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct Settings<T> {
    #[serde(
        default,
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsDisplayedAttributes>)]
    pub displayed_attributes: Setting<Vec<String>>,

    #[serde(
@@ -230,38 +143,39 @@ pub struct Settings<T> {
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsSearchableAttributes>)]
    pub searchable_attributes: Setting<Vec<String>>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsFilterableAttributes>)]
    pub filterable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsSortableAttributes>)]
    pub sortable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsRankingRules>)]
-    pub ranking_rules: Setting<Vec<String>>,
+    pub ranking_rules: Setting<Vec<RankingRuleView>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsStopWords>)]
    pub stop_words: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsSynonyms>)]
    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsDistinctAttribute>)]
    pub distinct_attribute: Setting<String>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsTypoTolerance>)]
    pub typo_tolerance: Setting<TypoSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsFaceting>)]
    pub faceting: Setting<FacetingSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsPagination>)]
    pub pagination: Setting<PaginationSettings>,

    #[serde(skip)]
+    #[deserr(skip)]
    pub _kind: PhantomData<T>,
}
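For orientation, a settings payload shaped like `Settings<Unchecked>` above (values invented); each top-level key that fails to deserialize is reported under the `InvalidSettings*` code attached to its field:

    let _settings = serde_json::json!({
        "displayedAttributes": ["*"],
        "searchableAttributes": ["title", "overview"],
        "filterableAttributes": ["genres"],
        "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
        "typoTolerance": { "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 } },
        "faceting": { "maxValuesPerFacet": 100 },
        "pagination": { "maxTotalHits": 1000 }
    });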
@@ -396,7 +310,9 @@ pub fn apply_settings_to_builder(
    }

    match settings.ranking_rules {
-        Setting::Set(ref criteria) => builder.set_criteria(criteria.clone()),
+        Setting::Set(ref criteria) => {
+            builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
+        }
        Setting::Reset => builder.reset_criteria(),
        Setting::NotSet => (),
    }
@@ -510,7 +426,7 @@ pub fn settings(

    let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();

-    let criteria = index.criteria(rtxn)?.into_iter().map(|c| c.to_string()).collect();
+    let criteria = index.criteria(rtxn)?;

    let stop_words = index
        .stop_words(rtxn)?
@@ -571,7 +487,7 @@ pub fn settings(
        },
        filterable_attributes: Setting::Set(filterable_attributes),
        sortable_attributes: Setting::Set(sortable_attributes),
-        ranking_rules: Setting::Set(criteria),
+        ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
        stop_words: Setting::Set(stop_words),
        distinct_attribute: match distinct_field {
            Some(field) => Setting::Set(field),
@@ -585,16 +501,106 @@ pub fn settings(
        })
}
|
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
|
||||||
|
#[deserr(from(&String) = FromStr::from_str -> CriterionError)]
|
||||||
|
pub enum RankingRuleView {
|
||||||
|
/// Sorted by decreasing number of matched query terms.
|
||||||
|
/// Query words at the front of an attribute is considered better than if it was at the back.
|
||||||
|
Words,
|
||||||
|
/// Sorted by increasing number of typos.
|
||||||
|
Typo,
|
||||||
|
/// Sorted by increasing distance between matched query terms.
|
||||||
|
Proximity,
|
||||||
|
/// Documents with quey words contained in more important
|
||||||
|
/// attributes are considered better.
|
||||||
|
Attribute,
|
||||||
|
/// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable
|
||||||
|
/// attributes can be used in place of this criterion at query time.
|
||||||
|
Sort,
|
||||||
|
/// Sorted by the similarity of the matched words with the query words.
|
||||||
|
Exactness,
|
||||||
|
/// Sorted by the increasing value of the field specified.
|
||||||
|
Asc(String),
|
||||||
|
/// Sorted by the decreasing value of the field specified.
|
||||||
|
Desc(String),
|
||||||
|
}
|
||||||
|
impl Serialize for RankingRuleView {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
serializer.serialize_str(&format!("{}", Criterion::from(self.clone())))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<'de> Deserialize<'de> for RankingRuleView {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
struct Visitor;
|
||||||
|
impl<'de> serde::de::Visitor<'de> for Visitor {
|
||||||
|
type Value = RankingRuleView;
|
||||||
|
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
write!(formatter, "the name of a valid ranking rule (string)")
|
||||||
|
}
|
||||||
|
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||||
|
where
|
||||||
|
E: serde::de::Error,
|
||||||
|
{
|
||||||
|
let criterion = Criterion::from_str(v).map_err(|_| {
|
||||||
|
E::invalid_value(serde::de::Unexpected::Str(v), &"a valid ranking rule")
|
||||||
|
})?;
|
||||||
|
Ok(RankingRuleView::from(criterion))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
deserializer.deserialize_str(Visitor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl FromStr for RankingRuleView {
|
||||||
|
type Err = <Criterion as FromStr>::Err;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
Ok(RankingRuleView::from(Criterion::from_str(s)?))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl fmt::Display for RankingRuleView {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
fmt::Display::fmt(&Criterion::from(self.clone()), f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<Criterion> for RankingRuleView {
|
||||||
|
fn from(value: Criterion) -> Self {
|
||||||
|
match value {
|
||||||
|
Criterion::Words => RankingRuleView::Words,
|
||||||
|
Criterion::Typo => RankingRuleView::Typo,
|
||||||
|
Criterion::Proximity => RankingRuleView::Proximity,
|
||||||
|
Criterion::Attribute => RankingRuleView::Attribute,
|
||||||
|
Criterion::Sort => RankingRuleView::Sort,
|
||||||
|
Criterion::Exactness => RankingRuleView::Exactness,
|
||||||
|
Criterion::Asc(x) => RankingRuleView::Asc(x),
|
||||||
|
Criterion::Desc(x) => RankingRuleView::Desc(x),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<RankingRuleView> for Criterion {
|
||||||
|
fn from(value: RankingRuleView) -> Self {
|
||||||
|
match value {
|
||||||
|
RankingRuleView::Words => Criterion::Words,
|
||||||
|
RankingRuleView::Typo => Criterion::Typo,
|
||||||
|
RankingRuleView::Proximity => Criterion::Proximity,
|
||||||
|
RankingRuleView::Attribute => Criterion::Attribute,
|
||||||
|
RankingRuleView::Sort => Criterion::Sort,
|
||||||
|
RankingRuleView::Exactness => Criterion::Exactness,
|
||||||
|
RankingRuleView::Asc(x) => Criterion::Asc(x),
|
||||||
|
RankingRuleView::Desc(x) => Criterion::Desc(x),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
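The conversions above make `RankingRuleView` a thin, serde-friendly mirror of milli's `Criterion`. A minimal sketch of the round trip (not part of the commit; it assumes the types above are in scope and that custom rules use milli's `field:asc` / `field:desc` syntax):

use std::str::FromStr;

fn ranking_rule_view_roundtrip() {
    // Named rules parse to their unit variants.
    assert_eq!(RankingRuleView::from_str("words").unwrap(), RankingRuleView::Words);

    // Custom rules keep the field name; Display delegates back to Criterion,
    // so the textual form is preserved (assumed syntax: "price:desc").
    let custom = RankingRuleView::from_str("price:desc").unwrap();
    assert_eq!(custom, RankingRuleView::Desc("price".to_string()));
    assert_eq!(custom.to_string(), "price:desc");
}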
 #[cfg(test)]
 pub(crate) mod test {
-    use proptest::prelude::*;
-
     use super::*;

-    pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
-        prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
-    }
-
     #[test]
     fn test_setting_check() {
         // test no changes
@@ -3,9 +3,12 @@ use std::marker::PhantomData;
 use std::ops::Deref;
 use std::str::FromStr;

+use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
 use serde::de::Visitor;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};

+use crate::error::unwrap_any;
+
 /// A type that tries to match either a star (*) or
 /// any other thing that implements `FromStr`.
 #[derive(Debug, Clone)]
@@ -14,6 +17,35 @@ pub enum StarOr<T> {
     Other(T),
 }

+impl<E: DeserializeError, T> DeserializeFromValue<E> for StarOr<T>
+where
+    T: FromStr,
+    E: MergeWithError<T::Err>,
+{
+    fn deserialize_from_value<V: deserr::IntoValue>(
+        value: deserr::Value<V>,
+        location: deserr::ValuePointerRef,
+    ) -> Result<Self, E> {
+        match value {
+            deserr::Value::String(v) => match v.as_str() {
+                "*" => Ok(StarOr::Star),
+                v => match FromStr::from_str(v) {
+                    Ok(x) => Ok(StarOr::Other(x)),
+                    Err(e) => Err(unwrap_any(E::merge(None, e, location))),
+                },
+            },
+            _ => Err(unwrap_any(E::error::<V>(
+                None,
+                deserr::ErrorKind::IncorrectValueKind {
+                    actual: value,
+                    accepted: &[ValueKind::String],
+                },
+                location,
+            ))),
+        }
+    }
+}
+
 impl<T: FromStr> FromStr for StarOr<T> {
     type Err = T::Err;

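A minimal sketch of the behaviour the new impl mirrors (not part of the commit; it assumes the `StarOr` type above together with its `FromStr` impl): both the query-string path and the deserr path accept a literal `*` as the catch-all variant and fall back to `T::from_str` for anything else.

fn star_or_example() {
    // "*" selects everything...
    let all: StarOr<String> = "*".parse().unwrap();
    assert!(matches!(all, StarOr::Star));

    // ...while any other value is forwarded to the inner type's FromStr.
    let one: StarOr<String> = "movies".parse().unwrap();
    assert!(matches!(one, StarOr::Other(ref value) if value == "movies"));
}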
@@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
 bytes = "1.2.1"
 clap = { version = "4.0.9", features = ["derive", "env"] }
 crossbeam-channel = "0.5.6"
-deserr = { version = "0.1.2", features = ["serde-json"] }
+deserr = "0.1.4"
 dump = { path = "../dump" }
 either = "1.8.0"
 env_logger = "0.9.1"
@@ -109,5 +109,5 @@ japanese = ["meilisearch-types/japanese"]
 thai = ["meilisearch-types/thai"]

 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.4/build.zip"
-sha1 = "b53c2edb51d4ce1984d5586333b91c4ad3a1b4e4"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.5/build.zip"
+sha1 = "6fe959b78511b32e9ff857fd9fd31740633b9fce"
@@ -7,7 +7,7 @@ use serde_json::Value;

 use super::{find_user_id, Analytics, DocumentDeletionKind};
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
-use crate::routes::tasks::TasksFilterQueryRaw;
+use crate::routes::tasks::TasksFilterQuery;
 use crate::Opt;

 pub struct MockAnalytics {
@@ -58,6 +58,6 @@ impl Analytics for MockAnalytics {
         _request: &HttpRequest,
     ) {
     }
-    fn get_tasks(&self, _query: &TasksFilterQueryRaw, _request: &HttpRequest) {}
+    fn get_tasks(&self, _query: &TasksFilterQuery, _request: &HttpRequest) {}
     fn health_seen(&self, _request: &HttpRequest) {}
 }
@@ -15,7 +15,7 @@ use platform_dirs::AppDirs;
 use serde_json::Value;

 use crate::routes::indexes::documents::UpdateDocumentsQuery;
-use crate::routes::tasks::TasksFilterQueryRaw;
+use crate::routes::tasks::TasksFilterQuery;

 // if we are in debug mode OR the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
@@ -94,7 +94,7 @@ pub trait Analytics: Sync + Send {
     );

     // this method should be called to aggregate the get tasks requests.
-    fn get_tasks(&self, query: &TasksFilterQueryRaw, request: &HttpRequest);
+    fn get_tasks(&self, query: &TasksFilterQuery, request: &HttpRequest);

     // this method should be called to aggregate a add documents request
     fn health_seen(&self, request: &HttpRequest);
@@ -27,7 +27,7 @@ use super::{config_user_id_path, DocumentDeletionKind, MEILISEARCH_CONFIG_PATH};
 use crate::analytics::Analytics;
 use crate::option::{default_http_addr, IndexerOpts, MaxMemory, MaxThreads, ScheduleSnapshot};
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
-use crate::routes::tasks::TasksFilterQueryRaw;
+use crate::routes::tasks::TasksFilterQuery;
 use crate::routes::{create_all_stats, Stats};
 use crate::search::{
     SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
@@ -195,7 +195,7 @@ impl super::Analytics for SegmentAnalytics {
         let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
     }

-    fn get_tasks(&self, query: &TasksFilterQueryRaw, request: &HttpRequest) {
+    fn get_tasks(&self, query: &TasksFilterQuery, request: &HttpRequest) {
         let aggregate = TasksAggregator::from_query(query, request);
         let _ = self.sender.try_send(AnalyticsMsg::AggregateTasks(aggregate));
     }
@@ -868,21 +868,21 @@ pub struct TasksAggregator {
 }

 impl TasksAggregator {
-    pub fn from_query(query: &TasksFilterQueryRaw, request: &HttpRequest) -> Self {
+    pub fn from_query(query: &TasksFilterQuery, request: &HttpRequest) -> Self {
         Self {
             timestamp: Some(OffsetDateTime::now_utc()),
             user_agents: extract_user_agents(request).into_iter().collect(),
-            filtered_by_uid: query.common.uids.is_some(),
-            filtered_by_index_uid: query.common.index_uids.is_some(),
-            filtered_by_type: query.common.types.is_some(),
-            filtered_by_status: query.common.statuses.is_some(),
-            filtered_by_canceled_by: query.common.canceled_by.is_some(),
-            filtered_by_before_enqueued_at: query.dates.before_enqueued_at.is_some(),
-            filtered_by_after_enqueued_at: query.dates.after_enqueued_at.is_some(),
-            filtered_by_before_started_at: query.dates.before_started_at.is_some(),
-            filtered_by_after_started_at: query.dates.after_started_at.is_some(),
-            filtered_by_before_finished_at: query.dates.before_finished_at.is_some(),
-            filtered_by_after_finished_at: query.dates.after_finished_at.is_some(),
+            filtered_by_uid: query.uids.is_some(),
+            filtered_by_index_uid: query.index_uids.is_some(),
+            filtered_by_type: query.types.is_some(),
+            filtered_by_status: query.statuses.is_some(),
+            filtered_by_canceled_by: query.canceled_by.is_some(),
+            filtered_by_before_enqueued_at: query.before_enqueued_at.is_some(),
+            filtered_by_after_enqueued_at: query.after_enqueued_at.is_some(),
+            filtered_by_before_started_at: query.before_started_at.is_some(),
+            filtered_by_after_started_at: query.after_started_at.is_some(),
+            filtered_by_before_finished_at: query.before_finished_at.is_some(),
+            filtered_by_after_finished_at: query.after_finished_at.is_some(),
             total_received: 1,
         }
     }
@@ -55,7 +55,7 @@ impl ErrorCode for MeilisearchHttpError {
             MeilisearchHttpError::MissingPayload(_) => Code::MissingPayload,
             MeilisearchHttpError::InvalidContentType(_, _) => Code::InvalidContentType,
             MeilisearchHttpError::DocumentNotFound(_) => Code::DocumentNotFound,
-            MeilisearchHttpError::InvalidExpression(_, _) => Code::Filter,
+            MeilisearchHttpError::InvalidExpression(_, _) => Code::InvalidSearchFilter,
             MeilisearchHttpError::PayloadTooLarge => Code::PayloadTooLarge,
             MeilisearchHttpError::SwapIndexPayloadWrongLength(_) => Code::InvalidSwapIndexes,
             MeilisearchHttpError::IndexUid(e) => e.error_code(),
@@ -17,7 +17,7 @@ impl ErrorCode for AuthenticationError {
     fn error_code(&self) -> Code {
         match self {
             AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
-            AuthenticationError::InvalidToken => Code::InvalidToken,
+            AuthenticationError::InvalidToken => Code::InvalidApiKey,
             AuthenticationError::IrretrievableState => Code::Internal,
             AuthenticationError::MissingMasterKey => Code::MissingMasterKey,
         }
@@ -32,7 +32,7 @@ impl<T, E> ValidatedJson<T, E> {

 impl<T, E> FromRequest for ValidatedJson<T, E>
 where
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
     T: DeserializeFromValue<E>,
 {
     type Error = actix_web::Error;
@@ -55,7 +55,7 @@ pub struct ValidatedJsonExtractFut<T, E> {
 impl<T, E> Future for ValidatedJsonExtractFut<T, E>
 where
     T: DeserializeFromValue<E>,
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
 {
     type Output = Result<ValidatedJson<T, E>, actix_web::Error>;

@@ -22,7 +22,7 @@ impl<T, E> QueryParameter<T, E> {
 impl<T, E> QueryParameter<T, E>
 where
     T: DeserializeFromValue<E>,
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
 {
     pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
         let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
@@ -58,7 +58,7 @@ impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
 impl<T, E> FromRequest for QueryParameter<T, E>
 where
     T: DeserializeFromValue<E>,
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
 {
     type Error = actix_web::Error;
     type Future = Ready<Result<Self, actix_web::Error>>;
@@ -1,20 +1,21 @@
-use std::convert::Infallible;
-use std::num::ParseIntError;
-use std::{fmt, str};
+use std::str;

 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::{DeserializeError, IntoValue, MergeWithError, ValuePointerRef};
+use deserr::DeserializeFromValue;
 use meilisearch_auth::error::AuthControllerError;
 use meilisearch_auth::AuthController;
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
-use meilisearch_types::keys::{Action, Key};
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::{Code, DeserrError, ResponseError, TakeErrorMessage};
+use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
 use serde::{Deserialize, Serialize};
-use serde_json::Value;
 use time::OffsetDateTime;
 use uuid::Uuid;

+use super::indexes::search::parse_usize_take_error_message;
+use super::PAGINATION_DEFAULT_LIMIT;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
+use crate::extractors::json::ValidatedJson;
 use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::Pagination;
@@ -35,7 +36,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {

 pub async fn create_api_key(
     auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
-    body: web::Json<Value>,
+    body: ValidatedJson<CreateApiKey, DeserrError>,
     _req: HttpRequest,
 ) -> Result<HttpResponse, ResponseError> {
     let v = body.into_inner();
@@ -49,72 +50,28 @@ pub async fn create_api_key(
     Ok(HttpResponse::Created().json(res))
 }

-#[derive(Debug)]
-pub struct PaginationDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for PaginationDeserrError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for PaginationDeserrError {}
-impl ErrorCode for PaginationDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<PaginationDeserrError> for PaginationDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: PaginationDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl DeserializeError for PaginationDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: deserr::ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        let code = match location.last_field() {
-            Some("offset") => Code::InvalidApiKeyLimit,
-            Some("limit") => Code::InvalidApiKeyOffset,
-            _ => Code::BadRequest,
-        };
-
-        Err(PaginationDeserrError { error, code })
-    }
-}
-
-impl MergeWithError<ParseIntError> for PaginationDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: ParseIntError,
-        merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        PaginationDeserrError::error::<Infallible>(
-            None,
-            deserr::ErrorKind::Unexpected { msg: other.to_string() },
-            merge_location,
-        )
-    }
-}
+#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct ListApiKeys {
+    #[serde(default)]
+    #[deserr(error = DeserrError<InvalidApiKeyOffset>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
+    pub offset: usize,
+    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    #[deserr(error = DeserrError<InvalidApiKeyLimit>, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
+    pub limit: usize,
+}
+impl ListApiKeys {
+    fn as_pagination(self) -> Pagination {
+        Pagination { offset: self.offset, limit: self.limit }
+    }
+}

 pub async fn list_api_keys(
     auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
-    paginate: QueryParameter<Pagination, PaginationDeserrError>,
+    list_api_keys: QueryParameter<ListApiKeys, DeserrError>,
 ) -> Result<HttpResponse, ResponseError> {
-    let paginate = paginate.into_inner();
+    let paginate = list_api_keys.into_inner().as_pagination();
     let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
         let keys = auth_controller.list_keys()?;
         let page_view = paginate
@@ -149,15 +106,15 @@ pub async fn get_api_key(

 pub async fn patch_api_key(
     auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
-    body: web::Json<Value>,
+    body: ValidatedJson<PatchApiKey, DeserrError>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
     let key = path.into_inner().key;
-    let body = body.into_inner();
+    let patch_api_key = body.into_inner();
     let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
         let uid =
             Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
-        let key = auth_controller.update_key(uid, body)?;
+        let key = auth_controller.update_key(uid, patch_api_key)?;

         Ok(KeyView::from_key(key, &auth_controller))
     })
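A minimal sketch of the new pagination flow for the key-listing route (not part of the commit; it assumes the `ListApiKeys` and `Pagination` types above, with `Pagination` exposing `offset` and `limit`): the strongly typed query struct carries the deserr error codes, then degrades into the shared pagination helper before the keys are listed.

fn list_api_keys_pagination_example() {
    // Roughly what a request such as `GET /keys?offset=5&limit=2` deserializes into.
    let query = ListApiKeys { offset: 5, limit: 2 };
    let pagination = query.as_pagination();
    assert_eq!((pagination.offset, pagination.limit), (5, 2));
}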
@@ -1,19 +1,17 @@
-use std::convert::Infallible;
-use std::fmt;
 use std::io::ErrorKind;
 use std::num::ParseIntError;
-use std::str::FromStr;

 use actix_web::http::header::CONTENT_TYPE;
 use actix_web::web::Data;
 use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
-use deserr::{DeserializeError, DeserializeFromValue, IntoValue, MergeWithError, ValuePointerRef};
+use deserr::DeserializeFromValue;
 use futures::StreamExt;
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
 use meilisearch_types::heed::RoTxn;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
@@ -29,6 +27,7 @@ use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};

+use super::search::parse_usize_take_error_message;
 use crate::analytics::{Analytics, DocumentDeletionKind};
 use crate::error::MeilisearchHttpError;
 use crate::error::PayloadError::ReceivePayload;
@@ -83,61 +82,16 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 }

 #[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct GetDocument {
+    #[deserr(error = DeserrError<InvalidDocumentFields>)]
     fields: Option<CS<StarOr<String>>>,
 }

-#[derive(Debug)]
-pub struct GetDocumentDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for GetDocumentDeserrError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for GetDocumentDeserrError {}
-impl ErrorCode for GetDocumentDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<GetDocumentDeserrError> for GetDocumentDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: GetDocumentDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl DeserializeError for GetDocumentDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: deserr::ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        let code = match location.last_field() {
-            Some("fields") => Code::InvalidDocumentFields,
-            _ => Code::BadRequest,
-        };
-
-        Err(GetDocumentDeserrError { error, code })
-    }
-}
-
 pub async fn get_document(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
     path: web::Path<DocumentParam>,
-    params: QueryParameter<GetDocument, GetDocumentDeserrError>,
+    params: QueryParameter<GetDocument, DeserrError>,
 ) -> Result<HttpResponse, ResponseError> {
     let GetDocument { fields } = params.into_inner();
     let attributes_to_retrieve = fields.and_then(fold_star_or);
@@ -165,81 +119,20 @@ pub async fn delete_document(
 }

 #[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct BrowseQuery {
-    #[deserr(default, from(&String) = FromStr::from_str -> ParseIntError)]
+    #[deserr(error = DeserrError<InvalidDocumentFields>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<ParseIntError>)]
     offset: usize,
-    #[deserr(default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = FromStr::from_str -> ParseIntError)]
+    #[deserr(error = DeserrError<InvalidDocumentLimit>, default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<ParseIntError>)]
     limit: usize,
+    #[deserr(error = DeserrError<InvalidDocumentLimit>)]
     fields: Option<CS<StarOr<String>>>,
 }

-#[derive(Debug)]
-pub struct BrowseQueryDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for BrowseQueryDeserrError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for BrowseQueryDeserrError {}
-impl ErrorCode for BrowseQueryDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<BrowseQueryDeserrError> for BrowseQueryDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: BrowseQueryDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl DeserializeError for BrowseQueryDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: deserr::ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        let code = match location.last_field() {
-            Some("fields") => Code::InvalidDocumentFields,
-            Some("offset") => Code::InvalidDocumentOffset,
-            Some("limit") => Code::InvalidDocumentLimit,
-            _ => Code::BadRequest,
-        };
-
-        Err(BrowseQueryDeserrError { error, code })
-    }
-}
-
-impl MergeWithError<ParseIntError> for BrowseQueryDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: ParseIntError,
-        merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        BrowseQueryDeserrError::error::<Infallible>(
-            None,
-            deserr::ErrorKind::Unexpected { msg: other.to_string() },
-            merge_location,
-        )
-    }
-}
-
 pub async fn get_all_documents(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    params: QueryParameter<BrowseQuery, BrowseQueryDeserrError>,
+    params: QueryParameter<BrowseQuery, DeserrError>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
     let BrowseQuery { limit, offset, fields } = params.into_inner();
@@ -255,61 +148,16 @@ pub async fn get_all_documents(
 }

 #[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct UpdateDocumentsQuery {
+    #[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
     pub primary_key: Option<String>,
 }

-#[derive(Debug)]
-pub struct UpdateDocumentsQueryDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for UpdateDocumentsQueryDeserrError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for UpdateDocumentsQueryDeserrError {}
-impl ErrorCode for UpdateDocumentsQueryDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<UpdateDocumentsQueryDeserrError> for UpdateDocumentsQueryDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: UpdateDocumentsQueryDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl DeserializeError for UpdateDocumentsQueryDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: deserr::ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        let code = match location.last_field() {
-            Some("primaryKey") => Code::InvalidIndexPrimaryKey,
-            _ => Code::BadRequest,
-        };
-
-        Err(UpdateDocumentsQueryDeserrError { error, code })
-    }
-}
-
 pub async fn add_documents(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    params: QueryParameter<UpdateDocumentsQuery, UpdateDocumentsQueryDeserrError>,
+    params: QueryParameter<UpdateDocumentsQuery, DeserrError>,
     body: Payload,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
@@ -337,7 +185,7 @@ pub async fn add_documents(
 pub async fn update_documents(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
     path: web::Path<String>,
-    params: QueryParameter<UpdateDocumentsQuery, UpdateDocumentsQueryDeserrError>,
+    params: QueryParameter<UpdateDocumentsQuery, DeserrError>,
     body: Payload,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
@@ -1,14 +1,12 @@
 use std::convert::Infallible;
-use std::num::ParseIntError;

 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::{
-    DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
-};
+use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
 use index_scheduler::IndexScheduler;
 use log::debug;
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::{unwrap_any, Code, DeserrError, ResponseError, TakeErrorMessage};
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::{self, FieldDistribution, Index};
 use meilisearch_types::tasks::KindWithContent;
@@ -16,7 +14,8 @@ use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;

-use super::{Pagination, SummarizedTaskView};
+use self::search::parse_usize_take_error_message;
+use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
@@ -72,9 +71,26 @@ impl IndexView {
     }
 }

+#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct ListIndexes {
+    #[serde(default)]
+    #[deserr(error = DeserrError<InvalidIndexOffset>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
+    pub offset: usize,
+    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    #[deserr(error = DeserrError<InvalidIndexLimit>, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
+    pub limit: usize,
+}
+impl ListIndexes {
+    fn as_pagination(self) -> Pagination {
+        Pagination { offset: self.offset, limit: self.limit }
+    }
+}
+
 pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
-    paginate: QueryParameter<Pagination, ListIndexesDeserrError>,
+    paginate: QueryParameter<ListIndexes, DeserrError>,
 ) -> Result<HttpResponse, ResponseError> {
     let search_rules = &index_scheduler.filters().search_rules;
     let indexes: Vec<_> = index_scheduler.indexes()?;
@@ -84,82 +100,24 @@ pub async fn list_indexes(
         .map(|(name, index)| IndexView::new(name, &index))
         .collect::<Result<Vec<_>, _>>()?;

-    let ret = paginate.auto_paginate_sized(indexes.into_iter());
+    let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());

     debug!("returns: {:?}", ret);
     Ok(HttpResponse::Ok().json(ret))
 }

-#[derive(Debug)]
-pub struct ListIndexesDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for ListIndexesDeserrError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for ListIndexesDeserrError {}
-impl ErrorCode for ListIndexesDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<ListIndexesDeserrError> for ListIndexesDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: ListIndexesDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl deserr::DeserializeError for ListIndexesDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let code = match location.last_field() {
-            Some("offset") => Code::InvalidIndexLimit,
-            Some("limit") => Code::InvalidIndexOffset,
-            _ => Code::BadRequest,
-        };
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        Err(ListIndexesDeserrError { error, code })
-    }
-}
-
-impl MergeWithError<ParseIntError> for ListIndexesDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: ParseIntError,
-        merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        ListIndexesDeserrError::error::<Infallible>(
-            None,
-            ErrorKind::Unexpected { msg: other.to_string() },
-            merge_location,
-        )
-    }
-}
-
 #[derive(DeserializeFromValue, Debug)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct IndexCreateRequest {
+    #[deserr(error = DeserrError<InvalidIndexUid>, missing_field_error = DeserrError::missing_index_uid)]
     uid: String,
+    #[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
     primary_key: Option<String>,
 }

 pub async fn create_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
-    body: ValidatedJson<IndexCreateRequest, CreateIndexesDeserrError>,
+    body: ValidatedJson<IndexCreateRequest, DeserrError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -184,58 +142,29 @@ pub async fn create_index(
     }
 }

-#[derive(Debug)]
-pub struct CreateIndexesDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for CreateIndexesDeserrError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for CreateIndexesDeserrError {}
-impl ErrorCode for CreateIndexesDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<CreateIndexesDeserrError> for CreateIndexesDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: CreateIndexesDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl deserr::DeserializeError for CreateIndexesDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let code = match location.last_field() {
-            Some("uid") => Code::InvalidIndexUid,
-            Some("primaryKey") => Code::InvalidIndexPrimaryKey,
-            None if matches!(error, ErrorKind::MissingField { field } if field == "uid") => {
-                Code::MissingIndexUid
-            }
-            _ => Code::BadRequest,
-        };
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        Err(CreateIndexesDeserrError { error, code })
-    }
-}
+fn deny_immutable_fields_index(
+    field: &str,
+    accepted: &[&str],
+    location: ValuePointerRef,
+) -> DeserrError {
+    let mut error = unwrap_any(DeserrError::<BadRequest>::error::<Infallible>(
+        None,
+        deserr::ErrorKind::UnknownKey { key: field, accepted },
+        location,
+    ));
+    error.code = match field {
+        "uid" => Code::ImmutableIndexUid,
+        "createdAt" => Code::ImmutableIndexCreatedAt,
+        "updatedAt" => Code::ImmutableIndexUpdatedAt,
+        _ => Code::BadRequest,
+    };
+    error
+}

 #[derive(DeserializeFromValue, Debug)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
 pub struct UpdateIndexRequest {
+    #[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
     primary_key: Option<String>,
 }

@@ -254,7 +183,7 @@ pub async fn get_index(
 pub async fn update_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
     path: web::Path<String>,
-    body: ValidatedJson<UpdateIndexRequest, UpdateIndexesDeserrError>,
+    body: ValidatedJson<UpdateIndexRequest, DeserrError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -278,51 +207,6 @@ pub async fn update_index(
     Ok(HttpResponse::Accepted().json(task))
 }

-#[derive(Debug)]
-pub struct UpdateIndexesDeserrError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for UpdateIndexesDeserrError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for UpdateIndexesDeserrError {}
-impl ErrorCode for UpdateIndexesDeserrError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<UpdateIndexesDeserrError> for UpdateIndexesDeserrError {
-    fn merge(
-        _self_: Option<Self>,
-        other: UpdateIndexesDeserrError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl deserr::DeserializeError for UpdateIndexesDeserrError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let code = match location.last_field() {
-            Some("primaryKey") => Code::InvalidIndexPrimaryKey,
-            _ => Code::BadRequest,
-        };
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        Err(UpdateIndexesDeserrError { error, code })
-    }
-}
-
 pub async fn delete_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
@@ -5,7 +5,8 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_auth::IndexSearchRules;
-use meilisearch_types::error::ResponseError;
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
 use serde_cs::vec::CS;
 use serde_json::Value;

@@ -15,11 +16,11 @@ use crate::extractors::authentication::GuardedData;
 use crate::extractors::json::ValidatedJson;
 use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::from_string_to_option;
+use crate::routes::from_string_to_option_take_error_message;
 use crate::search::{
-    perform_search, MatchingStrategy, SearchDeserError, SearchQuery, DEFAULT_CROP_LENGTH,
-    DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
-    DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
+    perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
+    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
+    DEFAULT_SEARCH_OFFSET,
 };

 pub fn configure(cfg: &mut web::ServiceConfig) {
@@ -30,35 +31,54 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }

+pub fn parse_usize_take_error_message(
+    s: &str,
+) -> Result<usize, TakeErrorMessage<std::num::ParseIntError>> {
+    usize::from_str(s).map_err(TakeErrorMessage)
+}
+
+pub fn parse_bool_take_error_message(
+    s: &str,
+) -> Result<bool, TakeErrorMessage<std::str::ParseBoolError>> {
+    s.parse().map_err(TakeErrorMessage)
+}
+
 #[derive(Debug, deserr::DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQueryGet {
+    #[deserr(error = DeserrError<InvalidSearchQ>)]
     q: Option<String>,
-    #[deserr(default = DEFAULT_SEARCH_OFFSET(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
+    #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
     offset: usize,
-    #[deserr(default = DEFAULT_SEARCH_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
+    #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
     limit: usize,
-    #[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)]
+    #[deserr(error = DeserrError<InvalidSearchPage>, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
     page: Option<usize>,
-    #[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)]
+    #[deserr(error = DeserrError<InvalidSearchHitsPerPage>, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
     hits_per_page: Option<usize>,
+    #[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
     attributes_to_retrieve: Option<CS<String>>,
+    #[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
     attributes_to_crop: Option<CS<String>>,
-    #[deserr(default = DEFAULT_CROP_LENGTH(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
+    #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
     crop_length: usize,
+    #[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
     attributes_to_highlight: Option<CS<String>>,
+    #[deserr(error = DeserrError<InvalidSearchFilter>)]
     filter: Option<String>,
+    #[deserr(error = DeserrError<InvalidSearchSort>)]
     sort: Option<String>,
-    #[deserr(default, from(&String) = FromStr::from_str -> std::str::ParseBoolError)]
+    #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default, from(&String) = parse_bool_take_error_message -> TakeErrorMessage<std::str::ParseBoolError>)]
     show_matches_position: bool,
+    #[deserr(error = DeserrError<InvalidSearchFacets>)]
     facets: Option<CS<String>>,
-    #[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())]
+    #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
     highlight_pre_tag: String,
-    #[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())]
+    #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
     highlight_post_tag: String,
-    #[deserr(default = DEFAULT_CROP_MARKER())]
+    #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
     crop_marker: String,
-    #[deserr(default)]
+    #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
     matching_strategy: MatchingStrategy,
 }

@ -142,7 +162,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
|
|||||||
pub async fn search_with_url_query(
|
pub async fn search_with_url_query(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
|
||||||
index_uid: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
params: QueryParameter<SearchQueryGet, SearchDeserError>,
|
params: QueryParameter<SearchQueryGet, DeserrError>,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
@ -174,7 +194,7 @@ pub async fn search_with_url_query(
|
|||||||
pub async fn search_with_post(
|
pub async fn search_with_post(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
|
||||||
index_uid: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
params: ValidatedJson<SearchQuery, SearchDeserError>,
|
params: ValidatedJson<SearchQuery, DeserrError>,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
@@ -1,13 +1,10 @@
-use std::fmt;
-
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::{IntoValue, ValuePointerRef};
 use index_scheduler::IndexScheduler;
 use log::debug;
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
+use meilisearch_types::error::{DeserrError, ResponseError};
 use meilisearch_types::index_uid::IndexUid;
-use meilisearch_types::settings::{settings, Settings, Unchecked};
+use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;

@@ -19,7 +16,7 @@ use crate::routes::SummarizedTaskView;

 #[macro_export]
 macro_rules! make_setting_route {
-($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
+($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
 pub mod $attr {
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse, Resource};
@@ -68,7 +65,7 @@ macro_rules! make_setting_route {
 Data<IndexScheduler>,
 >,
 index_uid: actix_web::web::Path<String>,
-body: actix_web::web::Json<Option<$type>>,
+body: $crate::routes::indexes::ValidatedJson<Option<$type>, $err_ty>,
 req: HttpRequest,
 $analytics_var: web::Data<dyn Analytics>,
 ) -> std::result::Result<HttpResponse, ResponseError> {
@@ -133,6 +130,9 @@ make_setting_route!(
 "/filterable-attributes",
 put,
 std::collections::BTreeSet<String>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsFilterableAttributes,
+>,
 filterable_attributes,
 "filterableAttributes",
 analytics,
@@ -156,6 +156,9 @@ make_setting_route!(
 "/sortable-attributes",
 put,
 std::collections::BTreeSet<String>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsSortableAttributes,
+>,
 sortable_attributes,
 "sortableAttributes",
 analytics,
@@ -179,6 +182,9 @@ make_setting_route!(
 "/displayed-attributes",
 put,
 Vec<String>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsDisplayedAttributes,
+>,
 displayed_attributes,
 "displayedAttributes",
 analytics,
@@ -202,6 +208,9 @@ make_setting_route!(
 "/typo-tolerance",
 patch,
 meilisearch_types::settings::TypoSettings,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsTypoTolerance,
+>,
 typo_tolerance,
 "typoTolerance",
 analytics,
@@ -212,7 +221,7 @@ make_setting_route!(
 "TypoTolerance Updated".to_string(),
 json!({
 "typo_tolerance": {
-"enabled": setting.as_ref().map(|s| !matches!(s.enabled.into(), Setting::Set(false))),
+"enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
 "disable_on_attributes": setting
 .as_ref()
 .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
@@ -244,6 +253,9 @@ make_setting_route!(
 "/searchable-attributes",
 put,
 Vec<String>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsSearchableAttributes,
+>,
 searchable_attributes,
 "searchableAttributes",
 analytics,
@@ -267,6 +279,9 @@ make_setting_route!(
 "/stop-words",
 put,
 std::collections::BTreeSet<String>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsStopWords,
+>,
 stop_words,
 "stopWords",
 analytics,
@@ -289,6 +304,9 @@ make_setting_route!(
 "/synonyms",
 put,
 std::collections::BTreeMap<String, Vec<String>>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsSynonyms,
+>,
 synonyms,
 "synonyms",
 analytics,
@@ -311,6 +329,9 @@ make_setting_route!(
 "/distinct-attribute",
 put,
 String,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsDistinctAttribute,
+>,
 distinct_attribute,
 "distinctAttribute",
 analytics,
@@ -331,24 +352,27 @@ make_setting_route!(
 make_setting_route!(
 "/ranking-rules",
 put,
-Vec<String>,
+Vec<meilisearch_types::settings::RankingRuleView>,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsRankingRules,
+>,
 ranking_rules,
 "rankingRules",
 analytics,
-|setting: &Option<Vec<String>>, req: &HttpRequest| {
+|setting: &Option<Vec<meilisearch_types::settings::RankingRuleView>>, req: &HttpRequest| {
 use serde_json::json;

 analytics.publish(
 "RankingRules Updated".to_string(),
 json!({
 "ranking_rules": {
-"words_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "words")),
-"typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "typo")),
-"proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "proximity")),
-"attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "attribute")),
-"sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "sort")),
-"exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "exactness")),
-"values": setting.as_ref().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::<Vec<_>>().join(", ")),
+"words_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))),
+"typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))),
+"proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Proximity))),
+"attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Attribute))),
+"sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))),
+"exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Exactness))),
+"values": setting.as_ref().map(|rr| rr.iter().filter(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Asc(_) | meilisearch_types::settings::RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
 }
 }),
 Some(req),
@@ -360,6 +384,9 @@ make_setting_route!(
 "/faceting",
 patch,
 meilisearch_types::settings::FacetingSettings,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsFaceting,
+>,
 faceting,
 "faceting",
 analytics,
@@ -382,6 +409,9 @@ make_setting_route!(
 "/pagination",
 patch,
 meilisearch_types::settings::PaginationSettings,
+meilisearch_types::error::DeserrError<
+meilisearch_types::error::deserr_codes::InvalidSettingsPagination,
+>,
 pagination,
 "pagination",
 analytics,
@@ -428,66 +458,10 @@ generate_configure!(
 faceting
 );

-#[derive(Debug)]
-pub struct SettingsDeserrError {
-error: String,
-code: Code,
-}
-
-impl std::fmt::Display for SettingsDeserrError {
-fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-write!(f, "{}", self.error)
-}
-}
-
-impl std::error::Error for SettingsDeserrError {}
-impl ErrorCode for SettingsDeserrError {
-fn error_code(&self) -> Code {
-self.code
-}
-}
-
-impl deserr::MergeWithError<SettingsDeserrError> for SettingsDeserrError {
-fn merge(
-_self_: Option<Self>,
-other: SettingsDeserrError,
-_merge_location: ValuePointerRef,
-) -> Result<Self, Self> {
-Err(other)
-}
-}
-
-impl deserr::DeserializeError for SettingsDeserrError {
-fn error<V: IntoValue>(
-_self_: Option<Self>,
-error: deserr::ErrorKind<V>,
-location: ValuePointerRef,
-) -> Result<Self, Self> {
-let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-let code = match location.first_field() {
-Some("displayedAttributes") => Code::InvalidSettingsDisplayedAttributes,
-Some("searchableAttributes") => Code::InvalidSettingsSearchableAttributes,
-Some("filterableAttributes") => Code::InvalidSettingsFilterableAttributes,
-Some("sortableAttributes") => Code::InvalidSettingsSortableAttributes,
-Some("rankingRules") => Code::InvalidSettingsRankingRules,
-Some("stopWords") => Code::InvalidSettingsStopWords,
-Some("synonyms") => Code::InvalidSettingsSynonyms,
-Some("distinctAttribute") => Code::InvalidSettingsDistinctAttribute,
-Some("typoTolerance") => Code::InvalidSettingsTypoTolerance,
-Some("faceting") => Code::InvalidSettingsFaceting,
-Some("pagination") => Code::InvalidSettingsPagination,
-_ => Code::BadRequest,
-};
-
-Err(SettingsDeserrError { error, code })
-}
-}
-
 pub async fn update_all(
 index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
 index_uid: web::Path<String>,
-body: ValidatedJson<Settings<Unchecked>, SettingsDeserrError>,
+body: ValidatedJson<Settings<Unchecked>, DeserrError>,
 req: HttpRequest,
 analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -497,13 +471,13 @@ pub async fn update_all(
 "Settings Updated".to_string(),
 json!({
 "ranking_rules": {
-"words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "words")),
-"typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "typo")),
-"proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "proximity")),
-"attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "attribute")),
-"sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "sort")),
-"exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "exactness")),
-"values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::<Vec<_>>().join(", ")),
+"words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Words))),
+"typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Typo))),
+"proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Proximity))),
+"attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Attribute))),
+"sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Sort))),
+"exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Exactness))),
+"values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !matches!(s, RankingRuleView::Asc(_) | RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
 },
 "searchable_attributes": {
 "total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
@@ -3,10 +3,9 @@ use std::str::FromStr;

 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::DeserializeFromValue;
 use index_scheduler::{IndexScheduler, Query};
 use log::debug;
-use meilisearch_types::error::ResponseError;
+use meilisearch_types::error::{ResponseError, TakeErrorMessage};
 use meilisearch_types::settings::{Settings, Unchecked};
 use meilisearch_types::star_or::StarOr;
 use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
@@ -57,6 +56,14 @@ where
 {
 Ok(Some(input.parse()?))
 }
+pub fn from_string_to_option_take_error_message<T, E>(
+input: &str,
+) -> Result<Option<T>, TakeErrorMessage<E>>
+where
+T: FromStr<Err = E>,
+{
+Ok(Some(input.parse().map_err(TakeErrorMessage)?))
+}

 const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

@@ -83,16 +90,8 @@ impl From<Task> for SummarizedTaskView {
 }
 }
 }

-#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct Pagination {
-#[serde(default)]
-#[deserr(default, from(&String) = FromStr::from_str -> std::num::ParseIntError)]
 pub offset: usize,
-#[serde(default = "PAGINATION_DEFAULT_LIMIT")]
-#[deserr(default = PAGINATION_DEFAULT_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
 pub limit: usize,
 }

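For orientation, here is a minimal, self-contained sketch of what the `from_string_to_option_take_error_message` helper added above does. The `TakeErrorMessage` tuple struct below is a stand-in for the one in `meilisearch_types` (assumed here to be a simple newtype around the underlying error); the snippet is illustrative only and is not part of this commit:

```rust
use std::str::FromStr;

// Stand-in for meilisearch_types::error::TakeErrorMessage, only for this sketch.
#[derive(Debug)]
struct TakeErrorMessage<E>(E);

// Same shape as the helper above: parse a string into Some(T), wrapping any
// FromStr error so that only its message is surfaced downstream.
fn from_string_to_option_take_error_message<T, E>(
    input: &str,
) -> Result<Option<T>, TakeErrorMessage<E>>
where
    T: FromStr<Err = E>,
{
    Ok(Some(input.parse().map_err(TakeErrorMessage)?))
}

fn main() {
    // A valid value parses into Some(..).
    let page: Option<usize> = from_string_to_option_take_error_message("2").unwrap();
    assert_eq!(page, Some(2));

    // A non-numeric value surfaces the wrapped ParseIntError.
    let err = from_string_to_option_take_error_message::<usize, _>("two").unwrap_err();
    println!("rejected: {:?}", err.0);
}
```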
@@ -1,10 +1,9 @@
-use std::fmt;
-
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::{DeserializeFromValue, IntoValue, ValuePointerRef};
+use deserr::DeserializeFromValue;
 use index_scheduler::IndexScheduler;
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
+use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
+use meilisearch_types::error::{DeserrError, ResponseError};
 use meilisearch_types::tasks::{IndexSwap, KindWithContent};
 use serde_json::json;

@@ -21,14 +20,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 }

 #[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SwapIndexesPayload {
+#[deserr(error = DeserrError<InvalidSwapIndexes>)]
 indexes: Vec<String>,
 }

 pub async fn swap_indexes(
 index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
-params: ValidatedJson<Vec<SwapIndexesPayload>, SwapIndexesDeserrError>,
+params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrError>,
 req: HttpRequest,
 analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -62,49 +62,3 @@ pub async fn swap_indexes(
 let task: SummarizedTaskView = task.into();
 Ok(HttpResponse::Accepted().json(task))
 }
-
-#[derive(Debug)]
-pub struct SwapIndexesDeserrError {
-error: String,
-code: Code,
-}
-
-impl std::fmt::Display for SwapIndexesDeserrError {
-fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-write!(f, "{}", self.error)
-}
-}
-
-impl std::error::Error for SwapIndexesDeserrError {}
-impl ErrorCode for SwapIndexesDeserrError {
-fn error_code(&self) -> Code {
-self.code
-}
-}
-
-impl deserr::MergeWithError<SwapIndexesDeserrError> for SwapIndexesDeserrError {
-fn merge(
-_self_: Option<Self>,
-other: SwapIndexesDeserrError,
-_merge_location: ValuePointerRef,
-) -> Result<Self, Self> {
-Err(other)
-}
-}
-
-impl deserr::DeserializeError for SwapIndexesDeserrError {
-fn error<V: IntoValue>(
-_self_: Option<Self>,
-error: deserr::ErrorKind<V>,
-location: ValuePointerRef,
-) -> Result<Self, Self> {
-let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-let code = match location.last_field() {
-Some("indexes") => Code::InvalidSwapIndexes,
-_ => Code::BadRequest,
-};
-
-Err(SwapIndexesDeserrError { error, code })
-}
-}
@@ -1,10 +1,12 @@
+use std::num::ParseIntError;
 use std::str::FromStr;

 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use index_scheduler::error::DateField;
+use deserr::DeserializeFromValue;
 use index_scheduler::{IndexScheduler, Query, TaskId};
-use meilisearch_types::error::ResponseError;
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::settings::{Settings, Unchecked};
 use meilisearch_types::star_or::StarOr;
@@ -14,14 +16,16 @@ use meilisearch_types::tasks::{
 use serde::{Deserialize, Serialize};
 use serde_cs::vec::CS;
 use serde_json::json;
-use time::{Duration, OffsetDateTime};
+use time::format_description::well_known::Rfc3339;
+use time::macros::format_description;
+use time::{Date, Duration, OffsetDateTime, Time};
 use tokio::task;

-use self::date_deserializer::{deserialize_date, DeserializeDateOption};
 use super::{fold_star_or, SummarizedTaskView};
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
+use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;

 const DEFAULT_LIMIT: fn() -> u32 = || 20;
@ -160,307 +164,124 @@ impl From<Details> for DetailsView {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
fn parse_option_cs<T: FromStr>(
|
||||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
s: Option<CS<String>>,
|
||||||
pub struct TaskCommonQueryRaw {
|
) -> Result<Option<Vec<T>>, TakeErrorMessage<T::Err>> {
|
||||||
pub uids: Option<CS<String>>,
|
if let Some(s) = s {
|
||||||
pub canceled_by: Option<CS<String>>,
|
s.into_iter()
|
||||||
pub types: Option<CS<StarOr<String>>>,
|
.map(|s| T::from_str(&s))
|
||||||
pub statuses: Option<CS<StarOr<String>>>,
|
.collect::<Result<Vec<T>, T::Err>>()
|
||||||
pub index_uids: Option<CS<StarOr<String>>>,
|
.map_err(TakeErrorMessage)
|
||||||
}
|
.map(Some)
|
||||||
|
|
||||||
impl TaskCommonQueryRaw {
|
|
||||||
fn validate(self) -> Result<TaskCommonQuery, ResponseError> {
|
|
||||||
let Self { uids, canceled_by, types, statuses, index_uids } = self;
|
|
||||||
let uids = if let Some(uids) = uids {
|
|
||||||
Some(
|
|
||||||
uids.into_iter()
|
|
||||||
.map(|uid_string| {
|
|
||||||
uid_string.parse::<u32>().map_err(|_e| {
|
|
||||||
index_scheduler::Error::InvalidTaskUids { task_uid: uid_string }.into()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<u32>, ResponseError>>()?,
|
|
||||||
)
|
|
||||||
} else {
|
} else {
|
||||||
None
|
Ok(None)
|
||||||
};
|
|
||||||
let canceled_by = if let Some(canceled_by) = canceled_by {
|
|
||||||
Some(
|
|
||||||
canceled_by
|
|
||||||
.into_iter()
|
|
||||||
.map(|canceled_by_string| {
|
|
||||||
canceled_by_string.parse::<u32>().map_err(|_e| {
|
|
||||||
index_scheduler::Error::InvalidTaskCanceledBy {
|
|
||||||
canceled_by: canceled_by_string,
|
|
||||||
}
|
}
|
||||||
.into()
|
}
|
||||||
})
|
fn parse_option_cs_star_or<T: FromStr>(
|
||||||
})
|
s: Option<CS<StarOr<String>>>,
|
||||||
.collect::<Result<Vec<u32>, ResponseError>>()?,
|
) -> Result<Option<Vec<T>>, TakeErrorMessage<T::Err>> {
|
||||||
)
|
if let Some(s) = s.and_then(fold_star_or) as Option<Vec<String>> {
|
||||||
|
s.into_iter()
|
||||||
|
.map(|s| T::from_str(&s))
|
||||||
|
.collect::<Result<Vec<T>, T::Err>>()
|
||||||
|
.map_err(TakeErrorMessage)
|
||||||
|
.map(Some)
|
||||||
} else {
|
} else {
|
||||||
None
|
Ok(None)
|
||||||
};
|
}
|
||||||
|
}
|
||||||
let types = if let Some(types) = types.and_then(fold_star_or) as Option<Vec<String>> {
|
fn parse_option_str<T: FromStr>(s: Option<String>) -> Result<Option<T>, TakeErrorMessage<T::Err>> {
|
||||||
Some(
|
if let Some(s) = s {
|
||||||
types
|
T::from_str(&s).map_err(TakeErrorMessage).map(Some)
|
||||||
.into_iter()
|
|
||||||
.map(|type_string| {
|
|
||||||
Kind::from_str(&type_string).map_err(|_e| {
|
|
||||||
index_scheduler::Error::InvalidTaskTypes { type_: type_string }.into()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<Kind>, ResponseError>>()?,
|
|
||||||
)
|
|
||||||
} else {
|
} else {
|
||||||
None
|
Ok(None)
|
||||||
};
|
|
||||||
let statuses = if let Some(statuses) =
|
|
||||||
statuses.and_then(fold_star_or) as Option<Vec<String>>
|
|
||||||
{
|
|
||||||
Some(
|
|
||||||
statuses
|
|
||||||
.into_iter()
|
|
||||||
.map(|status_string| {
|
|
||||||
Status::from_str(&status_string).map_err(|_e| {
|
|
||||||
index_scheduler::Error::InvalidTaskStatuses { status: status_string }
|
|
||||||
.into()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<Status>, ResponseError>>()?,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let index_uids =
|
|
||||||
if let Some(index_uids) = index_uids.and_then(fold_star_or) as Option<Vec<String>> {
|
|
||||||
Some(
|
|
||||||
index_uids
|
|
||||||
.into_iter()
|
|
||||||
.map(|index_uid_string| {
|
|
||||||
IndexUid::from_str(&index_uid_string)
|
|
||||||
.map(|index_uid| index_uid.to_string())
|
|
||||||
.map_err(|_e| {
|
|
||||||
index_scheduler::Error::InvalidIndexUid {
|
|
||||||
index_uid: index_uid_string,
|
|
||||||
}
|
|
||||||
.into()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<String>, ResponseError>>()?,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
Ok(TaskCommonQuery { types, uids, canceled_by, statuses, index_uids })
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
fn parse_str<T: FromStr>(s: String) -> Result<T, TakeErrorMessage<T::Err>> {
|
||||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
T::from_str(&s).map_err(TakeErrorMessage)
|
||||||
pub struct TaskDateQueryRaw {
|
|
||||||
pub after_enqueued_at: Option<String>,
|
|
||||||
pub before_enqueued_at: Option<String>,
|
|
||||||
pub after_started_at: Option<String>,
|
|
||||||
pub before_started_at: Option<String>,
|
|
||||||
pub after_finished_at: Option<String>,
|
|
||||||
pub before_finished_at: Option<String>,
|
|
||||||
}
|
|
||||||
impl TaskDateQueryRaw {
|
|
||||||
fn validate(self) -> Result<TaskDateQuery, ResponseError> {
|
|
||||||
let Self {
|
|
||||||
after_enqueued_at,
|
|
||||||
before_enqueued_at,
|
|
||||||
after_started_at,
|
|
||||||
before_started_at,
|
|
||||||
after_finished_at,
|
|
||||||
before_finished_at,
|
|
||||||
} = self;
|
|
||||||
|
|
||||||
let mut query = TaskDateQuery {
|
|
||||||
after_enqueued_at: None,
|
|
||||||
before_enqueued_at: None,
|
|
||||||
after_started_at: None,
|
|
||||||
before_started_at: None,
|
|
||||||
after_finished_at: None,
|
|
||||||
before_finished_at: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
for (field_name, string_value, before_or_after, dest) in [
|
|
||||||
(
|
|
||||||
DateField::AfterEnqueuedAt,
|
|
||||||
after_enqueued_at,
|
|
||||||
DeserializeDateOption::After,
|
|
||||||
&mut query.after_enqueued_at,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
DateField::BeforeEnqueuedAt,
|
|
||||||
before_enqueued_at,
|
|
||||||
DeserializeDateOption::Before,
|
|
||||||
&mut query.before_enqueued_at,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
DateField::AfterStartedAt,
|
|
||||||
after_started_at,
|
|
||||||
DeserializeDateOption::After,
|
|
||||||
&mut query.after_started_at,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
DateField::BeforeStartedAt,
|
|
||||||
before_started_at,
|
|
||||||
DeserializeDateOption::Before,
|
|
||||||
&mut query.before_started_at,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
DateField::AfterFinishedAt,
|
|
||||||
after_finished_at,
|
|
||||||
DeserializeDateOption::After,
|
|
||||||
&mut query.after_finished_at,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
DateField::BeforeFinishedAt,
|
|
||||||
before_finished_at,
|
|
||||||
DeserializeDateOption::Before,
|
|
||||||
&mut query.before_finished_at,
|
|
||||||
),
|
|
||||||
] {
|
|
||||||
if let Some(string_value) = string_value {
|
|
||||||
*dest = Some(deserialize_date(field_name, &string_value, before_or_after)?);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(query)
|
#[derive(Debug, DeserializeFromValue)]
|
||||||
}
|
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
|
||||||
pub struct TasksFilterQueryRaw {
|
|
||||||
#[serde(flatten)]
|
|
||||||
pub common: TaskCommonQueryRaw,
|
|
||||||
#[serde(default = "DEFAULT_LIMIT")]
|
|
||||||
pub limit: u32,
|
|
||||||
pub from: Option<TaskId>,
|
|
||||||
#[serde(flatten)]
|
|
||||||
pub dates: TaskDateQueryRaw,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
|
||||||
pub struct TaskDeletionOrCancelationQueryRaw {
|
|
||||||
#[serde(flatten)]
|
|
||||||
pub common: TaskCommonQueryRaw,
|
|
||||||
#[serde(flatten)]
|
|
||||||
pub dates: TaskDateQueryRaw,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TasksFilterQueryRaw {
|
|
||||||
fn validate(self) -> Result<TasksFilterQuery, ResponseError> {
|
|
||||||
let Self { common, limit, from, dates } = self;
|
|
||||||
let common = common.validate()?;
|
|
||||||
let dates = dates.validate()?;
|
|
||||||
|
|
||||||
Ok(TasksFilterQuery { common, limit, from, dates })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TaskDeletionOrCancelationQueryRaw {
|
|
||||||
fn validate(self) -> Result<TaskDeletionOrCancelationQuery, ResponseError> {
|
|
||||||
let Self { common, dates } = self;
|
|
||||||
let common = common.validate()?;
|
|
||||||
let dates = dates.validate()?;
|
|
||||||
|
|
||||||
Ok(TaskDeletionOrCancelationQuery { common, dates })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize, Debug)]
|
|
||||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
|
||||||
pub struct TaskDateQuery {
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "Option::is_none",
|
|
||||||
serialize_with = "time::serde::rfc3339::option::serialize"
|
|
||||||
)]
|
|
||||||
after_enqueued_at: Option<OffsetDateTime>,
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "Option::is_none",
|
|
||||||
serialize_with = "time::serde::rfc3339::option::serialize"
|
|
||||||
)]
|
|
||||||
before_enqueued_at: Option<OffsetDateTime>,
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "Option::is_none",
|
|
||||||
serialize_with = "time::serde::rfc3339::option::serialize"
|
|
||||||
)]
|
|
||||||
after_started_at: Option<OffsetDateTime>,
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "Option::is_none",
|
|
||||||
serialize_with = "time::serde::rfc3339::option::serialize"
|
|
||||||
)]
|
|
||||||
before_started_at: Option<OffsetDateTime>,
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "Option::is_none",
|
|
||||||
serialize_with = "time::serde::rfc3339::option::serialize"
|
|
||||||
)]
|
|
||||||
after_finished_at: Option<OffsetDateTime>,
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "Option::is_none",
|
|
||||||
serialize_with = "time::serde::rfc3339::option::serialize"
|
|
||||||
)]
|
|
||||||
before_finished_at: Option<OffsetDateTime>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct TaskCommonQuery {
|
|
||||||
types: Option<Vec<Kind>>,
|
|
||||||
uids: Option<Vec<TaskId>>,
|
|
||||||
canceled_by: Option<Vec<TaskId>>,
|
|
||||||
statuses: Option<Vec<Status>>,
|
|
||||||
index_uids: Option<Vec<String>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct TasksFilterQuery {
|
pub struct TasksFilterQuery {
|
||||||
limit: u32,
|
#[deserr(error = DeserrError<InvalidTaskLimit>, default = DEFAULT_LIMIT(), from(String) = parse_str::<u32> -> TakeErrorMessage<ParseIntError>)]
|
||||||
from: Option<TaskId>,
|
pub limit: u32,
|
||||||
common: TaskCommonQuery,
|
#[deserr(error = DeserrError<InvalidTaskFrom>, from(Option<String>) = parse_option_str::<TaskId> -> TakeErrorMessage<ParseIntError>)]
|
||||||
dates: TaskDateQuery,
|
pub from: Option<TaskId>,
|
||||||
|
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskUids>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
|
||||||
|
pub uids: Option<Vec<u32>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskCanceledBy>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
|
||||||
|
pub canceled_by: Option<Vec<u32>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskTypes>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Kind> -> TakeErrorMessage<ResponseError>)]
|
||||||
|
pub types: Option<Vec<Kind>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskStatuses>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Status> -> TakeErrorMessage<ResponseError>)]
|
||||||
|
pub statuses: Option<Vec<Status>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidIndexUid>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<IndexUid> -> TakeErrorMessage<ResponseError>)]
|
||||||
|
pub index_uids: Option<Vec<IndexUid>>,
|
||||||
|
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskAfterEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub after_enqueued_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskBeforeEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub before_enqueued_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskAfterStartedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub after_started_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskBeforeStartedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub before_started_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskAfterFinishedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub after_finished_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskBeforeFinishedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub before_finished_at: Option<OffsetDateTime>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Deserialize, Debug, DeserializeFromValue)]
|
||||||
|
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
|
||||||
pub struct TaskDeletionOrCancelationQuery {
|
pub struct TaskDeletionOrCancelationQuery {
|
||||||
common: TaskCommonQuery,
|
#[deserr(error = DeserrError<InvalidTaskUids>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
|
||||||
dates: TaskDateQuery,
|
pub uids: Option<Vec<u32>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskCanceledBy>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
|
||||||
|
pub canceled_by: Option<Vec<u32>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskTypes>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Kind> -> TakeErrorMessage<ResponseError>)]
|
||||||
|
pub types: Option<Vec<Kind>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskStatuses>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Status> -> TakeErrorMessage<ResponseError>)]
|
||||||
|
pub statuses: Option<Vec<Status>>,
|
||||||
|
#[deserr(error = DeserrError<InvalidIndexUid>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<IndexUid> -> TakeErrorMessage<ResponseError>)]
|
||||||
|
pub index_uids: Option<Vec<IndexUid>>,
|
||||||
|
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskAfterEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub after_enqueued_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskBeforeEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub before_enqueued_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskAfterStartedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub after_started_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskBeforeStartedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub before_started_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskAfterFinishedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub after_finished_at: Option<OffsetDateTime>,
|
||||||
|
#[deserr(error = DeserrError<InvalidTaskBeforeFinishedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
|
||||||
|
pub before_finished_at: Option<OffsetDateTime>,
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cancel_tasks(
|
async fn cancel_tasks(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
|
||||||
params: web::Query<TaskDeletionOrCancelationQueryRaw>,
|
params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrError>,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
let query = params.into_inner().validate()?;
|
|
||||||
let TaskDeletionOrCancelationQuery {
|
let TaskDeletionOrCancelationQuery {
|
||||||
common: TaskCommonQuery { types, uids, canceled_by, statuses, index_uids },
|
types,
|
||||||
dates:
|
uids,
|
||||||
TaskDateQuery {
|
canceled_by,
|
||||||
|
statuses,
|
||||||
|
index_uids,
|
||||||
after_enqueued_at,
|
after_enqueued_at,
|
||||||
before_enqueued_at,
|
before_enqueued_at,
|
||||||
after_started_at,
|
after_started_at,
|
||||||
before_started_at,
|
before_started_at,
|
||||||
after_finished_at,
|
after_finished_at,
|
||||||
before_finished_at,
|
before_finished_at,
|
||||||
},
|
} = params.into_inner();
|
||||||
} = query;
|
|
||||||
|
|
||||||
analytics.publish(
|
analytics.publish(
|
||||||
"Tasks Canceled".to_string(),
|
"Tasks Canceled".to_string(),
|
||||||
@@ -485,7 +306,7 @@ async fn cancel_tasks(
 from: None,
 statuses,
 types,
-index_uids,
+index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
 uids,
 canceled_by,
 before_enqueued_at,
@@ -516,22 +337,24 @@ async fn cancel_tasks(

 async fn delete_tasks(
 index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
-params: web::Query<TaskDeletionOrCancelationQueryRaw>,
+params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrError>,
 req: HttpRequest,
 analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
 let TaskDeletionOrCancelationQuery {
-common: TaskCommonQuery { types, uids, canceled_by, statuses, index_uids },
-dates:
-TaskDateQuery {
+types,
+uids,
+canceled_by,
+statuses,
+index_uids,

 after_enqueued_at,
 before_enqueued_at,
 after_started_at,
 before_started_at,
 after_finished_at,
 before_finished_at,
-},
-} = params.into_inner().validate()?;
+} = params.into_inner();

 analytics.publish(
 "Tasks Deleted".to_string(),
@@ -556,7 +379,7 @@ async fn delete_tasks(
 from: None,
 statuses,
 types,
-index_uids,
+index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
 uids,
 canceled_by,
 after_enqueued_at,
@@ -595,26 +418,28 @@ pub struct AllTasks {

 async fn get_tasks(
 index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
-params: web::Query<TasksFilterQueryRaw>,
+params: QueryParameter<TasksFilterQuery, DeserrError>,
 req: HttpRequest,
 analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+let params = params.into_inner();
 analytics.get_tasks(&params, &req);

 let TasksFilterQuery {
-common: TaskCommonQuery { types, uids, canceled_by, statuses, index_uids },
+types,
+uids,
+canceled_by,
+statuses,
+index_uids,
 limit,
 from,
-dates:
-TaskDateQuery {
 after_enqueued_at,
 before_enqueued_at,
 after_started_at,
 before_started_at,
 after_finished_at,
 before_finished_at,
-},
-} = params.into_inner().validate()?;
+} = params;

 // We +1 just to know if there is more after this "page" or not.
 let limit = limit.saturating_add(1);
@@ -624,7 +449,7 @@ async fn get_tasks(
 from,
 statuses,
 types,
-index_uids,
+index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
 uids,
 canceled_by,
 before_enqueued_at,
@@ -691,23 +516,15 @@ async fn get_task(
 }
 }

-pub(crate) mod date_deserializer {
-use index_scheduler::error::DateField;
-use meilisearch_types::error::ResponseError;
-use time::format_description::well_known::Rfc3339;
-use time::macros::format_description;
-use time::{Date, Duration, OffsetDateTime, Time};
-
 pub enum DeserializeDateOption {
 Before,
 After,
 }

 pub fn deserialize_date(
-field_name: DateField,
 value: &str,
 option: DeserializeDateOption,
-) -> std::result::Result<OffsetDateTime, ResponseError> {
+) -> std::result::Result<OffsetDateTime, TakeErrorMessage<InvalidTaskDateError>> {
 // We can't parse using time's rfc3339 format, since then we won't know what part of the
 // datetime was not explicitly specified, and thus we won't be able to increment it to the
 // next step.
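As a side note on the date semantics exercised by the tests further down: a stand-alone sketch, using the `time` crate directly (features "macros" and "parsing"), of how a day-only value can be turned into a bound, with the "after" variant bumped to the next midnight. This is illustrative only and is not code from this commit:

```rust
// Requires the `time` crate with the "macros" and "parsing" features enabled.
use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime};

/// Parse a calendar date such as "2021-12-03" into a bound: "before" bounds stay at
/// midnight of that day, "after" bounds move to the next midnight, which is why the
/// tests below expect 2021-12-04 for afterEnqueuedAt=2021-12-03.
fn parse_day_bound(value: &str, after: bool) -> Result<OffsetDateTime, time::error::Parse> {
    let format = format_description!("[year]-[month]-[day]");
    let date = Date::parse(value, format)?;
    let mut bound = date.midnight().assume_utc();
    if after {
        bound += Duration::days(1);
    }
    Ok(bound)
}

fn main() {
    let after = parse_day_bound("2021-12-03", true).unwrap();
    let before = parse_day_bound("2021-12-03", false).unwrap();
    println!("after  -> {after}"); // 2021-12-04 0:00:00.0 +00:00:00
    println!("before -> {before}"); // 2021-12-03 0:00:00.0 +00:00:00
}
```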
@@ -729,295 +546,245 @@ pub(crate) mod date_deserializer {
 }
 }
 } else {
-Err(index_scheduler::Error::InvalidTaskDate {
-field: field_name,
-date: value.to_string(),
-}
-.into())
+Err(TakeErrorMessage(InvalidTaskDateError(value.to_owned())))
 }
 }

+pub fn deserialize_date_before(
+value: Option<String>,
+) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<InvalidTaskDateError>> {
+if let Some(value) = value {
+let date = deserialize_date(&value, DeserializeDateOption::Before)?;
+Ok(Some(date))
+} else {
+Ok(None)
 }
+}
+pub fn deserialize_date_after(
+value: Option<String>,
+) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<InvalidTaskDateError>> {
+if let Some(value) = value {
+let date = deserialize_date(&value, DeserializeDateOption::After)?;
+Ok(Some(date))
+} else {
+Ok(None)
+}
+}
+
+#[derive(Debug)]
+pub struct InvalidTaskDateError(String);
+impl std::fmt::Display for InvalidTaskDateError {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0)
+}
+}
+impl std::error::Error for InvalidTaskDateError {}
+
 #[cfg(test)]
 mod tests {
+use deserr::DeserializeFromValue;
 use meili_snap::snapshot;
+use meilisearch_types::error::DeserrError;

-use crate::routes::tasks::{TaskDeletionOrCancelationQueryRaw, TasksFilterQueryRaw};
+use crate::extractors::query_parameters::QueryParameter;
+use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};

+fn deserr_query_params<T>(j: &str) -> Result<T, actix_web::Error>
+where
+T: DeserializeFromValue<DeserrError>,
+{
+QueryParameter::<T, DeserrError>::from_query(j).map(|p| p.0)
+}
+
 #[test]
 fn deserialize_task_filter_dates() {
 {
-let json = r#" {
-"afterEnqueuedAt": "2021-12-03",
-"beforeEnqueuedAt": "2021-12-03",
-"afterStartedAt": "2021-12-03",
-"beforeStartedAt": "2021-12-03",
-"afterFinishedAt": "2021-12-03",
-"beforeFinishedAt": "2021-12-03"
-} "#;
-let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
-.validate()
-.unwrap();
-snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
-snapshot!(format!("{:?}", query.dates.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-snapshot!(format!("{:?}", query.dates.before_started_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
-snapshot!(format!("{:?}", query.dates.after_finished_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-snapshot!(format!("{:?}", query.dates.before_finished_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
+let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
+let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
+snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
+snapshot!(format!("{:?}", query.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
+snapshot!(format!("{:?}", query.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
+snapshot!(format!("{:?}", query.before_started_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
+snapshot!(format!("{:?}", query.after_finished_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
+snapshot!(format!("{:?}", query.before_finished_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
 }
 {
-let json = r#" { "afterEnqueuedAt": "2021-12-03T23:45:23Z", "beforeEnqueuedAt": "2021-12-03T23:45:23Z" } "#;
-let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
-.validate()
-.unwrap();
-snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
-snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
+let params =
+"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
+let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
+snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
+snapshot!(format!("{:?}", query.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
 }
 {
-let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06-06:20" } "#;
-let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
-.validate()
-.unwrap();
-snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00");
+let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
+let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
+snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00");
 }
 {
-let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06+00:00" } "#;
-let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
-.validate()
-.unwrap();
-snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00");
+let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
+let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
+snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00");
 }
 {
-let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06.200000300Z" } "#;
-let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
-.validate()
-.unwrap();
-snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00");
+let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
+let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
+snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00");
 }
 {
-let json = r#" { "afterFinishedAt": "2021" } "#;
-let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
-.validate()
-.unwrap_err();
-snapshot!(format!("{err}"), @"Task `afterFinishedAt` `2021` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
+let params = "afterFinishedAt=2021";
+let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
+snapshot!(format!("{err}"), @"`2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.");
 }
 {
-let json = r#" { "beforeFinishedAt": "2021" } "#;
-let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
-.unwrap()
+let params = "beforeFinishedAt=2021";
+let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
+snapshot!(format!("{err}"), @"`2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task `beforeFinishedAt` `2021` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "afterEnqueuedAt": "2021-12" } "#;
|
let params = "afterEnqueuedAt=2021-12";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task `afterEnqueuedAt` `2021-12` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let json = r#" { "beforeEnqueuedAt": "2021-12-03T23" } "#;
|
let params = "beforeEnqueuedAt=2021-12-03T23";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task `beforeEnqueuedAt` `2021-12-03T23` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "afterStartedAt": "2021-12-03T23:45" } "#;
|
let params = "afterStartedAt=2021-12-03T23:45";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.");
|
||||||
.validate()
|
}
|
||||||
.unwrap_err();
|
{
|
||||||
snapshot!(format!("{err}"), @"Task `afterStartedAt` `2021-12-03T23:45` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
|
let params = "beforeStartedAt=2021-12-03T23:45";
|
||||||
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
let json = r#" { "beforeStartedAt": "2021-12-03T23:45" } "#;
|
snapshot!(format!("{err}"), @"`2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.");
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
|
||||||
.unwrap()
|
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task `beforeStartedAt` `2021-12-03T23:45` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn deserialize_task_filter_uids() {
|
fn deserialize_task_filter_uids() {
|
||||||
{
|
{
|
||||||
let json = r#" { "uids": "78,1,12,73" } "#;
|
let params = "uids=78,1,12,73";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.uids.unwrap()), @"[78, 1, 12, 73]");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.uids.unwrap()), @"[78, 1, 12, 73]");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "uids": "1" } "#;
|
let params = "uids=1";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.uids.unwrap()), @"[1]");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.uids.unwrap()), @"[1]");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "uids": "78,hello,world" } "#;
|
let params = "uids=78,hello,world";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task uid `hello` is invalid. It should only contain numeric characters.");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "uids": "cat" } "#;
|
let params = "uids=cat";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task uid `cat` is invalid. It should only contain numeric characters.");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn deserialize_task_filter_status() {
|
fn deserialize_task_filter_status() {
|
||||||
{
|
{
|
||||||
let json = r#" { "statuses": "succeeded,failed,enqueued,processing,canceled" } "#;
|
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.statuses.unwrap()), @"[Succeeded, Failed, Enqueued, Processing, Canceled]");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.statuses.unwrap()), @"[Succeeded, Failed, Enqueued, Processing, Canceled]");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "statuses": "enqueued" } "#;
|
let params = "statuses=enqueued";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.statuses.unwrap()), @"[Enqueued]");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.statuses.unwrap()), @"[Enqueued]");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "statuses": "finished" } "#;
|
let params = "statuses=finished";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`finished` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task status `finished` is invalid. Available task statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#[test]
|
#[test]
|
||||||
fn deserialize_task_filter_types() {
|
fn deserialize_task_filter_types() {
|
||||||
{
|
{
|
||||||
let json = r#" { "types": "documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation" }"#;
|
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.types.unwrap()), @"[DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation]");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.types.unwrap()), @"[DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation]");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "types": "settingsUpdate" } "#;
|
let params = "types=settingsUpdate";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.types.unwrap()), @"[SettingsUpdate]");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.types.unwrap()), @"[SettingsUpdate]");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "types": "createIndex" } "#;
|
let params = "types=createIndex";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`createIndex` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"Task type `createIndex` is invalid. Available task types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#[test]
|
#[test]
|
||||||
fn deserialize_task_filter_index_uids() {
|
fn deserialize_task_filter_index_uids() {
|
||||||
{
|
{
|
||||||
let json = r#" { "indexUids": "toto,tata-78" }"#;
|
let params = "indexUids=toto,tata-78";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.index_uids.unwrap()), @r###"[IndexUid("toto"), IndexUid("tata-78")]"###);
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.index_uids.unwrap()), @r###"["toto", "tata-78"]"###);
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "indexUids": "index_a" } "#;
|
let params = "indexUids=index_a";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query.index_uids.unwrap()), @r###"[IndexUid("index_a")]"###);
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query.common.index_uids.unwrap()), @r###"["index_a"]"###);
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "indexUids": "1,hé" } "#;
|
let params = "indexUids=1,hé";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"hé is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let json = r#" { "indexUids": "hé" } "#;
|
let params = "indexUids=hé";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
.unwrap()
|
snapshot!(format!("{err}"), @"`hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.");
|
||||||
.validate()
|
|
||||||
.unwrap_err();
|
|
||||||
snapshot!(format!("{err}"), @"hé is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn deserialize_task_filter_general() {
|
fn deserialize_task_filter_general() {
|
||||||
{
|
{
|
||||||
let json = r#" { "from": 12, "limit": 15, "indexUids": "toto,tata-78", "statuses": "succeeded,enqueued", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#;
|
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||||
let query =
|
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
|
||||||
serde_json::from_str::<TasksFilterQueryRaw>(json).unwrap().validate().unwrap();
|
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: 15, from: Some(12), uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: Some([Succeeded, Enqueued]), index_uids: Some([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
|
||||||
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: 15, from: Some(12), common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), canceled_by: None, statuses: Some([Succeeded, Enqueued]), index_uids: Some(["toto", "tata-78"]) }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }"###);
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
// Stars should translate to `None` in the query
|
// Stars should translate to `None` in the query
|
||||||
// Verify value of the default limit
|
// Verify value of the default limit
|
||||||
let json = r#" { "indexUids": "*", "statuses": "succeeded,*", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#;
|
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||||
let query =
|
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
|
||||||
serde_json::from_str::<TasksFilterQueryRaw>(json).unwrap().validate().unwrap();
|
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: 20, from: None, uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||||
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: 20, from: None, common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), canceled_by: None, statuses: None, index_uids: None }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
// Stars should also translate to `None` in task deletion/cancelation queries
|
// Stars should also translate to `None` in task deletion/cancelation queries
|
||||||
let json = r#" { "indexUids": "*", "statuses": "succeeded,*", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#;
|
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||||
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
.unwrap()
|
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||||
.validate()
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), canceled_by: None, statuses: None, index_uids: None }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
// Stars in uids not allowed
|
// Stars in uids not allowed
|
||||||
let json = r#" { "uids": "*" }"#;
|
let params = "uids=*";
|
||||||
let err =
|
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
|
||||||
serde_json::from_str::<TasksFilterQueryRaw>(json).unwrap().validate().unwrap_err();
|
snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
|
||||||
snapshot!(format!("{err}"), @"Task uid `*` is invalid. It should only contain numeric characters.");
|
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
// From not allowed in task deletion/cancelation queries
|
// From not allowed in task deletion/cancelation queries
|
||||||
let json = r#" { "from": 12 }"#;
|
let params = "from=12";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json).unwrap_err();
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
snapshot!(format!("{err}"), @"unknown field `from` at line 1 column 15");
|
snapshot!(format!("{err}"), @"Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.");
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
// Limit not allowed in task deletion/cancelation queries
|
// Limit not allowed in task deletion/cancelation queries
|
||||||
let json = r#" { "limit": 12 }"#;
|
let params = "limit=12";
|
||||||
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json).unwrap_err();
|
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||||
snapshot!(format!("{err}"), @"unknown field `limit` at line 1 column 16");
|
snapshot!(format!("{err}"), @"Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
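The hunk above introduces `InvalidTaskDateError` and rewrites the task-filter tests around query-string deserialization. As a rough, self-contained illustration of the newtype-error pattern used there (standard library only; `check_date_shape` is a hypothetical helper and deliberately much looser than the crate's actual date parsing), a sketch might look like this:

```rust
use std::error::Error;
use std::fmt;

// Illustrative sketch of the newtype-error pattern added in the diff above;
// not tied to the actual meilisearch-http module layout.
#[derive(Debug)]
struct InvalidTaskDateError(String);

impl fmt::Display for InvalidTaskDateError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
            self.0
        )
    }
}

impl Error for InvalidTaskDateError {}

// Hypothetical helper: only checks the two shapes named in the error message.
// A very rough check; the real parser is far stricter.
fn check_date_shape(s: &str) -> Result<(), InvalidTaskDateError> {
    let looks_like_date = s.len() == 10 && s.as_bytes()[4] == b'-' && s.as_bytes()[7] == b'-';
    let looks_like_rfc3339 = s.len() > 10 && s.contains('T');
    if looks_like_date || looks_like_rfc3339 {
        Ok(())
    } else {
        Err(InvalidTaskDateError(s.to_string()))
    }
}

fn main() {
    assert!(check_date_shape("2021-12-03").is_ok());
    let err = check_date_shape("2021").unwrap_err();
    assert!(err.to_string().contains("invalid date-time"));
    println!("{err}");
}
```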
@@ -1,16 +1,12 @@
 use std::cmp::min;
 use std::collections::{BTreeMap, BTreeSet, HashSet};
-use std::convert::Infallible;
-use std::fmt;
-use std::num::ParseIntError;
-use std::str::{FromStr, ParseBoolError};
+use std::str::FromStr;
 use std::time::Instant;

-use deserr::{
-    DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
-};
+use deserr::DeserializeFromValue;
 use either::Either;
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode};
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::DeserrError;
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
 use meilisearch_types::{milli, Document};
 use milli::tokenizer::TokenizerBuilder;
@@ -34,32 +30,41 @@ pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
 pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();

 #[derive(Debug, Clone, Default, PartialEq, Eq, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQuery {
+    #[deserr(error = DeserrError<InvalidSearchQ>)]
     pub q: Option<String>,
-    #[deserr(default = DEFAULT_SEARCH_OFFSET())]
+    #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET())]
     pub offset: usize,
-    #[deserr(default = DEFAULT_SEARCH_LIMIT())]
+    #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT())]
     pub limit: usize,
+    #[deserr(error = DeserrError<InvalidSearchPage>)]
     pub page: Option<usize>,
+    #[deserr(error = DeserrError<InvalidSearchHitsPerPage>)]
     pub hits_per_page: Option<usize>,
+    #[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
+    #[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
     pub attributes_to_crop: Option<Vec<String>>,
-    #[deserr(default = DEFAULT_CROP_LENGTH())]
+    #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
     pub crop_length: usize,
+    #[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
     pub attributes_to_highlight: Option<HashSet<String>>,
-    #[deserr(default)]
+    #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default)]
     pub show_matches_position: bool,
+    #[deserr(error = DeserrError<InvalidSearchFilter>)]
     pub filter: Option<Value>,
+    #[deserr(error = DeserrError<InvalidSearchSort>)]
     pub sort: Option<Vec<String>>,
+    #[deserr(error = DeserrError<InvalidSearchFacets>)]
     pub facets: Option<Vec<String>>,
-    #[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())]
+    #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
     pub highlight_pre_tag: String,
-    #[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())]
+    #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
     pub highlight_post_tag: String,
-    #[deserr(default = DEFAULT_CROP_MARKER())]
+    #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
     pub crop_marker: String,
-    #[deserr(default)]
+    #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
     pub matching_strategy: MatchingStrategy,
 }

@@ -94,96 +99,6 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
     }
 }

-#[derive(Debug)]
-pub struct SearchDeserError {
-    error: String,
-    code: Code,
-}
-
-impl std::fmt::Display for SearchDeserError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
-
-impl std::error::Error for SearchDeserError {}
-impl ErrorCode for SearchDeserError {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl MergeWithError<SearchDeserError> for SearchDeserError {
-    fn merge(
-        _self_: Option<Self>,
-        other: SearchDeserError,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(other)
-    }
-}
-
-impl DeserializeError for SearchDeserError {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        let code = match location.last_field() {
-            Some("q") => Code::InvalidSearchQ,
-            Some("offset") => Code::InvalidSearchOffset,
-            Some("limit") => Code::InvalidSearchLimit,
-            Some("page") => Code::InvalidSearchPage,
-            Some("hitsPerPage") => Code::InvalidSearchHitsPerPage,
-            Some("attributesToRetrieve") => Code::InvalidSearchAttributesToRetrieve,
-            Some("attributesToCrop") => Code::InvalidSearchAttributesToCrop,
-            Some("cropLength") => Code::InvalidSearchCropLength,
-            Some("attributesToHighlight") => Code::InvalidSearchAttributesToHighlight,
-            Some("showMatchesPosition") => Code::InvalidSearchShowMatchesPosition,
-            Some("filter") => Code::InvalidSearchFilter,
-            Some("sort") => Code::InvalidSearchSort,
-            Some("facets") => Code::InvalidSearchFacets,
-            Some("highlightPreTag") => Code::InvalidSearchHighlightPreTag,
-            Some("highlightPostTag") => Code::InvalidSearchHighlightPostTag,
-            Some("cropMarker") => Code::InvalidSearchCropMarker,
-            Some("matchingStrategy") => Code::InvalidSearchMatchingStrategy,
-            _ => Code::BadRequest,
-        };
-
-        Err(SearchDeserError { error, code })
-    }
-}
-
-impl MergeWithError<ParseBoolError> for SearchDeserError {
-    fn merge(
-        _self_: Option<Self>,
-        other: ParseBoolError,
-        merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        SearchDeserError::error::<Infallible>(
-            None,
-            ErrorKind::Unexpected { msg: other.to_string() },
-            merge_location,
-        )
-    }
-}
-
-impl MergeWithError<ParseIntError> for SearchDeserError {
-    fn merge(
-        _self_: Option<Self>,
-        other: ParseIntError,
-        merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        SearchDeserError::error::<Infallible>(
-            None,
-            ErrorKind::Unexpected { msg: other.to_string() },
-            merge_location,
-        )
-    }
-}
-
 #[derive(Debug, Clone, Serialize, PartialEq, Eq)]
 pub struct SearchHit {
     #[serde(flatten)]
@@ -695,7 +610,7 @@ fn parse_filter(facets: &Value) -> Result<Option<Filter>, MeilisearchHttpError>
             Ok(condition)
         }
         Value::Array(arr) => parse_filter_array(arr),
-        v => Err(MeilisearchHttpError::InvalidExpression(&["Array"], v.clone())),
+        v => Err(MeilisearchHttpError::InvalidExpression(&["String", "Array"], v.clone())),
     }
 }
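This file drops the hand-written `SearchDeserError`, whose job was to map the last field of the failing value pointer to an error `Code`, in favour of per-field `#[deserr(error = DeserrError<...>)]` attributes on `SearchQuery`. A minimal stand-alone sketch of that field-to-code dispatch (the `Code` enum below is an illustrative stand-in, not `meilisearch_types::error::Code`) could read:

```rust
// Illustrative sketch of what the removed `DeserializeError` impl did: pick an
// error code from the last field of the location where deserialization failed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Code {
    InvalidSearchQ,
    InvalidSearchOffset,
    InvalidSearchLimit,
    BadRequest,
}

fn code_for_field(last_field: Option<&str>) -> Code {
    match last_field {
        Some("q") => Code::InvalidSearchQ,
        Some("offset") => Code::InvalidSearchOffset,
        Some("limit") => Code::InvalidSearchLimit,
        // Anything not recognised falls back to a generic bad-request code.
        _ => Code::BadRequest,
    }
}

fn main() {
    assert_eq!(code_for_field(Some("offset")), Code::InvalidSearchOffset);
    assert_eq!(code_for_field(Some("unknown")), Code::BadRequest);
}
```

With the attribute-based approach in the diff, this dispatch table disappears: each field declares its own error type, so the code is chosen at the declaration site rather than recovered from the error location afterwards.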
File diff suppressed because it is too large
@@ -197,6 +197,76 @@ impl Index<'_> {
         self.service.patch_encoded(url, settings, self.encoder).await
     }

+    pub async fn update_settings_displayed_attributes(
+        &self,
+        settings: Value,
+    ) -> (Value, StatusCode) {
+        let url =
+            format!("/indexes/{}/settings/displayed-attributes", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_searchable_attributes(
+        &self,
+        settings: Value,
+    ) -> (Value, StatusCode) {
+        let url =
+            format!("/indexes/{}/settings/searchable-attributes", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_filterable_attributes(
+        &self,
+        settings: Value,
+    ) -> (Value, StatusCode) {
+        let url =
+            format!("/indexes/{}/settings/filterable-attributes", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_sortable_attributes(
+        &self,
+        settings: Value,
+    ) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/sortable-attributes", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_ranking_rules(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/ranking-rules", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_stop_words(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/stop-words", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_synonyms(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/synonyms", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_distinct_attribute(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/distinct-attribute", urlencode(self.uid.as_ref()));
+        self.service.put_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_typo_tolerance(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/typo-tolerance", urlencode(self.uid.as_ref()));
+        self.service.patch_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_faceting(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/faceting", urlencode(self.uid.as_ref()));
+        self.service.patch_encoded(url, settings, self.encoder).await
+    }
+
+    pub async fn update_settings_pagination(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/pagination", urlencode(self.uid.as_ref()));
+        self.service.patch_encoded(url, settings, self.encoder).await
+    }
+
     pub async fn delete_settings(&self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
         self.service.delete(url).await
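All of the helpers added above follow the same shape: build `/indexes/{uid}/settings/<sub-route>` with a percent-encoded index uid and send the payload with PUT or PATCH through the shared test service. A small sketch of that URL construction, assuming a naive stand-in for the harness's `urlencode`, might look like:

```rust
// Sketch of the URL pattern shared by the helpers above. `urlencode` here is a
// crude stand-in for the percent-encoding used by the test harness.
fn urlencode(s: &str) -> String {
    s.bytes()
        .map(|b| match b {
            b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                (b as char).to_string()
            }
            _ => format!("%{:02X}", b),
        })
        .collect()
}

fn settings_route(uid: &str, sub_route: &str) -> String {
    format!("/indexes/{}/settings/{}", urlencode(uid), sub_route)
}

fn main() {
    assert_eq!(
        settings_route("movies", "ranking-rules"),
        "/indexes/movies/settings/ranking-rules"
    );
    assert_eq!(settings_route("hé", "stop-words"), "/indexes/h%C3%A9/settings/stop-words");
}
```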
@@ -926,7 +926,7 @@ async fn error_primary_key_inference() {
             "indexedDocuments": 1
         },
         "error": {
-            "message": "The primary key inference process failed because the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.",
+            "message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.",
             "code": "index_primary_key_no_candidate_found",
             "type": "invalid_request",
             "link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found"
@@ -966,7 +966,7 @@ async fn error_primary_key_inference() {
             "indexedDocuments": 1
         },
         "error": {
-            "message": "The primary key inference process failed because the engine found 3 fields ending with `id` in their name, such as 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.",
+            "message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.",
             "code": "index_primary_key_multiple_candidates_found",
             "type": "invalid_request",
             "link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found"
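Both snapshots above describe the same inference rule: fields whose names end with `id` are primary-key candidates, and the two error cases are zero candidates and more than one candidate. A minimal sketch of that rule (illustrative only; the engine's exact matching may differ) is:

```rust
// Sketch of the inference rule behind the two messages above: a field is a
// candidate when its name ends with `id`. Zero candidates and multiple
// candidates are the two error cases shown in the snapshots.
fn primary_key_candidates<'a>(fields: &[&'a str]) -> Vec<&'a str> {
    fields.iter().copied().filter(|f| f.to_lowercase().ends_with("id")).collect()
}

fn main() {
    // "no candidate found" case
    assert!(primary_key_candidates(&["title", "genre"]).is_empty());
    // "multiple candidates found" case
    let many = primary_key_candidates(&["id", "object_id", "title", "author_id"]);
    assert_eq!(many, vec!["id", "object_id", "author_id"]);
}
```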
@ -1,3 +1,4 @@
|
|||||||
|
use meili_snap::*;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
|
|
||||||
use super::DOCUMENTS;
|
use super::DOCUMENTS;
|
||||||
@ -37,104 +38,368 @@ async fn search_unexisting_parameter() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn search_invalid_crop_marker() {
|
async fn search_bad_q() {
|
||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
|
|
||||||
// object
|
let (response, code) = index.search_post(json!({"q": ["doggo"]})).await;
|
||||||
let response = index.search_post(json!({"cropMarker": { "marker": "<crop>" }})).await;
|
snapshot!(code, @"400 Bad Request");
|
||||||
meili_snap::snapshot!(format!("{:#?}", response), @r###"
|
snapshot!(json_string!(response), @r###"
|
||||||
(
|
{
|
||||||
Object {
|
"message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.q`.",
|
||||||
"message": String("invalid type: Map `{\"marker\":\"<crop>\"}`, expected a String at `.cropMarker`."),
|
"code": "invalid_search_q",
|
||||||
"code": String("invalid_search_crop_marker"),
|
"type": "invalid_request",
|
||||||
"type": String("invalid_request"),
|
"link": "https://docs.meilisearch.com/errors#invalid-search-q"
|
||||||
"link": String("https://docs.meilisearch.com/errors#invalid-search-crop-marker"),
|
}
|
||||||
},
|
"###);
|
||||||
400,
|
// Can't make the `q` fail with a get search since it'll accept anything as a string.
|
||||||
)
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_offset() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"offset": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Integer at `.offset`.",
|
||||||
|
"code": "invalid_search_offset",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-offset"
|
||||||
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
// array
|
let (response, code) = index.search_get(json!({"offset": "doggo"})).await;
|
||||||
let response = index.search_post(json!({"cropMarker": ["marker", "<crop>"]})).await;
|
snapshot!(code, @"400 Bad Request");
|
||||||
meili_snap::snapshot!(format!("{:#?}", response), @r###"
|
snapshot!(json_string!(response), @r###"
|
||||||
(
|
{
|
||||||
Object {
|
"message": "invalid digit found in string at `.offset`.",
|
||||||
"message": String("invalid type: Sequence `[\"marker\",\"<crop>\"]`, expected a String at `.cropMarker`."),
|
"code": "invalid_search_offset",
|
||||||
"code": String("invalid_search_crop_marker"),
|
"type": "invalid_request",
|
||||||
"type": String("invalid_request"),
|
"link": "https://docs.meilisearch.com/errors#invalid-search-offset"
|
||||||
"link": String("https://docs.meilisearch.com/errors#invalid-search-crop-marker"),
|
}
|
||||||
},
|
|
||||||
400,
|
|
||||||
)
|
|
||||||
"###);
|
"###);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn search_invalid_highlight_pre_tag() {
|
async fn search_bad_limit() {
|
||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
|
|
||||||
// object
|
let (response, code) = index.search_post(json!({"limit": "doggo"})).await;
|
||||||
let response = index.search_post(json!({"highlightPreTag": { "marker": "<em>" }})).await;
|
snapshot!(code, @"400 Bad Request");
|
||||||
meili_snap::snapshot!(format!("{:#?}", response), @r###"
|
snapshot!(json_string!(response), @r###"
|
||||||
(
|
{
|
||||||
Object {
|
"message": "invalid type: String `\"doggo\"`, expected a Integer at `.limit`.",
|
||||||
"message": String("invalid type: Map `{\"marker\":\"<em>\"}`, expected a String at `.highlightPreTag`."),
|
"code": "invalid_search_limit",
|
||||||
"code": String("invalid_search_highlight_pre_tag"),
|
"type": "invalid_request",
|
||||||
"type": String("invalid_request"),
|
"link": "https://docs.meilisearch.com/errors#invalid-search-limit"
|
||||||
"link": String("https://docs.meilisearch.com/errors#invalid-search-highlight-pre-tag"),
|
}
|
||||||
},
|
|
||||||
400,
|
|
||||||
)
|
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
// array
|
let (response, code) = index.search_get(json!({"limit": "doggo"})).await;
|
||||||
let response = index.search_post(json!({"highlightPreTag": ["marker", "<em>"]})).await;
|
snapshot!(code, @"400 Bad Request");
|
||||||
meili_snap::snapshot!(format!("{:#?}", response), @r###"
|
snapshot!(json_string!(response), @r###"
|
||||||
(
|
{
|
||||||
Object {
|
"message": "invalid digit found in string at `.limit`.",
|
||||||
"message": String("invalid type: Sequence `[\"marker\",\"<em>\"]`, expected a String at `.highlightPreTag`."),
|
"code": "invalid_search_limit",
|
||||||
"code": String("invalid_search_highlight_pre_tag"),
|
"type": "invalid_request",
|
||||||
"type": String("invalid_request"),
|
"link": "https://docs.meilisearch.com/errors#invalid-search-limit"
|
||||||
"link": String("https://docs.meilisearch.com/errors#invalid-search-highlight-pre-tag"),
|
}
|
||||||
},
|
|
||||||
400,
|
|
||||||
)
|
|
||||||
"###);
|
"###);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn search_invalid_highlight_post_tag() {
|
async fn search_bad_page() {
|
||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
|
|
||||||
// object
|
let (response, code) = index.search_post(json!({"page": "doggo"})).await;
|
||||||
let response = index.search_post(json!({"highlightPostTag": { "marker": "</em>" }})).await;
|
snapshot!(code, @"400 Bad Request");
|
||||||
meili_snap::snapshot!(format!("{:#?}", response), @r###"
|
snapshot!(json_string!(response), @r###"
|
||||||
(
|
{
|
||||||
Object {
|
"message": "invalid type: String `\"doggo\"`, expected a Integer at `.page`.",
|
||||||
"message": String("invalid type: Map `{\"marker\":\"</em>\"}`, expected a String at `.highlightPostTag`."),
|
"code": "invalid_search_page",
|
||||||
"code": String("invalid_search_highlight_post_tag"),
|
"type": "invalid_request",
|
||||||
"type": String("invalid_request"),
|
"link": "https://docs.meilisearch.com/errors#invalid-search-page"
|
||||||
"link": String("https://docs.meilisearch.com/errors#invalid-search-highlight-post-tag"),
|
}
|
||||||
},
|
|
||||||
400,
|
|
||||||
)
|
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
// array
|
let (response, code) = index.search_get(json!({"page": "doggo"})).await;
|
||||||
let response = index.search_post(json!({"highlightPostTag": ["marker", "</em>"]})).await;
|
snapshot!(code, @"400 Bad Request");
|
||||||
meili_snap::snapshot!(format!("{:#?}", response), @r###"
|
snapshot!(json_string!(response), @r###"
|
||||||
(
|
{
|
||||||
Object {
|
"message": "invalid digit found in string at `.page`.",
|
||||||
"message": String("invalid type: Sequence `[\"marker\",\"</em>\"]`, expected a String at `.highlightPostTag`."),
|
"code": "invalid_search_page",
|
||||||
"code": String("invalid_search_highlight_post_tag"),
|
"type": "invalid_request",
|
||||||
"type": String("invalid_request"),
|
"link": "https://docs.meilisearch.com/errors#invalid-search-page"
|
||||||
"link": String("https://docs.meilisearch.com/errors#invalid-search-highlight-post-tag"),
|
}
|
||||||
},
|
"###);
|
||||||
400,
|
}
|
||||||
)
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_hits_per_page() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"hitsPerPage": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Integer at `.hitsPerPage`.",
|
||||||
|
"code": "invalid_search_hits_per_page",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
|
||||||
|
let (response, code) = index.search_get(json!({"hitsPerPage": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid digit found in string at `.hitsPerPage`.",
|
||||||
|
"code": "invalid_search_hits_per_page",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_attributes_to_crop() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"attributesToCrop": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Sequence at `.attributesToCrop`.",
|
||||||
|
"code": "invalid_search_attributes_to_crop",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-crop"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
// Can't make the `attributes_to_crop` fail with a get search since it'll accept anything as an array of strings.
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_crop_length() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"cropLength": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Integer at `.cropLength`.",
|
||||||
|
"code": "invalid_search_crop_length",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-crop-length"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
|
||||||
|
let (response, code) = index.search_get(json!({"cropLength": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid digit found in string at `.cropLength`.",
|
||||||
|
"code": "invalid_search_crop_length",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-crop-length"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_attributes_to_highlight() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"attributesToHighlight": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Sequence at `.attributesToHighlight`.",
|
||||||
|
"code": "invalid_search_attributes_to_highlight",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-highlight"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
// Can't make the `attributes_to_highlight` fail with a get search since it'll accept anything as an array of strings.
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_filter() {
|
||||||
|
// Since a filter is deserialized as a json Value it will never fail to deserialize.
|
||||||
|
// Thus the error message is not generated by deserr but written by us.
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
// Also, to trigger the error message we need to effectively create the index or else it'll throw an
|
||||||
|
// index does not exists error.
|
||||||
|
let (_, code) = index.create(None).await;
|
||||||
|
server.wait_task(0).await;
|
||||||
|
|
||||||
|
snapshot!(code, @"202 Accepted");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({ "filter": true })).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "Invalid syntax for the filter parameter: `expected String, Array, found: true`.",
|
||||||
|
"code": "invalid_search_filter",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
// Can't make the `filter` fail with a get search since it'll accept anything as a strings.
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_sort() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"sort": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Sequence at `.sort`.",
|
||||||
|
"code": "invalid_search_sort",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
// Can't make the `sort` fail with a get search since it'll accept anything as a strings.
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_show_matches_position() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
let index = server.index("test");
|
||||||
|
|
||||||
|
let (response, code) = index.search_post(json!({"showMatchesPosition": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "invalid type: String `\"doggo\"`, expected a Boolean at `.showMatchesPosition`.",
|
||||||
|
"code": "invalid_search_show_matches_position",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
|
||||||
|
let (response, code) = index.search_get(json!({"showMatchesPosition": "doggo"})).await;
|
||||||
|
snapshot!(code, @"400 Bad Request");
|
||||||
|
snapshot!(json_string!(response), @r###"
|
||||||
|
{
|
||||||
|
"message": "provided string was not `true` or `false` at `.showMatchesPosition`.",
|
||||||
|
"code": "invalid_search_show_matches_position",
|
||||||
|
"type": "invalid_request",
|
||||||
|
"link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position"
|
||||||
|
}
|
||||||
|
"###);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn search_bad_facets() {
|
||||||
|
let server = Server::new().await;
|
||||||
|
    let index = server.index("test");

    let (response, code) = index.search_post(json!({"facets": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.facets`.",
      "code": "invalid_search_facets",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-search-facets"
    }
    "###);
    // Can't make the `attributes_to_highlight` fail with a get search since it'll accept anything as an array of strings.
}

#[actix_rt::test]
async fn search_bad_highlight_pre_tag() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.search_post(json!({"highlightPreTag": ["doggo"]})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.highlightPreTag`.",
      "code": "invalid_search_highlight_pre_tag",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-search-highlight-pre-tag"
    }
    "###);
    // Can't make the `highlight_pre_tag` fail with a get search since it'll accept anything as a string.
}

#[actix_rt::test]
async fn search_bad_highlight_post_tag() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.search_post(json!({"highlightPostTag": ["doggo"]})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.highlightPostTag`.",
      "code": "invalid_search_highlight_post_tag",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-search-highlight-post-tag"
    }
    "###);
    // Can't make the `highlight_post_tag` fail with a get search since it'll accept anything as a string.
}

#[actix_rt::test]
async fn search_bad_crop_marker() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.search_post(json!({"cropMarker": ["doggo"]})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.cropMarker`.",
      "code": "invalid_search_crop_marker",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-search-crop-marker"
    }
    "###);
    // Can't make the `crop_marker` fail with a get search since it'll accept anything as a string.
}

#[actix_rt::test]
async fn search_bad_matching_strategy() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.search_post(json!({"matchingStrategy": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Json deserialize error: unknown value `doggo`, expected one of `last`, `all` at `.matchingStrategy`.",
      "code": "invalid_search_matching_strategy",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
    }
    "###);

    let (response, code) = index.search_get(json!({"matchingStrategy": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Json deserialize error: unknown value `doggo`, expected one of `last`, `all` at `.matchingStrategy`.",
      "code": "invalid_search_matching_strategy",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
    }
    "###);
}
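The comments in these tests all hinge on the same detail: on the GET search route every parameter arrives as text, and list-typed parameters are parsed from comma-separated values, so the type errors exercised above simply cannot be produced there. A minimal sketch of that assumption, reusing the helpers from this file (the exact GET parsing behaviour is inferred, not shown in this diff):

// Hypothetical check: assuming the GET route reads list parameters from
// comma-separated text, "doggo" becomes ["doggo"], so deserialization
// succeeds and no `invalid_search_*` error can be triggered this way.
// (The request may still fail later for other reasons, e.g. a missing
// index, which is why the tests above skip the GET variant entirely.)
let (_response, _code) = index.search_get(json!({"attributesToHighlight": "doggo"})).await;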
@ -151,9 +416,9 @@ async fn filter_invalid_syntax_object() {
     let expected_response = json!({
         "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": "title & Glass"}), |response, code| {

@ -176,9 +441,9 @@ async fn filter_invalid_syntax_array() {
     let expected_response = json!({
         "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": ["title & Glass"]}), |response, code| {

@ -201,9 +466,9 @@ async fn filter_invalid_syntax_string() {
     let expected_response = json!({
         "message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": "title = Glass XOR title = Glass"}), |response, code| {

@ -226,9 +491,9 @@ async fn filter_invalid_attribute_array() {
     let expected_response = json!({
         "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": ["many = Glass"]}), |response, code| {

@ -251,9 +516,9 @@ async fn filter_invalid_attribute_string() {
     let expected_response = json!({
         "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": "many = Glass"}), |response, code| {

@ -276,9 +541,9 @@ async fn filter_reserved_geo_attribute_array() {
     let expected_response = json!({
         "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": ["_geo = Glass"]}), |response, code| {

@ -301,9 +566,9 @@ async fn filter_reserved_geo_attribute_string() {
     let expected_response = json!({
         "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": "_geo = Glass"}), |response, code| {

@ -326,9 +591,9 @@ async fn filter_reserved_attribute_array() {
     let expected_response = json!({
         "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": ["_geoDistance = Glass"]}), |response, code| {

@ -351,9 +616,9 @@ async fn filter_reserved_attribute_string() {
     let expected_response = json!({
         "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
-        "code": "invalid_filter",
+        "code": "invalid_search_filter",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-filter"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
     });
     index
         .search(json!({"filter": "_geoDistance = Glass"}), |response, code| {

@ -376,9 +641,9 @@ async fn sort_geo_reserved_attribute() {
     let expected_response = json!({
         "message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
-        "code": "invalid_sort",
+        "code": "invalid_search_sort",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-sort"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-sort"
     });
     index
         .search(

@ -406,9 +671,9 @@ async fn sort_reserved_attribute() {
     let expected_response = json!({
         "message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
-        "code": "invalid_sort",
+        "code": "invalid_search_sort",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-sort"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-sort"
     });
     index
         .search(

@ -436,9 +701,9 @@ async fn sort_unsortable_attribute() {
     let expected_response = json!({
         "message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.",
-        "code": "invalid_sort",
+        "code": "invalid_search_sort",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-sort"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-sort"
     });
     index
         .search(

@ -466,9 +731,9 @@ async fn sort_invalid_syntax() {
     let expected_response = json!({
         "message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
-        "code": "invalid_sort",
+        "code": "invalid_search_sort",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-sort"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-sort"
     });
     index
         .search(

@ -500,9 +765,9 @@ async fn sort_unset_ranking_rule() {
     let expected_response = json!({
         "message": "The sort ranking rule must be specified in the ranking rules settings to use the sort parameter at search time.",
-        "code": "invalid_sort",
+        "code": "invalid_search_sort",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-sort"
+        "link": "https://docs.meilisearch.com/errors#invalid-search-sort"
     });
     index
         .search(
@ -200,11 +200,14 @@ async fn search_with_filter_string_notation() {
     let server = Server::new().await;
     let index = server.index("test");

-    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
+    let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
+    meili_snap::snapshot!(code, @"202 Accepted");

     let documents = DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(1).await;
+    let (_, code) = index.add_documents(documents, None).await;
+    meili_snap::snapshot!(code, @"202 Accepted");
+    let res = index.wait_task(1).await;
+    meili_snap::snapshot!(res["status"], @r###""succeeded""###);

     index
         .search(

@ -220,11 +223,15 @@ async fn search_with_filter_string_notation() {
     let index = server.index("nested");

-    index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
+    let (_, code) =
+        index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
+    meili_snap::snapshot!(code, @"202 Accepted");

     let documents = NESTED_DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(3).await;
+    let (_, code) = index.add_documents(documents, None).await;
+    meili_snap::snapshot!(code, @"202 Accepted");
+    let res = index.wait_task(3).await;
+    meili_snap::snapshot!(res["status"], @r###""succeeded""###);

     index
         .search(
312 meilisearch/tests/settings/errors.rs Normal file
@ -0,0 +1,312 @@
use meili_snap::*;
use serde_json::json;

use crate::common::Server;

#[actix_rt::test]
async fn settings_bad_displayed_attributes() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "displayedAttributes": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.displayedAttributes`.",
      "code": "invalid_settings_displayed_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
    }
    "###);

    let (response, code) = index.update_settings_displayed_attributes(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
      "code": "invalid_settings_displayed_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_searchable_attributes() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "searchableAttributes": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.searchableAttributes`.",
      "code": "invalid_settings_searchable_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
    }
    "###);

    let (response, code) = index.update_settings_searchable_attributes(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
      "code": "invalid_settings_searchable_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_filterable_attributes() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "filterableAttributes": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.filterableAttributes`.",
      "code": "invalid_settings_filterable_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
    }
    "###);

    let (response, code) = index.update_settings_filterable_attributes(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
      "code": "invalid_settings_filterable_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_sortable_attributes() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "sortableAttributes": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.sortableAttributes`.",
      "code": "invalid_settings_sortable_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
    }
    "###);

    let (response, code) = index.update_settings_sortable_attributes(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
      "code": "invalid_settings_sortable_attributes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_ranking_rules() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "rankingRules": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.rankingRules`.",
      "code": "invalid_settings_ranking_rules",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
    }
    "###);

    let (response, code) = index.update_settings_ranking_rules(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
      "code": "invalid_settings_ranking_rules",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_stop_words() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "stopWords": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.stopWords`.",
      "code": "invalid_settings_stop_words",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
    }
    "###);

    let (response, code) = index.update_settings_stop_words(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
      "code": "invalid_settings_stop_words",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_synonyms() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "synonyms": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at `.synonyms`.",
      "code": "invalid_settings_synonyms",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
    }
    "###);

    let (response, code) = index.update_settings_synonyms(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
      "code": "invalid_settings_synonyms",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_distinct_attribute() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "distinctAttribute": ["doggo"] })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.distinctAttribute`.",
      "code": "invalid_settings_distinct_attribute",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
    }
    "###);

    let (response, code) = index.update_settings_distinct_attribute(json!(["doggo"])).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at ``.",
      "code": "invalid_settings_distinct_attribute",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_typo_tolerance() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "typoTolerance": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at `.typoTolerance`.",
      "code": "invalid_settings_typo_tolerance",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
    }
    "###);

    let (response, code) = index.update_settings_typo_tolerance(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
      "code": "invalid_settings_typo_tolerance",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_faceting() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "faceting": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at `.faceting`.",
      "code": "invalid_settings_faceting",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
    }
    "###);

    let (response, code) = index.update_settings_faceting(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
      "code": "invalid_settings_faceting",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
    }
    "###);
}

#[actix_rt::test]
async fn settings_bad_pagination() {
    let server = Server::new().await;
    let index = server.index("test");

    let (response, code) = index.update_settings(json!({ "pagination": "doggo" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at `.pagination`.",
      "code": "invalid_settings_pagination",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
    }
    "###);

    let (response, code) = index.update_settings_pagination(json!("doggo")).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
      "code": "invalid_settings_pagination",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
    }
    "###);
}
@ -179,15 +179,15 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
     let server = Server::new().await;
     let index = server.index("test##! ");
     let (response, code) = index.update_settings(json!({})).await;
-    assert_eq!(code, 400);
-    let expected = json!({
+    meili_snap::snapshot!(code, @"400 Bad Request");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
         "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
         "code": "invalid_index_uid",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-index-uid"});
-    assert_eq!(response, expected);
+        "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+    }
+    "###);
 }

 macro_rules! test_setting_routes {
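The hunk above swaps a hand-built `expected` value plus `assert_eq!` for an inline snapshot. `meili_snap` appears to wrap insta-style inline snapshots: `json_string!` pretty-prints the response and `snapshot!(value, @r###"…"###)` compares it against the literal stored directly in the test, which the snapshot tooling can rewrite instead of the expectation being edited by hand. A small hypothetical example of the pattern (values invented, not taken from this diff):

// Hypothetical: the `@r###"…"###` literal is the expected rendering kept
// inline in the source file and checked on every run.
let value = json!({ "code": "doggo", "message": "woof" });
meili_snap::snapshot!(meili_snap::json_string!(value), @r###"
{
  "code": "doggo",
  "message": "woof"
}
"###);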
@ -278,22 +278,16 @@ async fn error_set_invalid_ranking_rules() {
     let index = server.index("test");
     index.create(None).await;

-    let (_response, _code) =
-        index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
-    index.wait_task(1).await;
-    let (response, code) = index.get_task(1).await;
-
-    assert_eq!(code, 200);
-    assert_eq!(response["status"], "failed");
-
-    let expected_error = json!({
-        "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#,
-        "code": "invalid_ranking_rule",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-ranking-rule"
-    });
-
-    assert_eq!(response["error"], expected_error);
+    let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
+    meili_snap::snapshot!(code, @"400 Bad Request");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "message": "`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules. at `.rankingRules[0]`.",
+      "code": "invalid_settings_ranking_rules",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
+    }
+    "###);
 }

 #[actix_rt::test]
@ -1,2 +1,3 @@
 mod distinct;
+mod errors;
 mod get_settings;
498 meilisearch/tests/tasks/errors.rs Normal file
@ -0,0 +1,498 @@
use meili_snap::*;
use serde_json::json;

use crate::common::Server;

#[actix_rt::test]
async fn task_bad_uids() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"uids": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.uids`.",
      "code": "invalid_task_uids",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"uids": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.uids`.",
      "code": "invalid_task_uids",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"uids": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.uids`.",
      "code": "invalid_task_uids",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_canceled_by() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"canceledBy": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.canceledBy`.",
      "code": "invalid_task_canceled_by",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"canceledBy": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.canceledBy`.",
      "code": "invalid_task_canceled_by",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"canceledBy": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.canceledBy`.",
      "code": "invalid_task_canceled_by",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_types() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"types": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.",
      "code": "invalid_task_types",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-types"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"types": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.",
      "code": "invalid_task_types",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-types"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"types": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.",
      "code": "invalid_task_types",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-types"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_statuses() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"statuses": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.",
      "code": "invalid_task_statuses",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"statuses": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.",
      "code": "invalid_task_statuses",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"statuses": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.",
      "code": "invalid_task_statuses",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_index_uids() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"indexUids": "the good doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.",
      "code": "invalid_index_uid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"indexUids": "the good doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.",
      "code": "invalid_index_uid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"indexUids": "the good doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.",
      "code": "invalid_index_uid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_limit() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"limit": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.limit`.",
      "code": "invalid_task_limit",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-limit"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"limit": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad-request"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"limit": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad-request"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_from() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"from": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "invalid digit found in string at `.from`.",
      "code": "invalid_task_from",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-from"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"from": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad-request"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"from": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad-request"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_after_enqueued_at() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"afterEnqueuedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.",
      "code": "invalid_task_after_enqueued_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"afterEnqueuedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.",
      "code": "invalid_task_after_enqueued_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"afterEnqueuedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.",
      "code": "invalid_task_after_enqueued_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_before_enqueued_at() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"beforeEnqueuedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.",
      "code": "invalid_task_before_enqueued_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"beforeEnqueuedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.",
      "code": "invalid_task_before_enqueued_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"beforeEnqueuedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.",
      "code": "invalid_task_before_enqueued_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_after_started_at() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"afterStartedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.",
      "code": "invalid_task_after_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"afterStartedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.",
      "code": "invalid_task_after_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"afterStartedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.",
      "code": "invalid_task_after_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_before_started_at() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"beforeStartedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.",
      "code": "invalid_task_before_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"beforeStartedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.",
      "code": "invalid_task_before_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"beforeStartedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.",
      "code": "invalid_task_before_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_after_finished_at() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"afterFinishedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.",
      "code": "invalid_task_after_finished_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"afterFinishedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.",
      "code": "invalid_task_after_finished_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"afterFinishedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.",
      "code": "invalid_task_after_finished_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
    }
    "###);
}

#[actix_rt::test]
async fn task_bad_before_finished_at() {
    let server = Server::new().await;

    let (response, code) = server.tasks_filter(json!({"beforeFinishedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.",
      "code": "invalid_task_before_finished_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
    }
    "###);

    let (response, code) = server.cancel_tasks(json!({"beforeFinishedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.",
      "code": "invalid_task_before_finished_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
    }
    "###);

    let (response, code) = server.delete_tasks(json!({"beforeFinishedAt": "doggo"})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.",
      "code": "invalid_task_before_finished_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
    }
    "###);
}
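All of the date filters in the new file reject `doggo` with the same message; for contrast, here is a hypothetical companion check (not part of this diff) of the two accepted shapes that message names, using the same `tasks_filter` helper:

// Assumption: both a plain YYYY-MM-DD date and a full RFC 3339 timestamp
// are expected to deserialize without producing an `invalid_task_*` error.
let (_response, _code) = server.tasks_filter(json!({"afterEnqueuedAt": "2023-01-11"})).await;
let (_response, _code) = server.tasks_filter(json!({"afterEnqueuedAt": "2023-01-11T12:00:00Z"})).await;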
@ -1,4 +1,6 @@
-use meili_snap::insta::{self, assert_json_snapshot};
+mod errors;
+
+use meili_snap::insta::assert_json_snapshot;
 use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
@ -179,9 +181,9 @@ async fn get_task_filter_error() {

     let (response, code) = server.tasks_filter(json!( { "lol": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Query deserialize error: unknown field `lol`",
+      "message": "Json deserialize error: unknown field `lol`, expected one of `limit`, `from`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad-request"
@ -190,9 +192,9 @@ async fn get_task_filter_error() {

     let (response, code) = server.tasks_filter(json!( { "uids": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
+      "message": "invalid digit found in string at `.uids`.",
       "code": "invalid_task_uids",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
@ -201,20 +203,20 @@ async fn get_task_filter_error() {

     let (response, code) = server.tasks_filter(json!( { "from": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Query deserialize error: invalid digit found in string",
-      "code": "bad_request",
+      "message": "invalid digit found in string at `.from`.",
+      "code": "invalid_task_from",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#invalid-task-from"
     }
     "###);

     let (response, code) = server.tasks_filter(json!( { "beforeStartedAt": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+      "message": "`pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.",
       "code": "invalid_task_before_started_at",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
@ -228,7 +230,7 @@ async fn delete_task_filter_error() {

     let (response, code) = server.delete_tasks(json!(null)).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
       "code": "missing_task_filters",
@ -239,9 +241,9 @@ async fn delete_task_filter_error() {

     let (response, code) = server.delete_tasks(json!({ "lol": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Query deserialize error: unknown field `lol`",
+      "message": "Json deserialize error: unknown field `lol`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad-request"
@ -250,9 +252,9 @@ async fn delete_task_filter_error() {

     let (response, code) = server.delete_tasks(json!({ "uids": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
+      "message": "invalid digit found in string at `.uids`.",
       "code": "invalid_task_uids",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
@ -266,7 +268,7 @@ async fn cancel_task_filter_error() {

     let (response, code) = server.cancel_tasks(json!(null)).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
       "message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
       "code": "missing_task_filters",
@ -277,9 +279,9 @@ async fn cancel_task_filter_error() {

     let (response, code) = server.cancel_tasks(json!({ "lol": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Query deserialize error: unknown field `lol`",
+      "message": "Json deserialize error: unknown field `lol`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad-request"
@ -288,9 +290,9 @@ async fn cancel_task_filter_error() {

     let (response, code) = server.cancel_tasks(json!({ "uids": "pied" })).await;
     assert_eq!(code, 400, "{}", response);
-    insta::assert_json_snapshot!(response, @r###"
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
+      "message": "invalid digit found in string at `.uids`.",
       "code": "invalid_task_uids",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
@ -517,46 +519,26 @@ async fn test_summarized_settings_update() {
     let server = Server::new().await;
     let index = server.index("test");
     // here we should find my payload even in the failed task.
-    index.update_settings(json!({ "rankingRules": ["custom"] })).await;
+    let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
+    meili_snap::snapshot!(code, @"400 Bad Request");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "message": "`custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules. at `.rankingRules[0]`.",
+      "code": "invalid_settings_ranking_rules",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
+    }
+    "###);
+
+    index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
     index.wait_task(0).await;
     let (task, _) = index.get_task(0).await;
-    dbg!(&task);
     assert_json_snapshot!(task,
         { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {
       "uid": 0,
       "indexUid": "test",
-      "status": "failed",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "rankingRules": [
-          "custom"
-        ]
-      },
-      "error": {
-        "message": "`custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
-        "code": "invalid_ranking_rule",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid-ranking-rule"
-      },
-      "duration": "[duration]",
-      "enqueuedAt": "[date]",
-      "startedAt": "[date]",
-      "finishedAt": "[date]"
-    }
-    "###);
-
-    index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
-    index.wait_task(1).await;
-    let (task, _) = index.get_task(1).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
-        @r###"
-    {
-      "uid": 1,
-      "indexUid": "test",
       "status": "succeeded",
       "type": "settingsUpdate",
       "canceledBy": null,
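Across both test files in this diff, every asserted error body carries the same four fields: `message`, `code`, `type`, and `link`. As a minimal sketch not taken from the Meilisearch codebase, that shape can also be deserialized into a small struct when a test wants field-level checks instead of a full snapshot; the `ApiError` name and its derive setup below are illustrative assumptions.

use serde::Deserialize;

// Illustrative only: mirrors the error payload shape asserted throughout these tests.
#[derive(Debug, Deserialize)]
struct ApiError {
    message: String,
    code: String,
    #[serde(rename = "type")]
    kind: String,
    link: String,
}

fn main() {
    let body = r#"{
        "message": "invalid digit found in string at `.uids`.",
        "code": "invalid_task_uids",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }"#;
    let err: ApiError = serde_json::from_str(body).expect("well-formed error payload");
    // Field-level checks, e.g. that the code matches the documented anchor in the link.
    assert_eq!(err.kind, "invalid_request");
    assert!(err.link.ends_with(&err.code.replace('_', "-")));
    println!("{}", err.message);
}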