4786: Update dependencies r=Kerollmops a=irevoire

# Pull Request

## Related issue
Fixes #4753

## What does this PR do?
- Update all dependencies except rustls
- [x] Release charabia
- [x] Update charabia
- [x] Double check that the docker build works after updating charabia
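
For reference, a minimal sketch of how the last two checklist items can be verified locally. The commands target the repository root; the image tag is an illustrative assumption, not something defined by this PR:

```sh
# Refresh Cargo.lock so it picks up the released charabia and the other bumped crates
cargo update

# Make sure the workspace still builds and the tests still pass
cargo test

# Double-check that the Docker image still builds with the bumped Rust/Alpine versions
docker build -t meilisearch:deps-update .
```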



Co-authored-by: Tamo <tamo@meilisearch.com>
Co-authored-by: Clément Renault <clement@meilisearch.com>
Merged as commit 2099b4f0dd by meili-bors[bot] on 2024-07-10 13:23:54 +00:00 (committed via GitHub).
49 changed files with 1665 additions and 1442 deletions

@@ -167,7 +167,7 @@ jobs:
       - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly-2024-06-25
+          toolchain: nightly-2024-07-09
           override: true
           components: rustfmt
       - name: Cache dependencies

@@ -52,6 +52,16 @@ cargo test
 This command will be triggered to each PR as a requirement for merging it.
 
+#### Faster build
+
+You can set the `LINDERA_CACHE` environment variable to speed up your successive builds by up to 2 minutes.
+It'll store some built artifacts in the directory of your choice.
+
+We recommend using the standard `$HOME/.cache/lindera` directory:
+```sh
+export LINDERA_CACHE=$HOME/.cache/lindera
+```
+
 #### Snapshot-based tests
 
 We are using [insta](https://insta.rs) to perform snapshot-based testing.
 
@@ -63,7 +73,7 @@ Furthermore, we provide some macros on top of insta, notably a way to use snapsh
 To effectively debug snapshot-based hashes, we recommend you export the `MEILI_TEST_FULL_SNAPS` environment variable so that snapshot are fully created locally:
 
-```
+```sh
 export MEILI_TEST_FULL_SNAPS=true # add this to your .bashrc, .zshrc, ...
 ```
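
Taken together, the contributing-guide additions above boil down to the following local setup. The two environment variables come straight from the hunk; `cargo insta review` is an extra assumption that requires the separate `cargo-insta` tool to be installed:

```sh
# Cache Lindera's built artifacts to speed up successive builds (new in this PR)
export LINDERA_CACHE=$HOME/.cache/lindera

# Create full snapshots locally so hash-based snapshot failures are debuggable
export MEILI_TEST_FULL_SNAPS=true

# Run the test suite, then review any changed insta snapshots (assumes cargo-insta is installed)
cargo test
cargo insta review
```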

Cargo.lock (generated, 2317 changed lines): diff suppressed because it is too large.

@@ -1,5 +1,5 @@
 # Compile
-FROM rust:1.75.0-alpine3.18 AS compiler
+FROM rust:1.79.0-alpine3.20 AS compiler
 
 RUN apk add -q --no-cache build-base openssl-dev
 
@@ -20,7 +20,7 @@ RUN set -eux; \
     cargo build --release -p meilisearch -p meilitool
 
 # Run
-FROM alpine:3.16
+FROM alpine:3.20
 
 ENV MEILI_HTTP_ADDR 0.0.0.0:7700
 ENV MEILI_SERVER_PROVIDER docker

@@ -11,24 +11,24 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.86"
 csv = "1.3.0"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.39", default-features = false }
+mimalloc = { version = "0.1.43", default-features = false }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
 
 [dev-dependencies]
 criterion = { version = "0.5.1", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
-roaring = "0.10.2"
+roaring = "0.10.6"
 
 [build-dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.86"
-bytes = "1.5.0"
+bytes = "1.6.0"
 convert_case = "0.6.0"
-flate2 = "1.0.28"
+flate2 = "1.0.30"
-reqwest = { version = "0.11.23", features = ["blocking", "rustls-tls"], default-features = false }
+reqwest = { version = "0.12.5", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]

@@ -11,8 +11,8 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-time = { version = "0.3.34", features = ["parsing"] }
+time = { version = "0.3.36", features = ["parsing"] }
 
 [build-dependencies]
-anyhow = "1.0.80"
+anyhow = "1.0.86"
-vergen-git2 = "1.0.0-beta.2"
+vergen-git2 = "1.0.0"

@@ -11,22 +11,21 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.86"
-flate2 = "1.0.28"
+flate2 = "1.0.30"
-http = "0.2.11"
+http = "1.1.0"
-meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 once_cell = "1.19.0"
-regex = "1.10.2"
+regex = "1.10.5"
-roaring = { version = "0.10.2", features = ["serde"] }
+roaring = { version = "0.10.6", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
+serde = { version = "1.0.204", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
-tar = "0.4.40"
+tar = "0.4.41"
-tempfile = "3.9.0"
+tempfile = "3.10.1"
-thiserror = "1.0.56"
+thiserror = "1.0.61"
-time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+time = { version = "0.3.36", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tracing = "0.1.40"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"

@@ -425,7 +425,7 @@ pub(crate) mod test {
 let mut dump = v2::V2Reader::open(dir).unwrap().to_v3();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -358,7 +358,7 @@ pub(crate) mod test {
 let mut dump = v3::V3Reader::open(dir).unwrap().to_v4();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -394,8 +394,8 @@ pub(crate) mod test {
 let mut dump = v4::V4Reader::open(dir).unwrap().to_v5();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
-insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -442,8 +442,8 @@ pub(crate) mod test {
 let mut dump = v5::V5Reader::open(dir).unwrap().to_v6();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
-insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
 // tasks
 let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();

@@ -216,7 +216,7 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2024-05-16 15:51:34.151044 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2024-05-16 15:51:34.151044 +00:00:00");
 insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
 
 // tasks
@@ -337,7 +337,7 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
 insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
 
 // tasks
@@ -383,8 +383,8 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
-insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
 // tasks
 let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
@@ -463,8 +463,8 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
-insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
 // tasks
 let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
@@ -540,7 +540,7 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
 assert_eq!(dump.instance_uid().unwrap(), None);
 
 // tasks
@@ -633,7 +633,7 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
 assert_eq!(dump.instance_uid().unwrap(), None);
 
 // tasks
@@ -726,7 +726,7 @@ pub(crate) mod test {
 let mut dump = DumpReader::open(dump).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
 assert_eq!(dump.instance_uid().unwrap(), None);
 
 // tasks

@@ -252,7 +252,7 @@ pub(crate) mod test {
 let mut dump = V2Reader::open(dir).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
@@ -349,7 +349,7 @@ pub(crate) mod test {
 let mut dump = V2Reader::open(dir).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -267,7 +267,7 @@ pub(crate) mod test {
 let mut dump = V3Reader::open(dir).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -262,8 +262,8 @@ pub(crate) mod test {
 let mut dump = V4Reader::open(dir).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
-insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -299,8 +299,8 @@ pub(crate) mod test {
 let mut dump = V5Reader::open(dir).unwrap();
 
 // top level infos
-insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
+insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
-insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
 // tasks
 let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();

@@ -281,7 +281,7 @@ pub(crate) mod test {
 let dump_path = dump.path();
 
 // ==== checking global file hierarchy (we want to be sure there isn't too many files or too few)
-insta::assert_display_snapshot!(create_directory_hierarchy(dump_path), @r###"
+insta::assert_snapshot!(create_directory_hierarchy(dump_path), @r###"
 .
 ---- indexes/
 ---- doggos/

@@ -11,10 +11,7 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-tempfile = "3.9.0"
+tempfile = "3.10.1"
-thiserror = "1.0.56"
+thiserror = "1.0.61"
 tracing = "0.1.40"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
-
-[dev-dependencies]
-faux = "0.1.10"

@@ -14,7 +14,7 @@ license.workspace = true
 [dependencies]
 nom = "7.1.3"
 nom_locate = "4.2.0"
-unescaper = "0.1.3"
+unescaper = "0.1.5"
 
 [dev-dependencies]
-insta = "1.34.0"
+insta = "1.39.0"

@@ -564,121 +564,121 @@ pub mod tests {
 #[test]
 fn parse_escaped() {
-insta::assert_display_snapshot!(p(r"title = 'foo\\'"), @r#"{title} = {foo\}"#);
+insta::assert_snapshot!(p(r"title = 'foo\\'"), @r#"{title} = {foo\}"#);
-insta::assert_display_snapshot!(p(r"title = 'foo\\\\'"), @r#"{title} = {foo\\}"#);
+insta::assert_snapshot!(p(r"title = 'foo\\\\'"), @r#"{title} = {foo\\}"#);
-insta::assert_display_snapshot!(p(r"title = 'foo\\\\\\'"), @r#"{title} = {foo\\\}"#);
+insta::assert_snapshot!(p(r"title = 'foo\\\\\\'"), @r#"{title} = {foo\\\}"#);
-insta::assert_display_snapshot!(p(r"title = 'foo\\\\\\\\'"), @r#"{title} = {foo\\\\}"#);
+insta::assert_snapshot!(p(r"title = 'foo\\\\\\\\'"), @r#"{title} = {foo\\\\}"#);
 // but it also works with other sequences
-insta::assert_display_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
+insta::assert_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
 }
 
 #[test]
 fn parse() {
 // Test equal
-insta::assert_display_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
+insta::assert_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
-insta::assert_display_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");
+insta::assert_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");
-insta::assert_display_snapshot!(p("channel = 'Mister Mv'"), @"{channel} = {Mister Mv}");
+insta::assert_snapshot!(p("channel = 'Mister Mv'"), @"{channel} = {Mister Mv}");
-insta::assert_display_snapshot!(p("channel = \"Mister Mv\""), @"{channel} = {Mister Mv}");
+insta::assert_snapshot!(p("channel = \"Mister Mv\""), @"{channel} = {Mister Mv}");
-insta::assert_display_snapshot!(p("'dog race' = Borzoi"), @"{dog race} = {Borzoi}");
+insta::assert_snapshot!(p("'dog race' = Borzoi"), @"{dog race} = {Borzoi}");
-insta::assert_display_snapshot!(p("\"dog race\" = Chusky"), @"{dog race} = {Chusky}");
+insta::assert_snapshot!(p("\"dog race\" = Chusky"), @"{dog race} = {Chusky}");
-insta::assert_display_snapshot!(p("\"dog race\" = \"Bernese Mountain\""), @"{dog race} = {Bernese Mountain}");
+insta::assert_snapshot!(p("\"dog race\" = \"Bernese Mountain\""), @"{dog race} = {Bernese Mountain}");
-insta::assert_display_snapshot!(p("'dog race' = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
+insta::assert_snapshot!(p("'dog race' = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
-insta::assert_display_snapshot!(p("\"dog race\" = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
+insta::assert_snapshot!(p("\"dog race\" = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
 
 // Test IN
-insta::assert_display_snapshot!(p("colour IN[]"), @"{colour} IN[]");
+insta::assert_snapshot!(p("colour IN[]"), @"{colour} IN[]");
-insta::assert_display_snapshot!(p("colour IN[green]"), @"{colour} IN[{green}, ]");
+insta::assert_snapshot!(p("colour IN[green]"), @"{colour} IN[{green}, ]");
-insta::assert_display_snapshot!(p("colour IN[green,]"), @"{colour} IN[{green}, ]");
+insta::assert_snapshot!(p("colour IN[green,]"), @"{colour} IN[{green}, ]");
-insta::assert_display_snapshot!(p("colour NOT IN[green,blue]"), @"NOT ({colour} IN[{green}, {blue}, ])");
+insta::assert_snapshot!(p("colour NOT IN[green,blue]"), @"NOT ({colour} IN[{green}, {blue}, ])");
-insta::assert_display_snapshot!(p(" colour IN [ green , blue , ]"), @"{colour} IN[{green}, {blue}, ]");
+insta::assert_snapshot!(p(" colour IN [ green , blue , ]"), @"{colour} IN[{green}, {blue}, ]");
 
 // Test IN + OR/AND/()
-insta::assert_display_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
+insta::assert_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
-insta::assert_display_snapshot!(p("NOT (colour IN [green, blue]) AND color = green "), @"AND[NOT ({colour} IN[{green}, {blue}, ]), {color} = {green}, ]");
+insta::assert_snapshot!(p("NOT (colour IN [green, blue]) AND color = green "), @"AND[NOT ({colour} IN[{green}, {blue}, ]), {color} = {green}, ]");
-insta::assert_display_snapshot!(p("x = 1 OR NOT (colour IN [green, blue] OR color = green) "), @"OR[{x} = {1}, NOT (OR[{colour} IN[{green}, {blue}, ], {color} = {green}, ]), ]");
+insta::assert_snapshot!(p("x = 1 OR NOT (colour IN [green, blue] OR color = green) "), @"OR[{x} = {1}, NOT (OR[{colour} IN[{green}, {blue}, ], {color} = {green}, ]), ]");
 
 // Test whitespace start/end
-insta::assert_display_snapshot!(p(" colour = green "), @"{colour} = {green}");
+insta::assert_snapshot!(p(" colour = green "), @"{colour} = {green}");
-insta::assert_display_snapshot!(p(" (colour = green OR colour = red) "), @"OR[{colour} = {green}, {colour} = {red}, ]");
+insta::assert_snapshot!(p(" (colour = green OR colour = red) "), @"OR[{colour} = {green}, {colour} = {red}, ]");
-insta::assert_display_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
+insta::assert_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
-insta::assert_display_snapshot!(p(" colour NOT IN [green, blue] "), @"NOT ({colour} IN[{green}, {blue}, ])");
+insta::assert_snapshot!(p(" colour NOT IN [green, blue] "), @"NOT ({colour} IN[{green}, {blue}, ])");
-insta::assert_display_snapshot!(p(" colour IN [green, blue] "), @"{colour} IN[{green}, {blue}, ]");
+insta::assert_snapshot!(p(" colour IN [green, blue] "), @"{colour} IN[{green}, {blue}, ]");
 
 // Test conditions
-insta::assert_display_snapshot!(p("channel != ponce"), @"{channel} != {ponce}");
+insta::assert_snapshot!(p("channel != ponce"), @"{channel} != {ponce}");
-insta::assert_display_snapshot!(p("NOT channel = ponce"), @"NOT ({channel} = {ponce})");
+insta::assert_snapshot!(p("NOT channel = ponce"), @"NOT ({channel} = {ponce})");
-insta::assert_display_snapshot!(p("subscribers < 1000"), @"{subscribers} < {1000}");
+insta::assert_snapshot!(p("subscribers < 1000"), @"{subscribers} < {1000}");
-insta::assert_display_snapshot!(p("subscribers > 1000"), @"{subscribers} > {1000}");
+insta::assert_snapshot!(p("subscribers > 1000"), @"{subscribers} > {1000}");
-insta::assert_display_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
+insta::assert_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
-insta::assert_display_snapshot!(p("subscribers >= 1000"), @"{subscribers} >= {1000}");
+insta::assert_snapshot!(p("subscribers >= 1000"), @"{subscribers} >= {1000}");
-insta::assert_display_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
+insta::assert_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
-insta::assert_display_snapshot!(p("subscribers 100 TO 1000"), @"{subscribers} {100} TO {1000}");
+insta::assert_snapshot!(p("subscribers 100 TO 1000"), @"{subscribers} {100} TO {1000}");
 
 // Test NOT
-insta::assert_display_snapshot!(p("NOT subscribers < 1000"), @"NOT ({subscribers} < {1000})");
+insta::assert_snapshot!(p("NOT subscribers < 1000"), @"NOT ({subscribers} < {1000})");
-insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})");
+insta::assert_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})");
 
 // Test NULL + NOT NULL
-insta::assert_display_snapshot!(p("subscribers IS NULL"), @"{subscribers} IS NULL");
+insta::assert_snapshot!(p("subscribers IS NULL"), @"{subscribers} IS NULL");
-insta::assert_display_snapshot!(p("NOT subscribers IS NULL"), @"NOT ({subscribers} IS NULL)");
+insta::assert_snapshot!(p("NOT subscribers IS NULL"), @"NOT ({subscribers} IS NULL)");
-insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
+insta::assert_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
-insta::assert_display_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL");
+insta::assert_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL");
-insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
+insta::assert_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
 
 // Test EMPTY + NOT EMPTY
-insta::assert_display_snapshot!(p("subscribers IS EMPTY"), @"{subscribers} IS EMPTY");
+insta::assert_snapshot!(p("subscribers IS EMPTY"), @"{subscribers} IS EMPTY");
-insta::assert_display_snapshot!(p("NOT subscribers IS EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+insta::assert_snapshot!(p("NOT subscribers IS EMPTY"), @"NOT ({subscribers} IS EMPTY)");
-insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+insta::assert_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
-insta::assert_display_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY");
+insta::assert_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY");
-insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+insta::assert_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
 
 // Test EXISTS + NOT EXITS
-insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
+insta::assert_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
-insta::assert_display_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)");
+insta::assert_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)");
-insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
+insta::assert_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
-insta::assert_display_snapshot!(p("NOT subscribers NOT EXISTS"), @"{subscribers} EXISTS");
+insta::assert_snapshot!(p("NOT subscribers NOT EXISTS"), @"{subscribers} EXISTS");
-insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
+insta::assert_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
 
 // Test nested NOT
-insta::assert_display_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}");
+insta::assert_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}");
-insta::assert_display_snapshot!(p("NOT NOT (NOT NOT x = 5)"), @"{x} = {5}");
+insta::assert_snapshot!(p("NOT NOT (NOT NOT x = 5)"), @"{x} = {5}");
 
 // Test geo radius
-insta::assert_display_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
+insta::assert_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
-insta::assert_display_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
+insta::assert_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
-insta::assert_display_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
+insta::assert_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
 
 // Test geo bounding box
-insta::assert_display_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
+insta::assert_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
-insta::assert_display_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
+insta::assert_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
-insta::assert_display_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
+insta::assert_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
 
 // Test OR + AND
-insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
+insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
-insta::assert_display_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
+insta::assert_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
-insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000"), @"OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ]");
+insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000"), @"OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ]");
-insta::assert_display_snapshot!(
+insta::assert_snapshot!(
 p("channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000 OR colour = red OR colour = blue AND size = 7"),
 @"OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, {colour} = {red}, AND[{colour} = {blue}, {size} = {7}, ], ]"
 );
 
 // Test parentheses
-insta::assert_display_snapshot!(p("channel = ponce AND ( 'dog race' != 'bernese mountain' OR subscribers > 1000 )"), @"AND[{channel} = {ponce}, OR[{dog race} != {bernese mountain}, {subscribers} > {1000}, ], ]");
+insta::assert_snapshot!(p("channel = ponce AND ( 'dog race' != 'bernese mountain' OR subscribers > 1000 )"), @"AND[{channel} = {ponce}, OR[{dog race} != {bernese mountain}, {subscribers} > {1000}, ], ]");
-insta::assert_display_snapshot!(p("(channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000) AND _geoRadius(12, 13, 14)"), @"AND[OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ], _geoRadius({12}, {13}, {14}), ]");
+insta::assert_snapshot!(p("(channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000) AND _geoRadius(12, 13, 14)"), @"AND[OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ], _geoRadius({12}, {13}, {14}), ]");
 
 // Test recursion
 // This is the most that is allowed
-insta::assert_display_snapshot!(
+insta::assert_snapshot!(
p("(((((((((((((((((((((((((((((((((((((((((((((((((x = 1)))))))))))))))))))))))))))))))))))))))))))))))))"), p("(((((((((((((((((((((((((((((((((((((((((((((((((x = 1)))))))))))))))))))))))))))))))))))))))))))))))))"),
@"{x} = {1}" @"{x} = {1}"
); );
insta::assert_display_snapshot!( insta::assert_snapshot!(
p("NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT x = 1"), p("NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT x = 1"),
@"NOT ({x} = {1})" @"NOT ({x} = {1})"
); );
// Confusing keywords // Confusing keywords
insta::assert_display_snapshot!(p(r#"NOT "OR" EXISTS AND "EXISTS" NOT EXISTS"#), @"AND[NOT ({OR} EXISTS), NOT ({EXISTS} EXISTS), ]"); insta::assert_snapshot!(p(r#"NOT "OR" EXISTS AND "EXISTS" NOT EXISTS"#), @"AND[NOT ({OR} EXISTS), NOT ({EXISTS} EXISTS), ]");
} }
#[test] #[test]
@ -689,182 +689,182 @@ pub mod tests {
Fc::parse(s).unwrap_err().to_string() Fc::parse(s).unwrap_err().to_string()
} }
insta::assert_display_snapshot!(p("channel = Ponce = 12"), @r###" insta::assert_snapshot!(p("channel = Ponce = 12"), @r###"
Found unexpected characters at the end of the filter: `= 12`. You probably forgot an `OR` or an `AND` rule. Found unexpected characters at the end of the filter: `= 12`. You probably forgot an `OR` or an `AND` rule.
17:21 channel = Ponce = 12 17:21 channel = Ponce = 12
"###); "###);
insta::assert_display_snapshot!(p("channel = "), @r###" insta::assert_snapshot!(p("channel = "), @r###"
Was expecting a value but instead got nothing. Was expecting a value but instead got nothing.
14:14 channel = 14:14 channel =
"###); "###);
insta::assert_display_snapshot!(p("channel = 🐻"), @r###" insta::assert_snapshot!(p("channel = 🐻"), @r###"
Was expecting a value but instead got `🐻`. Was expecting a value but instead got `🐻`.
11:12 channel = 🐻 11:12 channel = 🐻
"###); "###);
insta::assert_display_snapshot!(p("channel = 🐻 AND followers < 100"), @r###" insta::assert_snapshot!(p("channel = 🐻 AND followers < 100"), @r###"
Was expecting a value but instead got `🐻`. Was expecting a value but instead got `🐻`.
11:12 channel = 🐻 AND followers < 100 11:12 channel = 🐻 AND followers < 100
"###); "###);
insta::assert_display_snapshot!(p("'OR'"), @r###" insta::assert_snapshot!(p("'OR'"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
1:5 'OR' 1:5 'OR'
"###); "###);
insta::assert_display_snapshot!(p("OR"), @r###" insta::assert_snapshot!(p("OR"), @r###"
Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes. Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
1:3 OR 1:3 OR
"###); "###);
insta::assert_display_snapshot!(p("channel Ponce"), @r###" insta::assert_snapshot!(p("channel Ponce"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
1:14 channel Ponce 1:14 channel Ponce
"###); "###);
insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###" insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
19:19 channel = Ponce OR 19:19 channel = Ponce OR
"###); "###);
insta::assert_display_snapshot!(p("_geoRadius"), @r###" insta::assert_snapshot!(p("_geoRadius"), @r###"
The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`. The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
1:11 _geoRadius 1:11 _geoRadius
"###); "###);
insta::assert_display_snapshot!(p("_geoRadius = 12"), @r###" insta::assert_snapshot!(p("_geoRadius = 12"), @r###"
The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`. The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
1:16 _geoRadius = 12 1:16 _geoRadius = 12
"###); "###);
insta::assert_display_snapshot!(p("_geoBoundingBox"), @r###" insta::assert_snapshot!(p("_geoBoundingBox"), @r###"
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`. The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
1:16 _geoBoundingBox 1:16 _geoBoundingBox
"###); "###);
insta::assert_display_snapshot!(p("_geoBoundingBox = 12"), @r###" insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r###"
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`. The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
1:21 _geoBoundingBox = 12 1:21 _geoBoundingBox = 12
"###); "###);
insta::assert_display_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###" insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`. The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
1:26 _geoBoundingBox(1.0, 1.0) 1:26 _geoBoundingBox(1.0, 1.0)
"###); "###);
insta::assert_display_snapshot!(p("_geoPoint(12, 13, 14)"), @r###" insta::assert_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates. `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
1:22 _geoPoint(12, 13, 14) 1:22 _geoPoint(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###" insta::assert_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###"
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates. `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
13:34 position <= _geoPoint(12, 13, 14) 13:34 position <= _geoPoint(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("_geoDistance(12, 13, 14)"), @r###" insta::assert_snapshot!(p("_geoDistance(12, 13, 14)"), @r###"
`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates. `_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
1:25 _geoDistance(12, 13, 14) 1:25 _geoDistance(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("position <= _geoDistance(12, 13, 14)"), @r###" insta::assert_snapshot!(p("position <= _geoDistance(12, 13, 14)"), @r###"
`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates. `_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
13:37 position <= _geoDistance(12, 13, 14) 13:37 position <= _geoDistance(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("_geo(12, 13, 14)"), @r###" insta::assert_snapshot!(p("_geo(12, 13, 14)"), @r###"
`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates. `_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
1:17 _geo(12, 13, 14) 1:17 _geo(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("position <= _geo(12, 13, 14)"), @r###" insta::assert_snapshot!(p("position <= _geo(12, 13, 14)"), @r###"
`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates. `_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
13:29 position <= _geo(12, 13, 14) 13:29 position <= _geo(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("position <= _geoRadius(12, 13, 14)"), @r###" insta::assert_snapshot!(p("position <= _geoRadius(12, 13, 14)"), @r###"
The `_geoRadius` filter is an operation and can't be used as a value. The `_geoRadius` filter is an operation and can't be used as a value.
13:35 position <= _geoRadius(12, 13, 14) 13:35 position <= _geoRadius(12, 13, 14)
"###); "###);
insta::assert_display_snapshot!(p("channel = 'ponce"), @r###" insta::assert_snapshot!(p("channel = 'ponce"), @r###"
Expression `\'ponce` is missing the following closing delimiter: `'`. Expression `\'ponce` is missing the following closing delimiter: `'`.
11:17 channel = 'ponce 11:17 channel = 'ponce
"###); "###);
insta::assert_display_snapshot!(p("channel = \"ponce"), @r###" insta::assert_snapshot!(p("channel = \"ponce"), @r###"
Expression `\"ponce` is missing the following closing delimiter: `"`. Expression `\"ponce` is missing the following closing delimiter: `"`.
11:17 channel = "ponce 11:17 channel = "ponce
"###); "###);
insta::assert_display_snapshot!(p("channel = mv OR (followers >= 1000"), @r###" insta::assert_snapshot!(p("channel = mv OR (followers >= 1000"), @r###"
Expression `(followers >= 1000` is missing the following closing delimiter: `)`. Expression `(followers >= 1000` is missing the following closing delimiter: `)`.
17:35 channel = mv OR (followers >= 1000 17:35 channel = mv OR (followers >= 1000
"###); "###);
insta::assert_display_snapshot!(p("channel = mv OR followers >= 1000)"), @r###" insta::assert_snapshot!(p("channel = mv OR followers >= 1000)"), @r###"
Found unexpected characters at the end of the filter: `)`. You probably forgot an `OR` or an `AND` rule. Found unexpected characters at the end of the filter: `)`. You probably forgot an `OR` or an `AND` rule.
34:35 channel = mv OR followers >= 1000) 34:35 channel = mv OR followers >= 1000)
"###); "###);
insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###" insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
1:17 colour NOT EXIST 1:17 colour NOT EXIST
"###); "###);
insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###" insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
1:23 subscribers 100 TO1000 1:23 subscribers 100 TO1000
"###); "###);
insta::assert_display_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###" insta::assert_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule. Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule.
17:44 channel = ponce ORdog != 'bernese mountain' 17:44 channel = ponce ORdog != 'bernese mountain'
"###); "###);
insta::assert_display_snapshot!(p("colour IN blue, green]"), @r###" insta::assert_snapshot!(p("colour IN blue, green]"), @r###"
Expected `[` after `IN` keyword. Expected `[` after `IN` keyword.
11:23 colour IN blue, green] 11:23 colour IN blue, green]
"###); "###);
insta::assert_display_snapshot!(p("colour IN [blue, green, 'blue' > 2]"), @r###" insta::assert_snapshot!(p("colour IN [blue, green, 'blue' > 2]"), @r###"
Expected only comma-separated field names inside `IN[..]` but instead found `> 2]`. Expected only comma-separated field names inside `IN[..]` but instead found `> 2]`.
32:36 colour IN [blue, green, 'blue' > 2] 32:36 colour IN [blue, green, 'blue' > 2]
"###); "###);
insta::assert_display_snapshot!(p("colour IN [blue, green, AND]"), @r###" insta::assert_snapshot!(p("colour IN [blue, green, AND]"), @r###"
Expected only comma-separated field names inside `IN[..]` but instead found `AND]`. Expected only comma-separated field names inside `IN[..]` but instead found `AND]`.
25:29 colour IN [blue, green, AND] 25:29 colour IN [blue, green, AND]
"###); "###);
insta::assert_display_snapshot!(p("colour IN [blue, green"), @r###" insta::assert_snapshot!(p("colour IN [blue, green"), @r###"
Expected matching `]` after the list of field names given to `IN[` Expected matching `]` after the list of field names given to `IN[`
23:23 colour IN [blue, green 23:23 colour IN [blue, green
"###); "###);
insta::assert_display_snapshot!(p("colour IN ['blue, green"), @r###" insta::assert_snapshot!(p("colour IN ['blue, green"), @r###"
Expression `\'blue, green` is missing the following closing delimiter: `'`. Expression `\'blue, green` is missing the following closing delimiter: `'`.
12:24 colour IN ['blue, green 12:24 colour IN ['blue, green
"###); "###);
insta::assert_display_snapshot!(p("x = EXISTS"), @r###" insta::assert_snapshot!(p("x = EXISTS"), @r###"
Was expecting a value but instead got `EXISTS`, which is a reserved keyword. To use `EXISTS` as a field name or a value, surround it by quotes. Was expecting a value but instead got `EXISTS`, which is a reserved keyword. To use `EXISTS` as a field name or a value, surround it by quotes.
5:11 x = EXISTS 5:11 x = EXISTS
"###); "###);
insta::assert_display_snapshot!(p("AND = 8"), @r###" insta::assert_snapshot!(p("AND = 8"), @r###"
Was expecting a value but instead got `AND`, which is a reserved keyword. To use `AND` as a field name or a value, surround it by quotes. Was expecting a value but instead got `AND`, which is a reserved keyword. To use `AND` as a field name or a value, surround it by quotes.
1:4 AND = 8 1:4 AND = 8
"###); "###);
insta::assert_display_snapshot!(p("((((((((((((((((((((((((((((((((((((((((((((((((((x = 1))))))))))))))))))))))))))))))))))))))))))))))))))"), @r###" insta::assert_snapshot!(p("((((((((((((((((((((((((((((((((((((((((((((((((((x = 1))))))))))))))))))))))))))))))))))))))))))))))))))"), @r###"
 The filter exceeded the maximum depth limit. Try rewriting the filter so that it contains fewer nested conditions.
51:106 ((((((((((((((((((((((((((((((((((((((((((((((((((x = 1)))))))))))))))))))))))))))))))))))))))))))))))))) 51:106 ((((((((((((((((((((((((((((((((((((((((((((((((((x = 1))))))))))))))))))))))))))))))))))))))))))))))))))
"###); "###);
insta::assert_display_snapshot!( insta::assert_snapshot!(
p("NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT x = 1"), p("NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT x = 1"),
@r###" @r###"
The filter exceeded the maximum depth limit. Try rewriting the filter so that it contains fewer nested conditions. The filter exceeded the maximum depth limit. Try rewriting the filter so that it contains fewer nested conditions.
@ -872,40 +872,40 @@ pub mod tests {
"### "###
); );
insta::assert_display_snapshot!(p(r#"NOT OR EXISTS AND EXISTS NOT EXISTS"#), @r###" insta::assert_snapshot!(p(r#"NOT OR EXISTS AND EXISTS NOT EXISTS"#), @r###"
Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes. Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
5:7 NOT OR EXISTS AND EXISTS NOT EXISTS 5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
"###); "###);
insta::assert_display_snapshot!(p(r#"value NULL"#), @r###" insta::assert_snapshot!(p(r#"value NULL"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
1:11 value NULL 1:11 value NULL
"###); "###);
insta::assert_display_snapshot!(p(r#"value NOT NULL"#), @r###" insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
1:15 value NOT NULL 1:15 value NOT NULL
"###); "###);
insta::assert_display_snapshot!(p(r#"value EMPTY"#), @r###" insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
1:12 value EMPTY 1:12 value EMPTY
"###); "###);
insta::assert_display_snapshot!(p(r#"value NOT EMPTY"#), @r###" insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
1:16 value NOT EMPTY 1:16 value NOT EMPTY
"###); "###);
insta::assert_display_snapshot!(p(r#"value IS"#), @r###" insta::assert_snapshot!(p(r#"value IS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
1:9 value IS 1:9 value IS
"###); "###);
insta::assert_display_snapshot!(p(r#"value IS NOT"#), @r###" insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
1:13 value IS NOT 1:13 value IS NOT
"###); "###);
insta::assert_display_snapshot!(p(r#"value IS EXISTS"#), @r###" insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
1:16 value IS EXISTS 1:16 value IS EXISTS
"###); "###);
insta::assert_display_snapshot!(p(r#"value IS NOT EXISTS"#), @r###" insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`. Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
1:20 value IS NOT EXISTS 1:20 value IS NOT EXISTS
"###); "###);

View File

@ -12,9 +12,9 @@ license.workspace = true
[dependencies] [dependencies]
arbitrary = { version = "1.3.2", features = ["derive"] } arbitrary = { version = "1.3.2", features = ["derive"] }
clap = { version = "4.4.17", features = ["derive"] } clap = { version = "4.5.9", features = ["derive"] }
fastrand = "2.0.1" fastrand = "2.1.0"
milli = { path = "../milli" } milli = { path = "../milli" }
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.111", features = ["preserve_order"] } serde_json = { version = "1.0.120", features = ["preserve_order"] }
tempfile = "3.9.0" tempfile = "3.10.1"

View File

@ -11,38 +11,38 @@ edition.workspace = true
license.workspace = true license.workspace = true
[dependencies] [dependencies]
anyhow = "1.0.79" anyhow = "1.0.86"
bincode = "1.3.3" bincode = "1.3.3"
csv = "1.3.0" csv = "1.3.0"
derive_builder = "0.12.0" derive_builder = "0.20.0"
dump = { path = "../dump" } dump = { path = "../dump" }
enum-iterator = "1.5.0" enum-iterator = "2.1.0"
file-store = { path = "../file-store" } file-store = { path = "../file-store" }
flate2 = "1.0.28" flate2 = "1.0.30"
meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" } meilisearch-types = { path = "../meilisearch-types" }
page_size = "0.5.0" page_size = "0.6.0"
rayon = "1.8.1" rayon = "1.10.0"
roaring = { version = "0.10.2", features = ["serde"] } roaring = { version = "0.10.6", features = ["serde"] }
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.111", features = ["preserve_order"] } serde_json = { version = "1.0.120", features = ["preserve_order"] }
synchronoise = "1.0.1" synchronoise = "1.0.1"
tempfile = "3.9.0" tempfile = "3.10.1"
thiserror = "1.0.56" thiserror = "1.0.61"
time = { version = "0.3.31", features = [ time = { version = "0.3.36", features = [
"serde-well-known", "serde-well-known",
"formatting", "formatting",
"parsing", "parsing",
"macros", "macros",
] } ] }
tracing = "0.1.40" tracing = "0.1.40"
ureq = "2.9.7" ureq = "2.10.0"
uuid = { version = "1.6.1", features = ["serde", "v4"] } uuid = { version = "1.10.0", features = ["serde", "v4"] }
[dev-dependencies] [dev-dependencies]
arroy = "0.4.0" arroy = "0.4.0"
big_s = "1.0.2" big_s = "1.0.2"
crossbeam = "0.8.4" crossbeam = "0.8.4"
insta = { version = "1.34.0", features = ["json", "redactions"] } insta = { version = "1.39.0", features = ["json", "redactions"] }
maplit = "1.0.2" maplit = "1.0.2"
meili-snap = { path = "../meili-snap" } meili-snap = { path = "../meili-snap" }

View File

@ -11,6 +11,6 @@ edition.workspace = true
license.workspace = true license.workspace = true
[dependencies] [dependencies]
insta = { version = "^1.34.0", features = ["json", "redactions"] } insta = { version = "^1.39.0", features = ["json", "redactions"] }
md5 = "0.7.0" md5 = "0.7.0"
once_cell = "1.19" once_cell = "1.19"

View File

@ -11,16 +11,16 @@ edition.workspace = true
license.workspace = true license.workspace = true
[dependencies] [dependencies]
base64 = "0.21.7" base64 = "0.22.1"
enum-iterator = "1.5.0" enum-iterator = "2.1.0"
hmac = "0.12.1" hmac = "0.12.1"
maplit = "1.0.2" maplit = "1.0.2"
meilisearch-types = { path = "../meilisearch-types" } meilisearch-types = { path = "../meilisearch-types" }
rand = "0.8.5" rand = "0.8.5"
roaring = { version = "0.10.2", features = ["serde"] } roaring = { version = "0.10.6", features = ["serde"] }
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.111", features = ["preserve_order"] } serde_json = { version = "1.0.120", features = ["preserve_order"] }
sha2 = "0.10.8" sha2 = "0.10.8"
thiserror = "1.0.56" thiserror = "1.0.61"
time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] } time = { version = "0.3.36", features = ["serde-well-known", "formatting", "parsing", "macros"] }
uuid = { version = "1.6.1", features = ["serde", "v4"] } uuid = { version = "1.10.0", features = ["serde", "v4"] }

View File

@ -11,36 +11,36 @@ edition.workspace = true
license.workspace = true license.workspace = true
[dependencies] [dependencies]
actix-web = { version = "4.6.0", default-features = false } actix-web = { version = "4.8.0", default-features = false }
anyhow = "1.0.79" anyhow = "1.0.86"
convert_case = "0.6.0" convert_case = "0.6.0"
csv = "1.3.0" csv = "1.3.0"
deserr = { version = "0.6.1", features = ["actix-web"] } deserr = { version = "0.6.2", features = ["actix-web"] }
either = { version = "1.9.0", features = ["serde"] } either = { version = "1.13.0", features = ["serde"] }
enum-iterator = "1.5.0" enum-iterator = "2.1.0"
file-store = { path = "../file-store" } file-store = { path = "../file-store" }
flate2 = "1.0.28" flate2 = "1.0.30"
fst = "0.4.7" fst = "0.4.7"
memmap2 = "0.7.1" memmap2 = "0.9.4"
milli = { path = "../milli" } milli = { path = "../milli" }
roaring = { version = "0.10.2", features = ["serde"] } roaring = { version = "0.10.6", features = ["serde"] }
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde-cs = "0.2.4" serde-cs = "0.2.4"
serde_json = "1.0.111" serde_json = "1.0.120"
tar = "0.4.40" tar = "0.4.41"
tempfile = "3.9.0" tempfile = "3.10.1"
thiserror = "1.0.56" thiserror = "1.0.61"
time = { version = "0.3.31", features = [ time = { version = "0.3.36", features = [
"serde-well-known", "serde-well-known",
"formatting", "formatting",
"parsing", "parsing",
"macros", "macros",
] } ] }
tokio = "1.35" tokio = "1.38"
uuid = { version = "1.6.1", features = ["serde", "v4"] } uuid = { version = "1.10.0", features = ["serde", "v4"] }
[dev-dependencies] [dev-dependencies]
insta = "1.34.0" insta = "1.39.0"
meili-snap = { path = "../meili-snap" } meili-snap = { path = "../meili-snap" }
[features] [features]

View File

@ -14,104 +14,99 @@ default-run = "meilisearch"
[dependencies] [dependencies]
actix-cors = "0.7.0" actix-cors = "0.7.0"
actix-http = { version = "3.7.0", default-features = false, features = [ actix-http = { version = "3.8.0", default-features = false, features = [
"compress-brotli", "compress-brotli",
"compress-gzip", "compress-gzip",
"rustls-0_21", "rustls-0_21",
] } ] }
actix-utils = "3.0.1" actix-utils = "3.0.1"
actix-web = { version = "4.6.0", default-features = false, features = [ actix-web = { version = "4.8.0", default-features = false, features = [
"macros", "macros",
"compress-brotli", "compress-brotli",
"compress-gzip", "compress-gzip",
"cookies", "cookies",
"rustls-0_21", "rustls-0_21",
] } ] }
actix-web-static-files = { version = "4.0.1", optional = true } anyhow = { version = "1.0.86", features = ["backtrace"] }
anyhow = { version = "1.0.79", features = ["backtrace"] } async-trait = "0.1.81"
async-stream = "0.3.5" bstr = "1.9.1"
async-trait = "0.1.77" byte-unit = { version = "5.1.4", default-features = false, features = [
bstr = "1.9.0"
byte-unit = { version = "4.0.19", default-features = false, features = [
"std", "std",
"byte",
"serde", "serde",
] } ] }
bytes = "1.5.0" bytes = "1.6.0"
clap = { version = "4.4.17", features = ["derive", "env"] } clap = { version = "4.5.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.11" crossbeam-channel = "0.5.13"
deserr = { version = "0.6.1", features = ["actix-web"] } deserr = { version = "0.6.2", features = ["actix-web"] }
dump = { path = "../dump" } dump = { path = "../dump" }
either = "1.9.0" either = "1.13.0"
file-store = { path = "../file-store" } file-store = { path = "../file-store" }
flate2 = "1.0.28" flate2 = "1.0.30"
fst = "0.4.7" fst = "0.4.7"
futures = "0.3.30" futures = "0.3.30"
futures-util = "0.3.30" futures-util = "0.3.30"
http = "0.2.11"
index-scheduler = { path = "../index-scheduler" } index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "2.1.0", features = ["serde"] } indexmap = { version = "2.2.6", features = ["serde"] }
is-terminal = "0.4.10" is-terminal = "0.4.12"
itertools = "0.11.0" itertools = "0.13.0"
jsonwebtoken = "9.2.0" jsonwebtoken = "9.3.0"
lazy_static = "1.4.0" lazy_static = "1.5.0"
meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" } meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.39", default-features = false } mimalloc = { version = "0.1.43", default-features = false }
mime = "0.3.17" mime = "0.3.17"
num_cpus = "1.16.0" num_cpus = "1.16.0"
obkv = "0.2.1" obkv = "0.2.2"
once_cell = "1.19.0" once_cell = "1.19.0"
ordered-float = "4.2.0" ordered-float = "4.2.1"
parking_lot = "0.12.1" parking_lot = "0.12.3"
permissive-json-pointer = { path = "../permissive-json-pointer" } permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.13" pin-project-lite = "0.2.14"
platform-dirs = "0.3.0" platform-dirs = "0.3.0"
prometheus = { version = "0.13.3", features = ["process"] } prometheus = { version = "0.13.4", features = ["process"] }
rand = "0.8.5" rand = "0.8.5"
rayon = "1.8.0" rayon = "1.10.0"
regex = "1.10.2" regex = "1.10.5"
reqwest = { version = "0.11.23", features = [ reqwest = { version = "0.12.5", features = [
"rustls-tls", "rustls-tls",
"json", "json",
], default-features = false } ], default-features = false }
rustls = "0.21.12" rustls = "0.21.12"
rustls-pemfile = "1.0.2" rustls-pemfile = "1.0.4"
segment = { version = "0.2.3", optional = true } segment = { version = "0.2.4", optional = true }
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.111", features = ["preserve_order"] } serde_json = { version = "1.0.120", features = ["preserve_order"] }
sha2 = "0.10.8" sha2 = "0.10.8"
siphasher = "1.0.0" siphasher = "1.0.1"
slice-group-by = "0.3.1" slice-group-by = "0.3.1"
static-files = { version = "0.2.3", optional = true } static-files = { version = "0.2.4", optional = true }
sysinfo = "0.30.5" sysinfo = "0.30.13"
tar = "0.4.40" tar = "0.4.41"
tempfile = "3.9.0" tempfile = "3.10.1"
thiserror = "1.0.56" thiserror = "1.0.61"
time = { version = "0.3.31", features = [ time = { version = "0.3.36", features = [
"serde-well-known", "serde-well-known",
"formatting", "formatting",
"parsing", "parsing",
"macros", "macros",
] } ] }
tokio = { version = "1.35.1", features = ["full"] } tokio = { version = "1.38.0", features = ["full"] }
tokio-stream = "0.1.14" toml = "0.8.14"
toml = "0.8.8" uuid = { version = "1.10.0", features = ["serde", "v4"] }
uuid = { version = "1.6.1", features = ["serde", "v4"] }
walkdir = "2.4.0"
serde_urlencoded = "0.7.1" serde_urlencoded = "0.7.1"
termcolor = "1.4.1" termcolor = "1.4.1"
url = { version = "2.5.0", features = ["serde"] } url = { version = "2.5.2", features = ["serde"] }
tracing = "0.1.40" tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["json"] } tracing-subscriber = { version = "0.3.18", features = ["json"] }
tracing-trace = { version = "0.1.0", path = "../tracing-trace" } tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.10" tracing-actix-web = "0.7.11"
build-info = { version = "1.7.0", path = "../build-info" } build-info = { version = "1.7.0", path = "../build-info" }
[dev-dependencies] [dev-dependencies]
actix-rt = "2.9.0" actix-rt = "2.10.0"
assert-json-diff = "2.0.2"
brotli = "6.0.0" brotli = "6.0.0"
insta = "1.34.0" insta = "1.39.0"
manifest-dir-macros = "0.1.18" manifest-dir-macros = "0.1.18"
maplit = "1.0.2" maplit = "1.0.2"
meili-snap = { path = "../meili-snap" } meili-snap = { path = "../meili-snap" }
@ -120,23 +115,22 @@ urlencoding = "2.1.3"
yaup = "0.3.1" yaup = "0.3.1"
[build-dependencies] [build-dependencies]
anyhow = { version = "1.0.79", optional = true } anyhow = { version = "1.0.86", optional = true }
cargo_toml = { version = "0.18.0", optional = true } cargo_toml = { version = "0.20.3", optional = true }
hex = { version = "0.4.3", optional = true } hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.11.23", features = [ reqwest = { version = "0.12.5", features = [
"blocking", "blocking",
"rustls-tls", "rustls-tls",
], default-features = false, optional = true } ], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true } sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.3", optional = true } static-files = { version = "0.2.4", optional = true }
tempfile = { version = "3.9.0", optional = true } tempfile = { version = "3.10.1", optional = true }
zip = { version = "0.6.6", optional = true } zip = { version = "2.1.3", optional = true }
[features] [features]
default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"] default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
analytics = ["segment"] analytics = ["segment"]
mini-dashboard = [ mini-dashboard = [
"actix-web-static-files",
"static-files", "static-files",
"anyhow", "anyhow",
"cargo_toml", "cargo_toml",

View File

@ -5,10 +5,9 @@ use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use actix_web::http::header::USER_AGENT; use actix_web::http::header::{CONTENT_TYPE, USER_AGENT};
use actix_web::HttpRequest; use actix_web::HttpRequest;
use byte_unit::Byte; use byte_unit::Byte;
use http::header::CONTENT_TYPE;
use index_scheduler::IndexScheduler; use index_scheduler::IndexScheduler;
use meilisearch_auth::{AuthController, AuthFilter}; use meilisearch_auth::{AuthController, AuthFilter};
use meilisearch_types::InstanceUid; use meilisearch_types::InstanceUid;
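
The direct `http` crate dependency is dropped in this update; actix-web re-exports the same header names, so imports move to `actix_web::http::header`. A minimal sketch of reading headers through the re-export (the helper below is illustrative, not part of the codebase):

```rust
use actix_web::http::header::{CONTENT_TYPE, USER_AGENT};
use actix_web::HttpRequest;

// Illustrative helper: pull two request headers as owned strings.
fn request_meta(req: &HttpRequest) -> (String, String) {
    let get = |name| {
        req.headers()
            .get(name)
            .map(|v| String::from_utf8_lossy(v.as_bytes()).into_owned())
            .unwrap_or_default()
    };
    (get(CONTENT_TYPE), get(USER_AGENT))
}
```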

View File

@ -1,6 +1,6 @@
use actix_web as aweb; use actix_web as aweb;
use aweb::error::{JsonPayloadError, QueryPayloadError}; use aweb::error::{JsonPayloadError, QueryPayloadError};
use byte_unit::Byte; use byte_unit::{Byte, UnitType};
use meilisearch_types::document_formats::{DocumentFormatError, PayloadType}; use meilisearch_types::document_formats::{DocumentFormatError, PayloadType};
use meilisearch_types::error::{Code, ErrorCode, ResponseError}; use meilisearch_types::error::{Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError}; use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError};
@ -33,7 +33,7 @@ pub enum MeilisearchHttpError {
TooManySearchRequests(usize), TooManySearchRequests(usize),
#[error("Internal error: Search limiter is down.")] #[error("Internal error: Search limiter is down.")]
SearchLimiterIsDown, SearchLimiterIsDown,
#[error("The provided payload reached the size limit. The maximum accepted payload size is {}.", Byte::from_bytes(*.0 as u64).get_appropriate_unit(true))] #[error("The provided payload reached the size limit. The maximum accepted payload size is {}.", Byte::from_u64(*.0 as u64).get_appropriate_unit(UnitType::Binary))]
PayloadTooLarge(usize), PayloadTooLarge(usize),
#[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.", #[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.",
.0.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", "), .0.len() .0.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", "), .0.len()
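
The error message above is one of several byte-unit 4 → 5 call sites: `Byte::from_bytes` becomes `Byte::from_u64`, and `get_appropriate_unit` now takes a `UnitType` instead of a bool. A minimal sketch of the new formatting path, with an arbitrary 10 MiB value:

```rust
use byte_unit::{Byte, UnitType};

fn main() {
    let limit = 10 * 1024 * 1024; // 10 MiB, arbitrary
    let pretty = Byte::from_u64(limit).get_appropriate_unit(UnitType::Binary);
    // `{:.2}` keeps two decimals, like the old `get_appropriate_unit(true)` output.
    println!("maximum accepted payload size is {pretty:.2}");
}
```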

View File

@ -15,6 +15,7 @@ use std::fs::File;
use std::io::{BufReader, BufWriter}; use std::io::{BufReader, BufWriter};
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::path::Path; use std::path::Path;
use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use std::thread::{self, available_parallelism}; use std::thread::{self, available_parallelism};
use std::time::Duration; use std::time::Duration;
@ -23,13 +24,13 @@ use actix_cors::Cors;
use actix_http::body::MessageBody; use actix_http::body::MessageBody;
use actix_web::dev::{ServiceFactory, ServiceResponse}; use actix_web::dev::{ServiceFactory, ServiceResponse};
use actix_web::error::JsonPayloadError; use actix_web::error::JsonPayloadError;
use actix_web::http::header::{CONTENT_TYPE, USER_AGENT};
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest}; use actix_web::{web, HttpRequest};
use analytics::Analytics; use analytics::Analytics;
use anyhow::bail; use anyhow::bail;
use error::PayloadError; use error::PayloadError;
use extractors::payload::PayloadConfig; use extractors::payload::PayloadConfig;
use http::header::CONTENT_TYPE;
use index_scheduler::{IndexScheduler, IndexSchedulerOptions}; use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
use meilisearch_auth::AuthController; use meilisearch_auth::AuthController;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
@ -167,7 +168,7 @@ impl tracing_actix_web::RootSpanBuilder for AwebTracingLogger {
let conn_info = request.connection_info(); let conn_info = request.connection_info();
let headers = request.headers(); let headers = request.headers();
let user_agent = headers let user_agent = headers
.get(http::header::USER_AGENT) .get(USER_AGENT)
.map(|value| String::from_utf8_lossy(value.as_bytes()).into_owned()) .map(|value| String::from_utf8_lossy(value.as_bytes()).into_owned())
.unwrap_or_default(); .unwrap_or_default();
info_span!("HTTP request", method = %request.method(), host = conn_info.host(), route = %request.path(), query_parameters = %request.query_string(), %user_agent, status_code = Empty, error = Empty) info_span!("HTTP request", method = %request.method(), host = conn_info.host(), route = %request.path(), query_parameters = %request.query_string(), %user_agent, status_code = Empty, error = Empty)
@ -300,15 +301,15 @@ fn open_or_create_database_unchecked(
dumps_path: opt.dump_dir.clone(), dumps_path: opt.dump_dir.clone(),
webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()), webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
webhook_authorization_header: opt.task_webhook_authorization_header.clone(), webhook_authorization_header: opt.task_webhook_authorization_header.clone(),
task_db_size: opt.max_task_db_size.get_bytes() as usize, task_db_size: opt.max_task_db_size.as_u64() as usize,
index_base_map_size: opt.max_index_size.get_bytes() as usize, index_base_map_size: opt.max_index_size.as_u64() as usize,
enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage, enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
indexer_config: (&opt.indexer_options).try_into()?, indexer_config: (&opt.indexer_options).try_into()?,
autobatching_enabled: true, autobatching_enabled: true,
cleanup_enabled: !opt.experimental_replication_parameters, cleanup_enabled: !opt.experimental_replication_parameters,
max_number_of_tasks: 1_000_000, max_number_of_tasks: 1_000_000,
max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks, max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().get_bytes() as usize, index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().as_u64() as usize,
index_count: DEFAULT_INDEX_COUNT, index_count: DEFAULT_INDEX_COUNT,
instance_features, instance_features,
})?) })?)
@ -476,7 +477,7 @@ pub fn configure_data(
opt.experimental_search_queue_size, opt.experimental_search_queue_size,
available_parallelism().unwrap_or(NonZeroUsize::new(2).unwrap()), available_parallelism().unwrap_or(NonZeroUsize::new(2).unwrap()),
); );
let http_payload_size_limit = opt.http_payload_size_limit.get_bytes() as usize; let http_payload_size_limit = opt.http_payload_size_limit.as_u64() as usize;
config config
.app_data(index_scheduler) .app_data(index_scheduler)
.app_data(auth) .app_data(auth)
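
Same migration from another angle in this hunk: raw byte counts now come from `as_u64()` instead of `get_bytes()`, and `Byte::from_str` is reached through the standard `FromStr` trait, hence the added `use std::str::FromStr`. A minimal sketch mirroring the `10GiB` growth amount above:

```rust
use std::str::FromStr;

use byte_unit::Byte;

fn main() {
    // Parse a human-readable size, then hand it to an API expecting raw bytes.
    let growth = Byte::from_str("10GiB").unwrap();
    let expected: u64 = 10 * 1024 * 1024 * 1024;
    assert_eq!(growth.as_u64(), expected);
    let index_growth_amount = growth.as_u64() as usize;
    assert_eq!(index_growth_amount as u64, expected);
}
```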

View File

@ -9,7 +9,7 @@ use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use std::{env, fmt, fs}; use std::{env, fmt, fs};
use byte_unit::{Byte, ByteError}; use byte_unit::{Byte, ParseError, UnitType};
use clap::Parser; use clap::Parser;
use meilisearch_types::features::InstanceTogglableFeatures; use meilisearch_types::features::InstanceTogglableFeatures;
use meilisearch_types::milli::update::IndexerConfig; use meilisearch_types::milli::update::IndexerConfig;
@ -674,7 +674,7 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
Ok(Self { Ok(Self {
log_every_n: Some(DEFAULT_LOG_EVERY_N), log_every_n: Some(DEFAULT_LOG_EVERY_N),
max_memory: other.max_indexing_memory.map(|b| b.get_bytes() as usize), max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
thread_pool: Some(thread_pool), thread_pool: Some(thread_pool),
max_positions_per_attributes: None, max_positions_per_attributes: None,
skip_index_budget: other.skip_index_budget, skip_index_budget: other.skip_index_budget,
@ -688,23 +688,25 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
pub struct MaxMemory(Option<Byte>); pub struct MaxMemory(Option<Byte>);
impl FromStr for MaxMemory { impl FromStr for MaxMemory {
type Err = ByteError; type Err = ParseError;
fn from_str(s: &str) -> Result<MaxMemory, ByteError> { fn from_str(s: &str) -> Result<MaxMemory, Self::Err> {
Byte::from_str(s).map(Some).map(MaxMemory) Byte::from_str(s).map(Some).map(MaxMemory)
} }
} }
impl Default for MaxMemory { impl Default for MaxMemory {
fn default() -> MaxMemory { fn default() -> MaxMemory {
MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_bytes)) MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_u64))
} }
} }
impl fmt::Display for MaxMemory { impl fmt::Display for MaxMemory {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.0 { match self.0 {
Some(memory) => write!(f, "{}", memory.get_appropriate_unit(true)), Some(memory) => {
write!(f, "{}", memory.get_appropriate_unit(UnitType::Binary))
}
None => f.write_str("unknown"), None => f.write_str("unknown"),
} }
} }
@ -844,11 +846,11 @@ fn default_env() -> String {
} }
fn default_max_index_size() -> Byte { fn default_max_index_size() -> Byte {
Byte::from_bytes(INDEX_SIZE) Byte::from_u64(INDEX_SIZE)
} }
fn default_max_task_db_size() -> Byte { fn default_max_task_db_size() -> Byte {
Byte::from_bytes(TASK_DB_SIZE) Byte::from_u64(TASK_DB_SIZE)
} }
fn default_http_payload_size_limit() -> Byte { fn default_http_payload_size_limit() -> Byte {

View File

@ -615,6 +615,8 @@ fn some_documents<'a, 't: 'a>(
document.remove("_vectors"); document.remove("_vectors");
} }
RetrieveVectors::Retrieve => { RetrieveVectors::Retrieve => {
// Clippy is simply wrong
#[allow(clippy::manual_unwrap_or_default)]
let mut vectors = match document.remove("_vectors") { let mut vectors = match document.remove("_vectors") {
Some(Value::Object(map)) => map, Some(Value::Object(map)) => map,
_ => Default::default(), _ => Default::default(),
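
On the `#[allow(clippy::manual_unwrap_or_default)]` added above: the lint suggests `unwrap_or_default()`, but the match also destructures the `Value::Object` variant into its inner map, so the suggested rewrite would not type-check. A minimal sketch of the shape that trips the lint (the helper name is illustrative):

```rust
use serde_json::{Map, Value};

// Extracts the inner map from `Value::Object`, falling back to an empty map
// when the field is absent or has another shape. Clippy's
// `manual_unwrap_or_default` fires here even though `unwrap_or_default()` on
// the `Option<Value>` would yield a `Value`, not a `Map`.
#[allow(clippy::manual_unwrap_or_default)]
fn take_vectors(document: &mut Map<String, Value>) -> Map<String, Value> {
    match document.remove("_vectors") {
        Some(Value::Object(map)) => map,
        _ => Default::default(),
    }
}
```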

View File

@ -1150,6 +1150,8 @@ fn make_hits(
permissive_json_pointer::select_values(&displayed_document, attributes_to_retrieve); permissive_json_pointer::select_values(&displayed_document, attributes_to_retrieve);
if retrieve_vectors == RetrieveVectors::Retrieve { if retrieve_vectors == RetrieveVectors::Retrieve {
// Clippy is wrong
#[allow(clippy::manual_unwrap_or_default)]
let mut vectors = match document.remove("_vectors") { let mut vectors = match document.remove("_vectors") {
Some(Value::Object(map)) => map, Some(Value::Object(map)) => map,
_ => Default::default(), _ => Default::default(),

View File

@ -1,5 +1,5 @@
use actix_web::http::StatusCode;
use actix_web::test; use actix_web::test;
use http::StatusCode;
use jsonwebtoken::{EncodingKey, Header}; use jsonwebtoken::{EncodingKey, Header};
use meili_snap::*; use meili_snap::*;
use uuid::Uuid; use uuid::Uuid;

View File

@ -6,7 +6,7 @@ use std::time::Duration;
use actix_http::body::MessageBody; use actix_http::body::MessageBody;
use actix_web::dev::ServiceResponse; use actix_web::dev::ServiceResponse;
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use byte_unit::{Byte, ByteUnit}; use byte_unit::{Byte, Unit};
use clap::Parser; use clap::Parser;
use meilisearch::option::{IndexerOpts, MaxMemory, Opt}; use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch::{analytics, create_app, setup_meilisearch, SubscriberForSecondLayer}; use meilisearch::{analytics, create_app, setup_meilisearch, SubscriberForSecondLayer};
@ -231,9 +231,9 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
env: "development".to_owned(), env: "development".to_owned(),
#[cfg(feature = "analytics")] #[cfg(feature = "analytics")]
no_analytics: true, no_analytics: true,
max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(), max_index_size: Byte::from_u64_with_unit(100, Unit::MiB).unwrap(),
max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(), max_task_db_size: Byte::from_u64_with_unit(1, Unit::GiB).unwrap(),
http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(), http_payload_size_limit: Byte::from_u64_with_unit(10, Unit::MiB).unwrap(),
snapshot_dir: ".".into(), snapshot_dir: ".".into(),
indexer_options: IndexerOpts { indexer_options: IndexerOpts {
// memory has to be unlimited because several meilisearch are running in test context. // memory has to be unlimited because several meilisearch are running in test context.
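
The test defaults above switch constructors as part of the same byte-unit bump: `Byte::from_unit(10.0, ByteUnit::MiB)` becomes `Byte::from_u64_with_unit(10, Unit::MiB)`, which still returns an `Option`, hence the `.unwrap()`. A minimal sketch:

```rust
use byte_unit::{Byte, Unit};

fn main() {
    // 10 MiB expressed through the new integer-based constructor;
    // `None` is only returned on overflow.
    let payload_limit = Byte::from_u64_with_unit(10, Unit::MiB).unwrap();
    assert_eq!(payload_limit.as_u64(), 10 * 1024 * 1024);
}
```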

View File

@ -2274,7 +2274,7 @@ async fn error_add_documents_payload_size() {
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###" @r###"
{ {
"message": "The provided payload reached the size limit. The maximum accepted payload size is 10.00 MiB.", "message": "The provided payload reached the size limit. The maximum accepted payload size is 10 MiB.",
"code": "payload_too_large", "code": "payload_too_large",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#payload_too_large" "link": "https://docs.meilisearch.com/errors#payload_too_large"

View File

@ -1,5 +1,5 @@
use actix_web::http::header::ACCEPT_ENCODING;
use actix_web::test; use actix_web::test;
use http::header::ACCEPT_ENCODING;
use meili_snap::*; use meili_snap::*;
use urlencoding::encode as urlencode; use urlencoding::encode as urlencode;

View File

@ -1,6 +1,5 @@
use actix_web::http::header::ContentType; use actix_web::http::header::{ContentType, ACCEPT_ENCODING};
use actix_web::test; use actix_web::test;
use http::header::ACCEPT_ENCODING;
use meili_snap::{json_string, snapshot}; use meili_snap::{json_string, snapshot};
use meilisearch::Opt; use meilisearch::Opt;

View File

@ -9,11 +9,11 @@ edition.workspace = true
license.workspace = true license.workspace = true
[dependencies] [dependencies]
anyhow = "1.0.79" anyhow = "1.0.86"
clap = { version = "4.4.17", features = ["derive"] } clap = { version = "4.5.9", features = ["derive"] }
dump = { path = "../dump" } dump = { path = "../dump" }
file-store = { path = "../file-store" } file-store = { path = "../file-store" }
meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" } meilisearch-types = { path = "../meilisearch-types" }
time = { version = "0.3.31", features = ["formatting"] } time = { version = "0.3.36", features = ["formatting"] }
uuid = { version = "1.6.1", features = ["v4"], default-features = false } uuid = { version = "1.10.0", features = ["v4"], default-features = false }

View File

@ -14,81 +14,80 @@ license.workspace = true
[dependencies] [dependencies]
bimap = { version = "0.6.3", features = ["serde"] } bimap = { version = "0.6.3", features = ["serde"] }
bincode = "1.3.3" bincode = "1.3.3"
bstr = "1.9.0" bstr = "1.9.1"
bytemuck = { version = "1.14.0", features = ["extern_crate_alloc"] } bytemuck = { version = "1.16.1", features = ["extern_crate_alloc"] }
byteorder = "1.5.0" byteorder = "1.5.0"
charabia = { version = "0.8.11", default-features = false } charabia = { version = "0.8.12", default-features = false }
concat-arrays = "0.1.2" concat-arrays = "0.1.2"
crossbeam-channel = "0.5.11" crossbeam-channel = "0.5.13"
deserr = "0.6.1" deserr = "0.6.2"
either = { version = "1.9.0", features = ["serde"] } either = { version = "1.13.0", features = ["serde"] }
flatten-serde-json = { path = "../flatten-serde-json" } flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7" fst = "0.4.7"
fxhash = "0.2.1" fxhash = "0.2.1"
geoutils = "0.5.1" geoutils = "0.5.1"
grenad = { version = "0.4.6", default-features = false, features = [ grenad = { version = "0.4.7", default-features = false, features = [
"rayon", "rayon",
"tempfile", "tempfile",
] } ] }
heed = { version = "0.20.1", default-features = false, features = [ heed = { version = "0.20.3", default-features = false, features = [
"serde-json", "serde-json",
"serde-bincode", "serde-bincode",
"read-txn-no-tls", "read-txn-no-tls",
] } ] }
indexmap = { version = "2.1.0", features = ["serde"] } indexmap = { version = "2.2.6", features = ["serde"] }
json-depth-checker = { path = "../json-depth-checker" } json-depth-checker = { path = "../json-depth-checker" }
levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] } levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
memmap2 = "0.7.1" memmap2 = "0.9.4"
obkv = "0.2.1" obkv = "0.2.2"
once_cell = "1.19.0" once_cell = "1.19.0"
ordered-float = "4.2.0" ordered-float = "4.2.1"
rand_pcg = { version = "0.3.1", features = ["serde1"] } rayon = "1.10.0"
rayon = "1.8.0" roaring = { version = "0.10.6", features = ["serde"] }
roaring = { version = "0.10.2", features = ["serde"] } rstar = { version = "0.12.0", features = ["serde"] }
rstar = { version = "0.11.0", features = ["serde"] } serde = { version = "1.0.204", features = ["derive"] }
serde = { version = "1.0.195", features = ["derive"] } serde_json = { version = "1.0.120", features = ["preserve_order"] }
serde_json = { version = "1.0.111", features = ["preserve_order"] }
slice-group-by = "0.3.1" slice-group-by = "0.3.1"
smallstr = { version = "0.3.0", features = ["serde"] } smallstr = { version = "0.3.0", features = ["serde"] }
smallvec = "1.12.0" smallvec = "1.13.2"
smartstring = "1.0.1" smartstring = "1.0.1"
tempfile = "3.9.0" tempfile = "3.10.1"
thiserror = "1.0.56" thiserror = "1.0.61"
time = { version = "0.3.31", features = [ time = { version = "0.3.36", features = [
"serde-well-known", "serde-well-known",
"formatting", "formatting",
"parsing", "parsing",
"macros", "macros",
] } ] }
uuid = { version = "1.6.1", features = ["v4"] } uuid = { version = "1.10.0", features = ["v4"] }
filter-parser = { path = "../filter-parser" } filter-parser = { path = "../filter-parser" }
# documents words self-join # documents words self-join
itertools = "0.11.0" itertools = "0.13.0"
csv = "1.3.0" csv = "1.3.0"
candle-core = { version = "0.4.1" } candle-core = { version = "0.6.0" }
candle-transformers = { version = "0.4.1" } candle-transformers = { version = "0.6.0" }
candle-nn = { version = "0.4.1" } candle-nn = { version = "0.6.0" }
tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false, features = [ tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false, features = [
"onig", "onig",
] } ] }
hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false, features = [ hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false, features = [
"online", "online",
] } ] }
tiktoken-rs = "0.5.8" tiktoken-rs = "0.5.9"
liquid = "0.26.4" liquid = "0.26.6"
arroy = "0.4.0" arroy = "0.4.0"
rand = "0.8.5" rand = "0.8.5"
tracing = "0.1.40" tracing = "0.1.40"
ureq = { version = "2.9.7", features = ["json"] } ureq = { version = "2.10.0", features = ["json"] }
url = "2.5.0" url = "2.5.2"
[dev-dependencies] [dev-dependencies]
mimalloc = { version = "0.1.39", default-features = false } mimalloc = { version = "0.1.43", default-features = false }
big_s = "1.0.2" big_s = "1.0.2"
insta = "1.34.0" insta = "1.39.0"
maplit = "1.0.2" maplit = "1.0.2"
md5 = "0.7.0" md5 = "0.7.0"
meili-snap = { path = "../meili-snap" } meili-snap = { path = "../meili-snap" }

View File

@ -25,7 +25,7 @@ impl RoaringBitmapLenCodec {
} }
}; };
if size > u16::max_value() as usize + 1 { if size > u16::MAX as usize + 1 {
return Err(io::Error::new(io::ErrorKind::Other, "size is greater than supported")); return Err(io::Error::new(io::ErrorKind::Other, "size is greater than supported"));
} }

View File

@ -2120,7 +2120,7 @@ pub(crate) mod tests {
.filter(Filter::from_str("_geoBoundingBox([-80, 0], [80, 0])").unwrap().unwrap()) .filter(Filter::from_str("_geoBoundingBox([-80, 0], [80, 0])").unwrap().unwrap())
.execute() .execute()
.unwrap_err(); .unwrap_err();
insta::assert_display_snapshot!( insta::assert_snapshot!(
error, error,
@r###" @r###"
The top latitude `-80` is below the bottom latitude `80`. The top latitude `-80` is below the bottom latitude `80`.
@ -2133,7 +2133,7 @@ pub(crate) mod tests {
.filter(Filter::from_str("_geoBoundingBox([-10, 0], [10, 0])").unwrap().unwrap()) .filter(Filter::from_str("_geoBoundingBox([-10, 0], [10, 0])").unwrap().unwrap())
.execute() .execute()
.unwrap_err(); .unwrap_err();
insta::assert_display_snapshot!( insta::assert_snapshot!(
error, error,
@r###" @r###"
The top latitude `-10` is below the bottom latitude `10`. The top latitude `-10` is below the bottom latitude `10`.
@ -2715,7 +2715,7 @@ pub(crate) mod tests {
documents!({ "id" : "doggo", "_geo": { "lat": 1, "lng": 2, "doggo": "are the best" }}), documents!({ "id" : "doggo", "_geo": { "lat": 1, "lng": 2, "doggo": "are the best" }}),
) )
.unwrap_err(); .unwrap_err();
insta::assert_display_snapshot!(err, @r###"The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"doggo":"are the best"}`."###); insta::assert_snapshot!(err, @r###"The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"doggo":"are the best"}`."###);
db_snap!(index, geo_faceted_documents_ids); // ensure that no documents were inserted db_snap!(index, geo_faceted_documents_ids); // ensure that no documents were inserted
@ -2725,7 +2725,7 @@ pub(crate) mod tests {
documents!({ "id" : "doggo", "_geo": { "lat": 1, "lng": 2, "doggo": "are the best", "and": { "all": ["cats", { "are": "beautiful" } ] } } }), documents!({ "id" : "doggo", "_geo": { "lat": 1, "lng": 2, "doggo": "are the best", "and": { "all": ["cats", { "are": "beautiful" } ] } } }),
) )
.unwrap_err(); .unwrap_err();
insta::assert_display_snapshot!(err, @r###"The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"and":{"all":["cats",{"are":"beautiful"}]},"doggo":"are the best"}`."###); insta::assert_snapshot!(err, @r###"The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"and":{"all":["cats",{"are":"beautiful"}]},"doggo":"are the best"}`."###);
db_snap!(index, geo_faceted_documents_ids); // ensure that no documents were inserted db_snap!(index, geo_faceted_documents_ids); // ensure that no documents were inserted
} }

View File

@ -316,7 +316,7 @@ impl QueryGraph {
term_docids term_docids
.into_iter() .into_iter()
.map(|(idx, docids)| match docids.len() { .map(|(idx, docids)| match docids.len() {
0 => (idx, u64::max_value()), 0 => (idx, u64::MAX),
frequency => (idx, frequency), frequency => (idx, frequency),
}) })
.collect() .collect()

View File

@ -15,7 +15,7 @@ impl AvailableDocumentsIds {
available -= docids; available -= docids;
let iter = match last_id.checked_add(1) { let iter = match last_id.checked_add(1) {
Some(id) => id..=u32::max_value(), Some(id) => id..=u32::MAX,
#[allow(clippy::reversed_empty_ranges)] #[allow(clippy::reversed_empty_ranges)]
None => 1..=0, // empty range iterator None => 1..=0, // empty range iterator
}; };
@ -24,7 +24,7 @@ impl AvailableDocumentsIds {
} }
None => { None => {
let empty = RoaringBitmap::new().into_iter(); let empty = RoaringBitmap::new().into_iter();
AvailableDocumentsIds { iter: empty.chain(0..=u32::max_value()) } AvailableDocumentsIds { iter: empty.chain(0..=u32::MAX) }
} }
} }
} }
@ -46,7 +46,7 @@ mod tests {
fn empty() { fn empty() {
let base = RoaringBitmap::new(); let base = RoaringBitmap::new();
let left = AvailableDocumentsIds::from_documents_ids(&base); let left = AvailableDocumentsIds::from_documents_ids(&base);
let right = 0..=u32::max_value(); let right = 0..=u32::MAX;
left.zip(right).take(500).for_each(|(l, r)| assert_eq!(l, r)); left.zip(right).take(500).for_each(|(l, r)| assert_eq!(l, r));
} }
@ -59,7 +59,7 @@ mod tests {
base.insert(405); base.insert(405);
let left = AvailableDocumentsIds::from_documents_ids(&base); let left = AvailableDocumentsIds::from_documents_ids(&base);
let right = (0..=u32::max_value()).filter(|&n| n != 0 && n != 10 && n != 100 && n != 405); let right = (0..=u32::MAX).filter(|&n| n != 0 && n != 10 && n != 100 && n != 405);
left.zip(right).take(500).for_each(|(l, r)| assert_eq!(l, r)); left.zip(right).take(500).for_each(|(l, r)| assert_eq!(l, r));
} }
} }
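
The `max_value()` changes in this hunk (and the `u16`/`u64` hunks earlier) swap the soft-deprecated associated functions for the associated constants; behaviour is unchanged. A minimal sketch:

```rust
fn main() {
    // Associated constants replace the deprecated *::max_value() calls.
    let full_range = 0..=u32::MAX; // e.g. the available-ids iterator above
    assert_eq!(*full_range.start(), u32::MIN);
    assert_eq!(*full_range.end(), u32::MAX);
}
```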

View File

@ -2229,10 +2229,10 @@ mod tests {
{ "id": 3, "name": "jean", "age": 25 }, { "id": 3, "name": "jean", "age": 25 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"3"); insta::assert_snapshot!(added.unwrap(), @"3");
let (builder, removed) = builder.remove_documents(vec![S("2")]).unwrap(); let (builder, removed) = builder.remove_documents(vec![S("2")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"1"); insta::assert_snapshot!(removed.unwrap(), @"1");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2271,17 +2271,17 @@ mod tests {
{ "id": 3, "name": "jean", "age": 25 }, { "id": 3, "name": "jean", "age": 25 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"3"); insta::assert_snapshot!(added.unwrap(), @"3");
let documents = documents!([ let documents = documents!([
{ "id": 2, "catto": "jorts" }, { "id": 2, "catto": "jorts" },
{ "id": 3, "legs": 4 }, { "id": 3, "legs": 4 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"2"); insta::assert_snapshot!(added.unwrap(), @"2");
let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap(); let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"2"); insta::assert_snapshot!(removed.unwrap(), @"2");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2319,7 +2319,7 @@ mod tests {
{ "id": 3, "name": "jean", "age": 25 }, { "id": 3, "name": "jean", "age": 25 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"3"); insta::assert_snapshot!(added.unwrap(), @"3");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2354,10 +2354,10 @@ mod tests {
{ "id": 3, "legs": 4 }, { "id": 3, "legs": 4 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"2"); insta::assert_snapshot!(added.unwrap(), @"2");
let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap(); let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"2"); insta::assert_snapshot!(removed.unwrap(), @"2");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2390,14 +2390,14 @@ mod tests {
.unwrap(); .unwrap();
let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap(); let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"0"); insta::assert_snapshot!(removed.unwrap(), @"0");
let documents = documents!([ let documents = documents!([
{ "id": 2, "doggo": { "name": "jean", "age": 20 } }, { "id": 2, "doggo": { "name": "jean", "age": 20 } },
{ "id": 3, "name": "bob", "age": 25 }, { "id": 3, "name": "bob", "age": 25 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"2"); insta::assert_snapshot!(added.unwrap(), @"2");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2432,7 +2432,7 @@ mod tests {
let (builder, removed) = let (builder, removed) =
builder.remove_documents(vec![S("1"), S("2"), S("1"), S("2")]).unwrap(); builder.remove_documents(vec![S("1"), S("2"), S("1"), S("2")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"0"); insta::assert_snapshot!(removed.unwrap(), @"0");
let documents = documents!([ let documents = documents!([
{ "id": 1, "doggo": "kevin" }, { "id": 1, "doggo": "kevin" },
@ -2440,11 +2440,11 @@ mod tests {
{ "id": 3, "name": "bob", "age": 25 }, { "id": 3, "name": "bob", "age": 25 },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"3"); insta::assert_snapshot!(added.unwrap(), @"3");
let (builder, removed) = let (builder, removed) =
builder.remove_documents(vec![S("1"), S("2"), S("1"), S("2")]).unwrap(); builder.remove_documents(vec![S("1"), S("2"), S("1"), S("2")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"2"); insta::assert_snapshot!(removed.unwrap(), @"2");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2480,7 +2480,7 @@ mod tests {
{ "id": 1, "doggo": "kevin" }, { "id": 1, "doggo": "kevin" },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"1"); insta::assert_snapshot!(added.unwrap(), @"1");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2509,13 +2509,13 @@ mod tests {
.unwrap(); .unwrap();
let (builder, removed) = builder.remove_documents(vec![S("1")]).unwrap(); let (builder, removed) = builder.remove_documents(vec![S("1")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"1"); insta::assert_snapshot!(removed.unwrap(), @"1");
let documents = documents!([ let documents = documents!([
{ "id": 1, "catto": "jorts" }, { "id": 1, "catto": "jorts" },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"1"); insta::assert_snapshot!(added.unwrap(), @"1");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2692,7 +2692,7 @@ mod tests {
{ "id": 1, "doggo": "bernese" }, { "id": 1, "doggo": "bernese" },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"1"); insta::assert_snapshot!(added.unwrap(), @"1");
// FINISHING // FINISHING
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
@ -2731,13 +2731,13 @@ mod tests {
.unwrap(); .unwrap();
let (builder, removed) = builder.remove_documents(vec![S("1")]).unwrap(); let (builder, removed) = builder.remove_documents(vec![S("1")]).unwrap();
insta::assert_display_snapshot!(removed.unwrap(), @"1"); insta::assert_snapshot!(removed.unwrap(), @"1");
let documents = documents!([ let documents = documents!([
{ "id": 0, "catto": "jorts" }, { "id": 0, "catto": "jorts" },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"1"); insta::assert_snapshot!(added.unwrap(), @"1");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"
@ -2777,7 +2777,7 @@ mod tests {
{ "id": 1, "catto": "jorts" }, { "id": 1, "catto": "jorts" },
]); ]);
let (builder, added) = builder.add_documents(documents).unwrap(); let (builder, added) = builder.add_documents(documents).unwrap();
insta::assert_display_snapshot!(added.unwrap(), @"1"); insta::assert_snapshot!(added.unwrap(), @"1");
let addition = builder.execute().unwrap(); let addition = builder.execute().unwrap();
insta::assert_debug_snapshot!(addition, @r###" insta::assert_debug_snapshot!(addition, @r###"

View File

@ -1,3 +1,3 @@
[toolchain] [toolchain]
channel = "1.75.0" channel = "1.79.0"
components = ["clippy"] components = ["clippy"]

View File

@ -7,17 +7,18 @@ edition = "2021"
[dependencies] [dependencies]
color-spantrace = "0.2.1" color-spantrace = "0.2.1"
fxprof-processed-profile = "0.6.0" fxprof-processed-profile = "0.7.0"
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde_json = "1.0.111" serde_json = "1.0.120"
tracing = "0.1.40" tracing = "0.1.40"
tracing-error = "0.2.0" tracing-error = "0.2.0"
tracing-subscriber = "0.3.18" tracing-subscriber = "0.3.18"
byte-unit = { version = "4.0.19", default-features = false, features = [ byte-unit = { version = "5.1.4", default-features = false, features = [
"std", "std",
"byte",
"serde", "serde",
] } ] }
tokio = { version = "1.35.1", features = ["sync"] } tokio = { version = "1.38.0", features = ["sync"] }
[target.'cfg(any(target_os = "linux", target_os = "macos"))'.dependencies] [target.'cfg(any(target_os = "linux", target_os = "macos"))'.dependencies]
libproc = "0.14.2" libproc = "0.14.8"

View File

@ -1,9 +1,10 @@
use std::collections::HashMap; use std::collections::HashMap;
use fxprof_processed_profile::{ use fxprof_processed_profile::{
CategoryPairHandle, CounterHandle, CpuDelta, Frame, FrameFlags, FrameInfo, MarkerDynamicField, CategoryHandle, CategoryPairHandle, CounterHandle, CpuDelta, Frame, FrameFlags, FrameInfo,
MarkerFieldFormat, MarkerLocation, MarkerSchema, MarkerSchemaField, ProcessHandle, Profile, MarkerDynamicField, MarkerFieldFormat, MarkerLocation, MarkerSchema, MarkerSchemaField,
ProfilerMarker, ReferenceTimestamp, SamplingInterval, StringHandle, Timestamp, ProcessHandle, Profile, ProfilerMarker, ReferenceTimestamp, SamplingInterval, StringHandle,
Timestamp,
}; };
use serde_json::json; use serde_json::json;
@ -129,6 +130,7 @@ pub fn to_firefox_profile<R: std::io::Read>(
profile.add_marker_with_stack( profile.add_marker_with_stack(
*thread_handle, *thread_handle,
CategoryHandle::OTHER,
&callsite.name, &callsite.name,
marker, marker,
fxprof_processed_profile::MarkerTiming::Interval( fxprof_processed_profile::MarkerTiming::Interval(
@ -179,6 +181,7 @@ pub fn to_firefox_profile<R: std::io::Read>(
profile.add_marker_with_stack( profile.add_marker_with_stack(
*thread_handle, *thread_handle,
CategoryHandle::OTHER,
&callsite.name, &callsite.name,
marker, marker,
fxprof_processed_profile::MarkerTiming::Instant(timestamp), fxprof_processed_profile::MarkerTiming::Instant(timestamp),

View File

@ -1,6 +1,8 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Read; use std::io::Read;
use byte_unit::UnitType;
use crate::entry::{ use crate::entry::{
Entry, Event, MemoryStats, NewCallsite, NewSpan, NewThread, ResourceId, SpanClose, SpanEnter, Entry, Event, MemoryStats, NewCallsite, NewSpan, NewThread, ResourceId, SpanClose, SpanEnter,
SpanExit, SpanId, SpanExit, SpanId,
@ -190,6 +192,6 @@ fn print_duration(duration: std::time::Duration) -> String {
/// Format only the allocated bytes, deallocated bytes and reallocated bytes in GiB, MiB, KiB, Bytes. /// Format only the allocated bytes, deallocated bytes and reallocated bytes in GiB, MiB, KiB, Bytes.
fn print_memory(MemoryStats { resident }: MemoryStats) -> String { fn print_memory(MemoryStats { resident }: MemoryStats) -> String {
use byte_unit::Byte; use byte_unit::Byte;
let rss_bytes = Byte::from_bytes(resident).get_appropriate_unit(true); let rss_bytes = Byte::from_u64(resident).get_appropriate_unit(UnitType::Binary);
format!("RSS {rss_bytes:.2}") format!("RSS {rss_bytes:.2}")
} }

View File

@ -11,27 +11,27 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
anyhow = "1.0.79" anyhow = "1.0.86"
build-info = { version = "1.7.0", path = "../build-info" } build-info = { version = "1.7.0", path = "../build-info" }
cargo_metadata = "0.18.1" cargo_metadata = "0.18.1"
clap = { version = "4.4.14", features = ["derive"] } clap = { version = "4.5.9", features = ["derive"] }
futures-core = "0.3.30" futures-core = "0.3.30"
futures-util = "0.3.30" futures-util = "0.3.30"
reqwest = { version = "0.11.23", features = [ reqwest = { version = "0.12.5", features = [
"stream", "stream",
"json", "json",
"rustls-tls", "rustls-tls",
], default-features = false } ], default-features = false }
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.204", features = ["derive"] }
serde_json = "1.0.111" serde_json = "1.0.120"
sha2 = "0.10.8" sha2 = "0.10.8"
sysinfo = "0.30.5" sysinfo = "0.30.13"
time = { version = "0.3.32", features = [ time = { version = "0.3.36", features = [
"serde", "serde",
"serde-human-readable", "serde-human-readable",
"macros", "macros",
] } ] }
tokio = { version = "1.35.1", features = [ tokio = { version = "1.38.0", features = [
"rt", "rt",
"net", "net",
"time", "time",
@ -41,4 +41,4 @@ tokio = { version = "1.35.1", features = [
tracing = "0.1.40" tracing = "0.1.40"
tracing-subscriber = "0.3.18" tracing-subscriber = "0.3.18"
tracing-trace = { version = "0.1.0", path = "../tracing-trace" } tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
uuid = { version = "1.7.0", features = ["v7", "serde"] } uuid = { version = "1.10.0", features = ["v7", "serde"] }