[package]
name = "meilisearch-types"
publish = false
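# Note (added comment): fields marked `.workspace = true` below are inherited
# from the `[workspace.package]` table of the workspace root Cargo.toml.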
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true

[dependencies]
actix-web = { version = "4.4.1", default-features = false }
anyhow = "1.0.79"
convert_case = "0.6.0"
csv = "1.3.0"
deserr = { version = "0.6.1", features = ["actix-web"] }
either = { version = "1.9.0", features = ["serde"] }
enum-iterator = "1.5.0"
file-store = { path = "../file-store" }
flate2 = "1.0.28"
fst = "0.4.7"
memmap2 = "0.7.1"
milli = { path = "../milli" }
roaring = { version = "0.10.2", features = ["serde"] }
serde = { version = "1.0.195", features = ["derive"] }
serde-cs = "0.2.4"
serde_json = "1.0.111"
tar = "0.4.40"
tempfile = "3.9.0"
thiserror = "1.0.56"
time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tokio = "1.35"
uuid = { version = "1.6.1", features = ["serde", "v4"] }

[dev-dependencies]
insta = "1.34.0"
meili-snap = { path = "../meili-snap" }

[features]
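# Note (added comment): each feature below forwards to the matching feature of
# the `milli` crate. Illustrative usage, not part of the original manifest:
#   cargo build -p meilisearch-types --features "chinese japanese"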
# all specialized tokenizations
all-tokenizations = ["milli/all-tokenizations"]

# chinese specialized tokenization
chinese = ["milli/chinese"]

# hebrew specialized tokenization
hebrew = ["milli/hebrew"]

# japanese specialized tokenization
japanese = ["milli/japanese"]

# thai specialized tokenization
thai = ["milli/thai"]

# allow greek specialized tokenization
greek = ["milli/greek"]

# allow khmer specialized tokenization
khmer = ["milli/khmer"]

# allow vietnamese specialized tokenization
vietnamese = ["milli/vietnamese"]