Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-12-23 21:20:24 +01:00.
Commit b215e9e848
Changelog (hunk @@ -1,5 +1,6 @@):

 ## v0.10.2

+- Add support for faceted search (#631)
 - Add support for configuring the lmdb map size (#646, #647)
 - Add exposed port for Dockerfile (#654)
 - Add sentry probe
Cargo.lock (generated, 196 lines changed)

Dependency updates recorded in the lockfile:

- actix-rt 1.1.0 -> 1.1.1
- ahash 0.3.3 -> 0.3.4 (no longer depends on const-random)
- backtrace-sys 0.1.36 -> 0.1.37
- base64 0.12.0 -> 0.12.1
- criterion 0.3.1 -> 0.3.2
- criterion-plot 0.4.1 -> 0.4.2
- failure 0.1.7 -> 0.1.8
- failure_derive 0.1.7 -> 0.1.8
- h2 0.2.4 -> 0.2.5
- hashbrown 0.7.1 -> 0.7.2
- ipconfig 0.2.1 -> 0.2.2
- js-sys 0.3.37 -> 0.3.39
- linked-hash-map 0.5.2 -> 0.5.3
- mio 0.6.21 -> 0.6.22
- mio-uds 0.6.7 -> 0.6.8
- net2 0.2.33 -> 0.2.34
- oorandom 11.1.0 -> 11.1.1
- pin-project 0.4.9 -> 0.4.13
- pin-project-internal 0.4.9 -> 0.4.13
- pin-project-lite 0.1.4 -> 0.1.5
- plotters 0.2.12 -> 0.2.14
- proc-macro2 1.0.10 -> 1.0.12
- quote 1.0.3 -> 1.0.4
- ring 0.16.12 -> 0.16.13
- serde_json 1.0.51 -> 1.0.52
- syn 1.0.18 -> 1.0.19
- tokio 0.2.19 -> 0.2.20
- untrusted 0.7.0 -> 0.7.1
- vec_map 0.8.1 -> 0.8.2
- wasm-bindgen, wasm-bindgen-backend, wasm-bindgen-macro, wasm-bindgen-macro-support, wasm-bindgen-shared 0.2.60 -> 0.2.62
- wasm-bindgen-futures 0.4.10 -> 0.4.12
- web-sys 0.3.37 -> 0.3.39

New lockfile entry: cow-utils 0.1.2.
Removed entries: const-random 0.1.8, const-random-macro 0.1.8, and itertools 0.8.2 (only itertools 0.9.0 remains, so criterion, criterion-plot and meilisearch-core now reference "itertools" without a version qualifier).
The meilisearch-core package gains cow-utils and either as dependencies, ureq now references base64 0.12.1, and one other workspace crate adds zerocopy to its dependency list.
Cargo.toml (meilisearch-core dependencies)

@@ -11,8 +11,10 @@ bincode = "1.2.1"
 byteorder = "1.3.4"
 chrono = { version = "0.4.11", features = ["serde"] }
 compact_arena = "0.4.0"
+cow-utils = "0.1.2"
 crossbeam-channel = "0.4.2"
 deunicode = "1.1.0"
+either = "1.5.3"
 env_logger = "0.7.1"
 fst = { version = "0.3.5", default-features = false }
 hashbrown = { version = "0.7.1", features = ["serde"] }
meilisearch-core/src/bucket_sort.rs

@@ -11,7 +11,7 @@ use std::fmt;
 use compact_arena::{SmallArena, Idx32, mk_arena};
 use log::debug;
 use meilisearch_types::DocIndex;
-use sdset::{Set, SetBuf, exponential_search};
+use sdset::{Set, SetBuf, exponential_search, SetOperation};
 use slice_group_by::{GroupBy, GroupByMut};

 use crate::error::Error;

@@ -28,6 +28,7 @@ pub fn bucket_sort<'c, FI>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
     range: Range<usize>,
+    facets_docids: Option<SetBuf<DocumentId>>,
     filter: Option<FI>,
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,

@@ -50,6 +51,7 @@ where
             reader,
             query,
             range,
+            facets_docids,
             filter,
             distinct,
             distinct_size,

@@ -94,10 +96,17 @@ where
     let mut queries_kinds = HashMap::new();
     recurs_operation(&mut queries_kinds, &operation);

-    let QueryResult { docids, queries } = traverse_query_tree(reader, &context, &operation)?;
+    let QueryResult { mut docids, queries } = traverse_query_tree(reader, &context, &operation)?;
     debug!("found {} documents", docids.len());
     debug!("number of postings {:?}", queries.len());

+    if let Some(facets_docids) = facets_docids {
+        let intersection = sdset::duo::OpBuilder::new(docids.as_ref(), facets_docids.as_set())
+            .intersection()
+            .into_set_buf();
+        docids = Cow::Owned(intersection);
+    }
+
     let before = Instant::now();
     mk_arena!(arena);
     let mut bare_matches = cleanup_bare_matches(&mut arena, &docids, queries);

@@ -179,6 +188,7 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
     range: Range<usize>,
+    facets_docids: Option<SetBuf<DocumentId>>,
     filter: Option<FI>,
     distinct: FD,
     distinct_size: usize,

@@ -225,10 +235,17 @@ where
     let mut queries_kinds = HashMap::new();
     recurs_operation(&mut queries_kinds, &operation);

-    let QueryResult { docids, queries } = traverse_query_tree(reader, &context, &operation)?;
+    let QueryResult { mut docids, queries } = traverse_query_tree(reader, &context, &operation)?;
     debug!("found {} documents", docids.len());
     debug!("number of postings {:?}", queries.len());

+    if let Some(facets_docids) = facets_docids {
+        let intersection = sdset::duo::OpBuilder::new(docids.as_ref(), facets_docids.as_set())
+            .intersection()
+            .into_set_buf();
+        docids = Cow::Owned(intersection);
+    }
+
     let before = Instant::now();
     mk_arena!(arena);
     let mut bare_matches = cleanup_bare_matches(&mut arena, &docids, queries);
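The new facets_docids branch narrows the candidate documents before bucketing. Below is a minimal sketch of the same sdset intersection pattern, using plain u64 ids instead of DocumentId; the helper name and sample values are made up for illustration, and it assumes the same sdset crate this commit uses.

```rust
use sdset::{Set, SetBuf, SetOperation};

// Keep only the document ids that also appear in the facet result set.
fn keep_only_faceted(docids: &Set<u64>, facets_docids: &Set<u64>) -> SetBuf<u64> {
    // Both inputs are sorted and deduplicated (the `Set` invariant),
    // so the intersection is a single linear merge pass.
    sdset::duo::OpBuilder::new(docids, facets_docids)
        .intersection()
        .into_set_buf()
}

fn main() {
    let docids = SetBuf::from_dirty(vec![1, 3, 5, 7]);
    let facets = SetBuf::from_dirty(vec![3, 4, 7, 9]);
    assert_eq!(keep_only_faceted(docids.as_set(), facets.as_set()).as_slice(), &[3, 7]);
}
```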
meilisearch-core/src/error.rs

@@ -28,7 +28,8 @@ pub enum Error {
     Serializer(SerializerError),
     Deserializer(DeserializerError),
     UnsupportedOperation(UnsupportedOperation),
-    FilterParseError(PestError<Rule>)
+    FilterParseError(PestError<Rule>),
+    FacetError(FacetError),
 }

 impl From<io::Error> for Error {

@@ -57,7 +58,13 @@ impl From<PestError<Rule>> for Error {
             s.to_string()
         }))
     }
 }

+impl From<FacetError> for Error {
+    fn from(error: FacetError) -> Error {
+        Error::FacetError(error)
+    }
+}
+
 impl From<meilisearch_schema::Error> for Error {
     fn from(error: meilisearch_schema::Error) -> Error {

@@ -127,6 +134,7 @@ impl fmt::Display for Error {
             Deserializer(e) => write!(f, "deserializer error; {}", e),
             UnsupportedOperation(op) => write!(f, "unsupported operation; {}", op),
             FilterParseError(e) => write!(f, "error parsing filter; {}", e),
+            FacetError(e) => write!(f, "error processing facet filter: {}", e),
         }
     }
 }

@@ -156,3 +164,40 @@ impl fmt::Display for UnsupportedOperation {
         }
     }
 }
+
+#[derive(Debug)]
+pub enum FacetError {
+    EmptyArray,
+    ParsingError(String),
+    UnexpectedToken { expected: &'static [&'static str], found: String },
+    InvalidFormat(String),
+    AttributeNotFound(String),
+    AttributeNotSet { expected: Vec<String>, found: String },
+    InvalidDocumentAttribute(String),
+}
+
+impl FacetError {
+    pub fn unexpected_token(expected: &'static [&'static str], found: impl ToString) -> FacetError {
+        FacetError::UnexpectedToken{ expected, found: found.to_string() }
+    }
+
+    pub fn attribute_not_set(expected: Vec<String>, found: impl ToString) -> FacetError {
+        FacetError::AttributeNotSet{ expected, found: found.to_string() }
+    }
+}
+
+impl fmt::Display for FacetError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        use FacetError::*;
+
+        match self {
+            EmptyArray => write!(f, "empty array in facet filter is unspecified behavior"),
+            ParsingError(msg) => write!(f, "parsing error: {}", msg),
+            UnexpectedToken { expected, found } => write!(f, "unexpected token {}, expected {}", found, expected.join("or")),
+            InvalidFormat(found) => write!(f, "invalid facet: {}, facets should be \"facetName:facetValue\"", found),
+            AttributeNotFound(attr) => write!(f, "unknown {:?} attribute", attr),
+            AttributeNotSet { found, expected } => write!(f, "`{}` is not set as a faceted attribute. available facet attributes: {}", found, expected.join(", ")),
+            InvalidDocumentAttribute(attr) => write!(f, "invalid document attribute {}, accepted types: String and [String]", attr),
+        }
+    }
+}
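A short hedged sketch of what the new From<FacetError> impl buys: facet code can return the crate-wide Error with `.into()` or `?` instead of wrapping the variant by hand. The helper below is hypothetical and exists only to illustrate the conversion.

```rust
// Hypothetical helper: reject an empty facet expression early.
fn check_not_empty(raw: &str) -> Result<(), Error> {
    if raw.trim() == "[]" {
        // FacetError is promoted to Error::FacetError by the From impl added above.
        return Err(FacetError::EmptyArray.into());
    }
    Ok(())
}
```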
meilisearch-core/src/facets.rs (new file, 353 lines)

use std::borrow::Cow;
use std::collections::HashMap;
use std::hash::Hash;
use std::ops::Deref;

use cow_utils::CowUtils;
use either::Either;
use heed::types::{Str, OwnedType};
use indexmap::IndexMap;
use serde_json::Value;

use meilisearch_schema::{FieldId, Schema};
use meilisearch_types::DocumentId;

use crate::database::MainT;
use crate::error::{FacetError, Error};
use crate::store::BEU16;

/// Data structure used to represent a boolean expression in the form of nested arrays.
/// Values in the outer array are and-ed together, values in the inner arrays are or-ed together.
#[derive(Debug, PartialEq)]
pub struct FacetFilter(Vec<Either<Vec<FacetKey>, FacetKey>>);

impl Deref for FacetFilter {
    type Target = Vec<Either<Vec<FacetKey>, FacetKey>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl FacetFilter {
    pub fn from_str(
        s: &str,
        schema: &Schema,
        attributes_for_faceting: &[FieldId],
    ) -> Result<Self, FacetError> {

        let parsed = serde_json::from_str::<Value>(s).map_err(|e| FacetError::ParsingError(e.to_string()))?;
        let mut filter = Vec::new();
        match parsed {
            Value::Array(and_exprs) => {
                if and_exprs.is_empty() {
                    return Err(FacetError::EmptyArray);
                }
                for expr in and_exprs {
                    match expr {
                        Value::String(s) => {
                            let key = FacetKey::from_str( &s, schema, attributes_for_faceting)?;
                            filter.push(Either::Right(key));
                        }
                        Value::Array(or_exprs) => {
                            if or_exprs.is_empty() {
                                return Err(FacetError::EmptyArray);
                            }
                            let mut inner = Vec::new();
                            for expr in or_exprs {
                                match expr {
                                    Value::String(s) => {
                                        let key = FacetKey::from_str( &s, schema, attributes_for_faceting)?;
                                        inner.push(key);
                                    }
                                    bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value)),
                                }
                            }
                            filter.push(Either::Left(inner));
                        }
                        bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value)),
                    }
                }
                return Ok(Self(filter));
            }
            bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value)),
        }
    }
}

#[derive(Debug, Eq, PartialEq, Hash)]
#[repr(C)]
pub struct FacetKey(FieldId, String);

impl FacetKey {
    pub fn new(field_id: FieldId, value: String) -> Self {
        let value = match value.cow_to_lowercase() {
            Cow::Borrowed(_) => value,
            Cow::Owned(s) => s,
        };
        Self(field_id, value)
    }

    pub fn key(&self) -> FieldId {
        self.0
    }

    pub fn value(&self) -> &str {
        &self.1
    }

    // TODO improve parser
    fn from_str(
        s: &str,
        schema: &Schema,
        attributes_for_faceting: &[FieldId],
    ) -> Result<Self, FacetError> {
        let mut split = s.splitn(2, ':');
        let key = split
            .next()
            .ok_or_else(|| FacetError::InvalidFormat(s.to_string()))?
            .trim();
        let field_id = schema
            .id(key)
            .ok_or_else(|| FacetError::AttributeNotFound(key.to_string()))?;

        if !attributes_for_faceting.contains(&field_id) {
            return Err(FacetError::attribute_not_set(
                attributes_for_faceting
                    .iter()
                    .filter_map(|&id| schema.name(id))
                    .map(str::to_string)
                    .collect::<Vec<_>>(),
                key))
        }
        let value = split
            .next()
            .ok_or_else(|| FacetError::InvalidFormat(s.to_string()))?
            .trim();
        // unquoting the string if need be:
        let mut indices = value.char_indices();
        let value = match (indices.next(), indices.last()) {
            (Some((s, '\'')), Some((e, '\''))) |
            (Some((s, '\"')), Some((e, '\"'))) => value[s + 1..e].to_string(),
            _ => value.to_string(),
        };
        Ok(Self::new(field_id, value))
    }
}

impl<'a> heed::BytesEncode<'a> for FacetKey {
    type EItem = FacetKey;

    fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<'a, [u8]>> {
        let mut buffer = Vec::with_capacity(2 + item.1.len());
        let id = BEU16::new(item.key().into());
        let id_bytes = OwnedType::bytes_encode(&id)?;
        let value_bytes = Str::bytes_encode(item.value())?;
        buffer.extend_from_slice(id_bytes.as_ref());
        buffer.extend_from_slice(value_bytes.as_ref());
        Some(Cow::Owned(buffer))
    }
}

impl<'a> heed::BytesDecode<'a> for FacetKey {
    type DItem = FacetKey;

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (id_bytes, value_bytes) = bytes.split_at(2);
        let id = OwnedType::<BEU16>::bytes_decode(id_bytes)?;
        let id = id.get().into();
        let string = Str::bytes_decode(&value_bytes)?;
        Some(FacetKey(id, string.to_string()))
    }
}

pub fn add_to_facet_map(
    facet_map: &mut HashMap<FacetKey, Vec<DocumentId>>,
    field_id: FieldId,
    value: Value,
    document_id: DocumentId,
) -> Result<(), FacetError> {
    let value = match value {
        Value::String(s) => s,
        // ignore null
        Value::Null => return Ok(()),
        value => return Err(FacetError::InvalidDocumentAttribute(value.to_string())),
    };
    let key = FacetKey::new(field_id, value);
    facet_map.entry(key).or_insert_with(Vec::new).push(document_id);
    Ok(())
}

pub fn facet_map_from_docids(
    rtxn: &heed::RoTxn<MainT>,
    index: &crate::Index,
    document_ids: &[DocumentId],
    attributes_for_facetting: &[FieldId],
) -> Result<HashMap<FacetKey, Vec<DocumentId>>, Error> {
    let mut facet_map = HashMap::new();
    for document_id in document_ids {
        for result in index
            .documents_fields
            .document_fields(rtxn, *document_id)?
        {
            let (field_id, bytes) = result?;
            if attributes_for_facetting.contains(&field_id) {
                match serde_json::from_slice(bytes)? {
                    Value::Array(values) => {
                        for v in values {
                            add_to_facet_map(&mut facet_map, field_id, v, *document_id)?;
                        }
                    }
                    v => add_to_facet_map(&mut facet_map, field_id, v, *document_id)?,
                };
            }
        }
    }
    Ok(facet_map)
}

pub fn facet_map_from_docs(
    schema: &Schema,
    documents: &HashMap<DocumentId, IndexMap<String, Value>>,
    attributes_for_facetting: &[FieldId],
) -> Result<HashMap<FacetKey, Vec<DocumentId>>, Error> {
    let mut facet_map = HashMap::new();
    let attributes_for_facetting = attributes_for_facetting
        .iter()
        .filter_map(|&id| schema.name(id).map(|name| (id, name)))
        .collect::<Vec<_>>();

    for (id, document) in documents {
        for (field_id, name) in &attributes_for_facetting {
            if let Some(value) = document.get(*name) {
                match value {
                    Value::Array(values) => {
                        for v in values {
                            add_to_facet_map(&mut facet_map, *field_id, v.clone(), *id)?;
                        }
                    }
                    v => add_to_facet_map(&mut facet_map, *field_id, v.clone(), *id)?,
                }
            }
        }
    }
    Ok(facet_map)
}

#[cfg(test)]
mod test {
    use super::*;
    use meilisearch_schema::Schema;

    #[test]
    fn test_facet_key() {
        let mut schema = Schema::new();
        let id = schema.insert_and_index("hello").unwrap();
        let facet_list = [schema.id("hello").unwrap()];
        assert_eq!(
            FacetKey::from_str("hello:12", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "12".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:\"foo bar\"", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "foo bar".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:'foo bar'", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "foo bar".to_string())
        );
        // weird case
        assert_eq!(
            FacetKey::from_str("hello:blabla:machin", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "blabla:machin".to_string())
        );

        assert_eq!(
            FacetKey::from_str("hello:\"\"", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "".to_string())
        );

        assert_eq!(
            FacetKey::from_str("hello:'", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "'".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:''", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "".to_string())
        );
        assert!(FacetKey::from_str("hello", &schema, &facet_list).is_err());
        assert!(FacetKey::from_str("toto:12", &schema, &facet_list).is_err());
    }

    #[test]
    fn test_parse_facet_array() {
        use either::Either::{Left, Right};
        let mut schema = Schema::new();
        let _id = schema.insert_and_index("hello").unwrap();
        let facet_list = [schema.id("hello").unwrap()];
        assert_eq!(
            FacetFilter::from_str("[[\"hello:12\"]]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Left(vec![FacetKey(FieldId(0), "12".to_string())])])
        );
        assert_eq!(
            FacetFilter::from_str("[\"hello:12\"]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Right(FacetKey(FieldId(0), "12".to_string()))])
        );
        assert_eq!(
            FacetFilter::from_str("[\"hello:12\", \"hello:13\"]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![
                Right(FacetKey(FieldId(0), "12".to_string())),
                Right(FacetKey(FieldId(0), "13".to_string()))
            ])
        );
        assert_eq!(
            FacetFilter::from_str("[[\"hello:12\", \"hello:13\"]]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Left(vec![
                FacetKey(FieldId(0), "12".to_string()),
                FacetKey(FieldId(0), "13".to_string())
            ])])
        );
        assert_eq!(
            FacetFilter::from_str(
                "[[\"hello:12\", \"hello:13\"], \"hello:14\"]",
                &schema,
                &facet_list
            )
            .unwrap(),
            FacetFilter(vec![
                Left(vec![
                    FacetKey(FieldId(0), "12".to_string()),
                    FacetKey(FieldId(0), "13".to_string())
                ]),
                Right(FacetKey(FieldId(0), "14".to_string()))
            ])
        );

        // invalid array depths
        assert!(FacetFilter::from_str(
            "[[[\"hello:12\", \"hello:13\"], \"hello:14\"]]",
            &schema,
            &facet_list
        )
        .is_err());
        assert!(FacetFilter::from_str(
            "[[[\"hello:12\", \"hello:13\"]], \"hello:14\"]]",
            &schema,
            &facet_list
        )
        .is_err());
        assert!(FacetFilter::from_str("\"hello:14\"", &schema, &facet_list).is_err());

        // unexisting key
        assert!(FacetFilter::from_str("[\"foo:12\"]", &schema, &facet_list).is_err());

        // invalid facet key
        assert!(FacetFilter::from_str("[\"foo=12\"]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"foo12\"]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"\"]", &schema, &facet_list).is_err());

        // empty array error
        assert!(FacetFilter::from_str("[]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"hello:12\", []]", &schema, &facet_list).is_err());
    }
}
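The tests above already exercise the grammar; the sketch below restates it with friendlier attribute names. The "director"/"genre" schema is hypothetical, and only attributes listed in attributes_for_faceting are accepted by the parser.

```rust
use meilisearch_schema::Schema;

fn facet_filter_example() {
    let mut schema = Schema::new();
    schema.insert_and_index("director").unwrap();
    schema.insert_and_index("genre").unwrap();
    let faceted = [schema.id("director").unwrap(), schema.id("genre").unwrap()];

    // Outer array = AND, inner array = OR:
    // (genre:comedy OR genre:horror) AND director:'Jordan Peele'
    let filter = FacetFilter::from_str(
        r#"[["genre:comedy", "genre:horror"], "director:'Jordan Peele'"]"#,
        &schema,
        &faceted,
    )
    .unwrap();

    // One OR group plus one single key, i.e. two AND clauses.
    assert_eq!(filter.len(), 2);
}
```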
meilisearch-core/src/lib.rs

@@ -19,14 +19,15 @@ mod ranked_map;
 mod raw_document;
 mod reordered_attrs;
 mod update;
-pub mod settings;
 pub mod criterion;
+pub mod facets;
 pub mod raw_indexer;
+pub mod settings;
 pub mod serde;
 pub mod store;

 pub use self::database::{BoxUpdateFn, Database, DatabaseOptions, MainT, UpdateT};
-pub use self::error::{Error, HeedError, FstError, MResult, pest_error};
+pub use self::error::{Error, HeedError, FstError, MResult, pest_error, FacetError};
 pub use self::filters::Filter;
 pub use self::number::{Number, ParseNumberError};
 pub use self::ranked_map::RankedMap;
meilisearch-core/src/query_builder.rs

@@ -1,66 +1,50 @@
-use std::ops::Range;
+use std::borrow::Cow;
+use std::ops::{Range, Deref};
 use std::time::Duration;

 use crate::database::MainT;
 use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct};
 use crate::{criterion::Criteria, Document, DocumentId};
 use crate::{reordered_attrs::ReorderedAttrs, store, MResult};
+use crate::facets::FacetFilter;

-pub struct QueryBuilder<'c, 'f, 'd> {
+use either::Either;
+use sdset::SetOperation;
+
+pub struct QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,
     filter: Option<Box<dyn Fn(DocumentId) -> bool + 'f>>,
     distinct: Option<(Box<dyn Fn(DocumentId) -> Option<u64> + 'd>, usize)>,
     timeout: Option<Duration>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    synonyms_store: store::Synonyms,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &'i store::Index,
+    facets: Option<&'fa FacetFilter>,
 }

-impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
-    pub fn new(
-        main: store::Main,
-        postings_lists: store::PostingsLists,
-        documents_fields_counts: store::DocumentsFieldsCounts,
-        synonyms: store::Synonyms,
-        prefix_documents_cache: store::PrefixDocumentsCache,
-        prefix_postings_lists_cache: store::PrefixPostingsListsCache,
-    ) -> QueryBuilder<'c, 'f, 'd> {
+impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
+    pub fn new(index: &'i store::Index) -> Self {
         QueryBuilder::with_criteria(
-            main,
-            postings_lists,
-            documents_fields_counts,
-            synonyms,
-            prefix_documents_cache,
-            prefix_postings_lists_cache,
+            index,
             Criteria::default(),
         )
     }

+    pub fn set_facets(&mut self, facets: Option<&'fa FacetFilter>) {
+        self.facets = facets;
+    }
+
     pub fn with_criteria(
-        main: store::Main,
-        postings_lists: store::PostingsLists,
-        documents_fields_counts: store::DocumentsFieldsCounts,
-        synonyms: store::Synonyms,
-        prefix_documents_cache: store::PrefixDocumentsCache,
-        prefix_postings_lists_cache: store::PrefixPostingsListsCache,
+        index: &'i store::Index,
         criteria: Criteria<'c>,
-    ) -> QueryBuilder<'c, 'f, 'd> {
+    ) -> Self {
         QueryBuilder {
             criteria,
             searchable_attrs: None,
             filter: None,
             distinct: None,
             timeout: None,
-            main_store: main,
-            postings_lists_store: postings_lists,
-            documents_fields_counts_store: documents_fields_counts,
-            synonyms_store: synonyms,
-            prefix_documents_cache_store: prefix_documents_cache,
-            prefix_postings_lists_cache_store: prefix_postings_lists_cache,
+            index,
+            facets: None,
         }
     }

@@ -93,36 +77,70 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
         query: &str,
         range: Range<usize>,
     ) -> MResult<(Vec<Document>, usize)> {
+        let facets_docids = match self.facets {
+            Some(facets) => {
+                let mut ands = Vec::with_capacity(facets.len());
+                let mut ors = Vec::new();
+                for f in facets.deref() {
+                    match f {
+                        Either::Left(keys) => {
+                            ors.reserve(keys.len());
+                            for key in keys {
+                                let docids = self.index.facets.facet_document_ids(reader, &key)?.unwrap_or_default();
+                                ors.push(docids);
+                            }
+                            let sets: Vec<_> = ors.iter().map(Cow::deref).collect();
+                            let or_result = sdset::multi::OpBuilder::from_vec(sets).union().into_set_buf();
+                            ands.push(Cow::Owned(or_result));
+                            ors.clear();
+                        }
+                        Either::Right(key) =>{
+                            match self.index.facets.facet_document_ids(reader, &key)? {
+                                Some(docids) => ands.push(docids),
+                                // no candidates for search, early return.
+                                None => return Ok((vec![], 0)),
+                            }
+                        }
+                    };
+                }
+                let ands: Vec<_> = ands.iter().map(Cow::deref).collect();
+                Some(sdset::multi::OpBuilder::from_vec(ands).intersection().into_set_buf())
+            }
+            None => None
+        };
+
         match self.distinct {
             Some((distinct, distinct_size)) => bucket_sort_with_distinct(
                 reader,
                 query,
                 range,
+                facets_docids,
                 self.filter,
                 distinct,
                 distinct_size,
                 self.criteria,
                 self.searchable_attrs,
-                self.main_store,
-                self.postings_lists_store,
-                self.documents_fields_counts_store,
-                self.synonyms_store,
-                self.prefix_documents_cache_store,
-                self.prefix_postings_lists_cache_store,
+                self.index.main,
+                self.index.postings_lists,
+                self.index.documents_fields_counts,
+                self.index.synonyms,
+                self.index.prefix_documents_cache,
+                self.index.prefix_postings_lists_cache,
            ),
            None => bucket_sort(
                reader,
                query,
                range,
+               facets_docids,
                self.filter,
                self.criteria,
                self.searchable_attrs,
-               self.main_store,
-               self.postings_lists_store,
-               self.documents_fields_counts_store,
-               self.synonyms_store,
-               self.prefix_documents_cache_store,
-               self.prefix_postings_lists_cache_store,
+               self.index.main,
+               self.index.postings_lists,
+               self.index.documents_fields_counts,
+               self.index.synonyms,
+               self.index.prefix_documents_cache,
+               self.index.prefix_postings_lists_cache,
            ),
        }
    }
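The facet branch above unions the docid sets of each inner OR group with sdset::multi::OpBuilder, then intersects the per-clause results. Below is a compact sketch of that algebra over plain u64 ids; function names and sample values are illustrative only, and it assumes the same sdset crate as the commit.

```rust
use sdset::{Set, SetBuf, SetOperation};

// An inner facet array becomes the union of its keys' docid sets.
fn or_group(sets: Vec<&Set<u64>>) -> SetBuf<u64> {
    sdset::multi::OpBuilder::from_vec(sets).union().into_set_buf()
}

// The outer array intersects the result of every AND clause.
fn and_groups(sets: Vec<&Set<u64>>) -> SetBuf<u64> {
    sdset::multi::OpBuilder::from_vec(sets).intersection().into_set_buf()
}

fn main() {
    let comedy = SetBuf::from_dirty(vec![1, 2, 8]);
    let horror = SetBuf::from_dirty(vec![2, 3]);
    let peele = SetBuf::from_dirty(vec![2, 8, 9]);

    let genres = or_group(vec![comedy.as_set(), horror.as_set()]); // {1, 2, 3, 8}
    let hits = and_groups(vec![genres.as_set(), peele.as_set()]);  // {2, 8}
    assert_eq!(hits.as_slice(), &[2, 8]);
}
```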
meilisearch-core/src/settings.rs

@@ -31,6 +31,8 @@ pub struct Settings {
     pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
     pub accept_new_fields: Option<Option<bool>>,
+    #[serde(default, deserialize_with = "deserialize_some")]
+    pub attributes_for_faceting: Option<Option<Vec<String>>>,
 }

 // Any value that is present is considered Some value, including null.

@@ -60,6 +62,7 @@ impl Settings {
             stop_words: settings.stop_words.into(),
             synonyms: settings.synonyms.into(),
             accept_new_fields: settings.accept_new_fields.into(),
+            attributes_for_faceting: settings.attributes_for_faceting.into(),
         })
     }
 }

@@ -166,6 +169,7 @@ pub struct SettingsUpdate {
     pub stop_words: UpdateState<BTreeSet<String>>,
     pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
     pub accept_new_fields: UpdateState<bool>,
+    pub attributes_for_faceting: UpdateState<Vec<String>>,
 }

 impl Default for SettingsUpdate {

@@ -179,6 +183,7 @@ impl Default for SettingsUpdate {
             stop_words: UpdateState::Nothing,
             synonyms: UpdateState::Nothing,
             accept_new_fields: UpdateState::Nothing,
+            attributes_for_faceting: UpdateState::Nothing,
         }
     }
 }
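The new Option<Option<_>> field relies on the deserialize_some helper referenced above to tell an absent field apart from an explicit null. A sketch of that pattern follows; the helper shown here is the standard serde idiom and may differ in detail from the one defined in settings.rs.

```rust
use serde::{Deserialize, Deserializer};

// Any value that is present is considered Some, including null.
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    Deserialize::deserialize(deserializer).map(Some)
}

#[derive(Deserialize)]
struct PartialSettings {
    #[serde(default, deserialize_with = "deserialize_some")]
    attributes_for_faceting: Option<Option<Vec<String>>>,
}

// {}                                      -> None        (leave the setting untouched)
// {"attributes_for_faceting": null}       -> Some(None)  (clear the setting)
// {"attributes_for_faceting": ["genre"]}  -> Some(Some)  (replace the setting)
```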
meilisearch-core/src/store/cow_set.rs (new file, 32 lines)

use std::borrow::Cow;

use heed::{types::CowSlice, BytesEncode, BytesDecode};
use sdset::{Set, SetBuf};
use zerocopy::{AsBytes, FromBytes};

pub struct CowSet<T>(std::marker::PhantomData<T>);

impl<'a, T: 'a> BytesEncode<'a> for CowSet<T>
where
    T: AsBytes,
{
    type EItem = Set<T>;

    fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<[u8]>> {
        CowSlice::bytes_encode(item.as_slice())
    }
}

impl<'a, T: 'a> BytesDecode<'a> for CowSet<T>
where
    T: FromBytes + Copy,
{
    type DItem = Cow<'a, Set<T>>;

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        match CowSlice::<T>::bytes_decode(bytes)? {
            Cow::Owned(vec) => Some(Cow::Owned(SetBuf::new_unchecked(vec))),
            Cow::Borrowed(slice) => Some(Cow::Borrowed(Set::new_unchecked(slice))),
        }
    }
}
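A hedged round-trip sketch for CowSet, assuming the heed, sdset and zerocopy versions this commit pins: encoding stores the raw slice bytes, decoding borrows when possible and copies otherwise.

```rust
use heed::{BytesDecode, BytesEncode};
use sdset::SetBuf;

fn roundtrip() {
    // from_dirty sorts and deduplicates, so the Set invariant holds: [1, 2, 3].
    let ids: SetBuf<u32> = SetBuf::from_dirty(vec![3, 1, 2, 3]);
    let bytes = CowSet::<u32>::bytes_encode(ids.as_set()).unwrap();
    let decoded = CowSet::<u32>::bytes_decode(&bytes).unwrap();
    assert_eq!(decoded.as_slice(), ids.as_slice());
}
```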
meilisearch-core/src/store/facets.rs (new file, 53 lines)

use std::borrow::Cow;
use std::collections::HashMap;

use heed::{RwTxn, RoTxn, Result as ZResult};
use sdset::{SetBuf, Set, SetOperation};

use meilisearch_types::DocumentId;

use crate::database::MainT;
use crate::facets::FacetKey;
use super::cow_set::CowSet;

/// contains facet info
#[derive(Clone, Copy)]
pub struct Facets {
    pub(crate) facets: heed::Database<FacetKey, CowSet<DocumentId>>,
}

impl Facets {
    // we use sdset::SetBuf to ensure the docids are sorted.
    pub fn put_facet_document_ids(&self, writer: &mut RwTxn<MainT>, facet_key: FacetKey, doc_ids: &Set<DocumentId>) -> ZResult<()> {
        self.facets.put(writer, &facet_key, doc_ids)
    }

    pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> ZResult<Option<Cow<'txn, Set<DocumentId>>>> {
        self.facets.get(reader, &facet_key)
    }

    /// updates the facets store, removing the documents from the facets provided in the
    /// `facet_map` argument
    pub fn remove(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
        for (key, document_ids) in facet_map {
            if let Some(old) = self.facets.get(writer, &key)? {
                let to_remove = SetBuf::from_dirty(document_ids);
                let new = sdset::duo::OpBuilder::new(old.as_ref(), to_remove.as_set()).difference().into_set_buf();
                self.facets.put(writer, &key, new.as_set())?;
            }
        }
        Ok(())
    }

    pub fn add(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
        for (key, document_ids) in facet_map {
            let set = SetBuf::from_dirty(document_ids);
            self.put_facet_document_ids(writer, key, set.as_set())?;
        }
        Ok(())
    }

    pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
        self.facets.clear(writer)
    }
}
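Both add and remove above funnel the collected ids through SetBuf::from_dirty before touching LMDB. A tiny sketch of why, assuming the same sdset crate: from_dirty sorts and deduplicates, so the stored value satisfies the Set invariant the other set operations rely on.

```rust
use sdset::SetBuf;

fn main() {
    let dirty = vec![42u64, 7, 42, 1];
    let set = SetBuf::from_dirty(dirty);
    // Sorted and deduplicated before being written to the facets database.
    assert_eq!(set.as_slice(), &[1, 7, 42]);
}
```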
meilisearch-core/src/store/main.rs

@@ -1,16 +1,20 @@
+use std::borrow::Cow;
 use std::sync::Arc;
 use std::collections::HashMap;

 use chrono::{DateTime, Utc};
 use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
 use heed::Result as ZResult;
-use meilisearch_schema::Schema;
+use meilisearch_schema::{FieldId, Schema};
+use sdset::Set;

 use crate::database::MainT;
 use crate::RankedMap;
 use crate::settings::RankingRule;
+use super::cow_set::CowSet;

 const CREATED_AT_KEY: &str = "created-at";
+const ATTRIBUTES_FOR_FACETING: &str = "attributes-for-faceting";
 const RANKING_RULES_KEY: &str = "ranking-rules";
 const DISTINCT_ATTRIBUTE_KEY: &str = "distinct-attribute";
 const STOP_WORDS_KEY: &str = "stop-words";

@@ -188,6 +192,18 @@ impl Main {
     }
 }

+    pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<Cow<'txn, Set<FieldId>>>> {
+        self.main.get::<_, Str, CowSet<FieldId>>(reader, ATTRIBUTES_FOR_FACETING)
+    }
+
+    pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>, attributes: &Set<FieldId>) -> ZResult<()> {
+        self.main.put::<_, Str, CowSet<FieldId>>(writer, ATTRIBUTES_FOR_FACETING, attributes)
+    }
+
+    pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
+        self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING)
+    }
+
     pub fn ranking_rules(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Vec<RankingRule>>> {
         self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY)
     }
meilisearch-core/src/store/mod.rs

@@ -1,3 +1,4 @@
+mod cow_set;
 mod docs_words;
 mod prefix_documents_cache;
 mod prefix_postings_lists_cache;

@@ -8,8 +9,10 @@ mod postings_lists;
 mod synonyms;
 mod updates;
 mod updates_results;
+mod facets;

 pub use self::docs_words::DocsWords;
+pub use self::facets::Facets;
 pub use self::prefix_documents_cache::PrefixDocumentsCache;
 pub use self::prefix_postings_lists_cache::PrefixPostingsListsCache;
 pub use self::documents_fields::{DocumentFieldsIter, DocumentsFields};

@@ -42,7 +45,7 @@ use crate::settings::SettingsUpdate;
 use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MResult};

 type BEU64 = zerocopy::U64<byteorder::BigEndian>;
-type BEU16 = zerocopy::U16<byteorder::BigEndian>;
+pub type BEU16 = zerocopy::U16<byteorder::BigEndian>;

 #[derive(Debug, Copy, Clone, AsBytes, FromBytes)]
 #[repr(C)]

@@ -197,12 +200,17 @@ fn updates_results_name(name: &str) -> String {
     format!("store-{}-updates-results", name)
 }

+fn facets_name(name: &str) -> String {
+    format!("store-{}-facets", name)
+}
+
 #[derive(Clone)]
 pub struct Index {
     pub main: Main,
     pub postings_lists: PostingsLists,
     pub documents_fields: DocumentsFields,
     pub documents_fields_counts: DocumentsFieldsCounts,
+    pub facets: Facets,
     pub synonyms: Synonyms,
     pub docs_words: DocsWords,
     pub prefix_documents_cache: PrefixDocumentsCache,

@@ -352,29 +360,14 @@ impl Index {
     }

     pub fn query_builder(&self) -> QueryBuilder {
-        QueryBuilder::new(
-            self.main,
-            self.postings_lists,
-            self.documents_fields_counts,
-            self.synonyms,
-            self.prefix_documents_cache,
-            self.prefix_postings_lists_cache,
-        )
+        QueryBuilder::new(self)
     }

-    pub fn query_builder_with_criteria<'c, 'f, 'd>(
-        &self,
+    pub fn query_builder_with_criteria<'c, 'f, 'd, 'fa, 'i>(
+        &'i self,
         criteria: Criteria<'c>,
-    ) -> QueryBuilder<'c, 'f, 'd> {
-        QueryBuilder::with_criteria(
-            self.main,
-            self.postings_lists,
-            self.documents_fields_counts,
-            self.synonyms,
-            self.prefix_documents_cache,
-            self.prefix_postings_lists_cache,
-            criteria,
-        )
+    ) -> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
+        QueryBuilder::with_criteria(self, criteria)
     }
 }

@@ -395,12 +388,14 @@ pub fn create(
     let prefix_postings_lists_cache_name = prefix_postings_lists_cache_name(name);
     let updates_name = updates_name(name);
     let updates_results_name = updates_results_name(name);
+    let facets_name = facets_name(name);

     // open all the stores
     let main = env.create_poly_database(Some(&main_name))?;
     let postings_lists = env.create_database(Some(&postings_lists_name))?;
     let documents_fields = env.create_database(Some(&documents_fields_name))?;
     let documents_fields_counts = env.create_database(Some(&documents_fields_counts_name))?;
+    let facets = env.create_database(Some(&facets_name))?;
     let synonyms = env.create_database(Some(&synonyms_name))?;
     let docs_words = env.create_database(Some(&docs_words_name))?;
     let prefix_documents_cache = env.create_database(Some(&prefix_documents_cache_name))?;

@@ -417,6 +412,8 @@
         docs_words: DocsWords { docs_words },
         prefix_postings_lists_cache: PrefixPostingsListsCache { prefix_postings_lists_cache },
         prefix_documents_cache: PrefixDocumentsCache { prefix_documents_cache },
+        facets: Facets { facets },
+
         updates: Updates { updates },
         updates_results: UpdatesResults { updates_results },
         updates_notifier,

@@ -437,6 +434,7 @@ pub fn open(
     let synonyms_name = synonyms_name(name);
     let docs_words_name = docs_words_name(name);
     let prefix_documents_cache_name = prefix_documents_cache_name(name);
+    let facets_name = facets_name(name);
     let prefix_postings_lists_cache_name = prefix_postings_lists_cache_name(name);
     let updates_name = updates_name(name);
     let updates_results_name = updates_results_name(name);

@@ -470,6 +468,10 @@
         Some(prefix_documents_cache) => prefix_documents_cache,
         None => return Ok(None),
     };
+    let facets = match env.open_database(Some(&facets_name))? {
+        Some(facets) => facets,
+        None => return Ok(None),
+    };
     let prefix_postings_lists_cache = match env.open_database(Some(&prefix_postings_lists_cache_name))? {
         Some(prefix_postings_lists_cache) => prefix_postings_lists_cache,
         None => return Ok(None),

@@ -491,6 +493,7 @@
         synonyms: Synonyms { synonyms },
         docs_words: DocsWords { docs_words },
         prefix_documents_cache: PrefixDocumentsCache { prefix_documents_cache },
+        facets: Facets { facets },
         prefix_postings_lists_cache: PrefixPostingsListsCache { prefix_postings_lists_cache },
         updates: Updates { updates },
         updates_results: UpdatesResults { updates_results },
@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize};

use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::facets;
use crate::raw_indexer::RawIndexer;
use crate::serde::{extract_document_id, serialize_value_with_id, Deserializer, Serializer};
use crate::store;
@ -103,10 +104,11 @@ pub fn push_documents_addition<D: serde::Serialize>(
Ok(last_update_id)
}

pub fn apply_documents_addition<'a, 'b>(
pub fn apply_addition<'a, 'b>(
writer: &'a mut heed::RwTxn<'b, MainT>,
index: &store::Index,
addition: Vec<IndexMap<String, serde_json::Value>>,
partial: bool
) -> MResult<()> {
let mut documents_additions = HashMap::new();

@ -118,12 +120,30 @@ pub fn apply_documents_addition<'a, 'b>(
let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;

// 1. store documents ids for future deletion
for document in addition {
for mut document in addition {
let document_id = match extract_document_id(&primary_key, &document)? {
Some(id) => id,
None => return Err(Error::MissingDocumentId),
};

if partial {
let mut deserializer = Deserializer {
document_id,
reader: writer,
documents_fields: index.documents_fields,
schema: &schema,
fields: None,
};

// retrieve the old document and
// update the new one with missing keys found in the old one
let result = Option::<HashMap<String, serde_json::Value>>::deserialize(&mut deserializer)?;
if let Some(old_document) = result {
for (key, value) in old_document {
document.entry(key).or_insert(value);
}
}
}
documents_additions.insert(document_id, document);
}

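Concretely, the partial branch above back-fills the incoming document with whatever the store already holds, so a partial update only overwrites the fields it mentions. A small illustration of the merge (the document shape is only an example, borrowed from the test data added later in this commit):

// stored document:   { "id": 1, "color": "blue",  "tags": ["bug"] }
// partial addition:  { "id": 1, "color": "green" }
// merged result:     { "id": 1, "color": "green", "tags": ["bug"] }
// entry(key).or_insert(value) keeps the incoming value and only fills in keys missing from it
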
@ -143,6 +163,11 @@ pub fn apply_documents_addition<'a, 'b>(
};

// 3. index the documents fields in the stores
if let Some(attributes_for_facetting) = index.main.attributes_for_faceting(writer)? {
let facet_map = facets::facet_map_from_docs(&schema, &documents_additions, attributes_for_facetting.as_ref())?;
index.facets.add(writer, facet_map)?;
}

let mut indexer = RawIndexer::new(stop_words);

for (document_id, document) in documents_additions {
@ -177,85 +202,15 @@ pub fn apply_documents_partial_addition<'a, 'b>(
index: &store::Index,
addition: Vec<IndexMap<String, serde_json::Value>>,
) -> MResult<()> {
let mut documents_additions = HashMap::new();
apply_addition(writer, index, addition, true)
}

let mut schema = match index.main.schema(writer)? {
Some(schema) => schema,
None => return Err(Error::SchemaMissing),
};

let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;

// 1. store documents ids for future deletion
for mut document in addition {
let document_id = match extract_document_id(&primary_key, &document)? {
Some(id) => id,
None => return Err(Error::MissingDocumentId),
};

let mut deserializer = Deserializer {
document_id,
reader: writer,
documents_fields: index.documents_fields,
schema: &schema,
fields: None,
};

// retrieve the old document and
// update the new one with missing keys found in the old one
let result = Option::<HashMap<String, serde_json::Value>>::deserialize(&mut deserializer)?;
if let Some(old_document) = result {
for (key, value) in old_document {
document.entry(key).or_insert(value);
}
}

documents_additions.insert(document_id, document);
}

// 2. remove the documents posting lists
let number_of_inserted_documents = documents_additions.len();
let documents_ids = documents_additions.iter().map(|(id, _)| *id).collect();
apply_documents_deletion(writer, index, documents_ids)?;

let mut ranked_map = match index.main.ranked_map(writer)? {
Some(ranked_map) => ranked_map,
None => RankedMap::default(),
};

let stop_words = match index.main.stop_words_fst(writer)? {
Some(stop_words) => stop_words,
None => fst::Set::default(),
};

// 3. index the documents fields in the stores
let mut indexer = RawIndexer::new(stop_words);

for (document_id, document) in documents_additions {
let serializer = Serializer {
txn: writer,
schema: &mut schema,
document_store: index.documents_fields,
document_fields_counts: index.documents_fields_counts,
indexer: &mut indexer,
ranked_map: &mut ranked_map,
document_id,
};

document.serialize(serializer)?;
}

write_documents_addition_index(
writer,
index,
&ranked_map,
number_of_inserted_documents,
indexer,
)?;

index.main.put_schema(writer, &schema)?;

Ok(())
pub fn apply_documents_addition<'a, 'b>(
writer: &'a mut heed::RwTxn<'b, MainT>,
index: &store::Index,
addition: Vec<IndexMap<String, serde_json::Value>>,
) -> MResult<()> {
apply_addition(writer, index, addition, false)
}

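After the refactor both public entry points are thin wrappers around apply_addition; only the partial flag differs:

// apply_documents_addition(writer, index, addition)         == apply_addition(writer, index, addition, false)
// apply_documents_partial_addition(writer, index, addition) == apply_addition(writer, index, addition, true)
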
pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
@ -277,6 +232,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
index.main.put_words_fst(writer, &fst::Set::default())?;
index.main.put_ranked_map(writer, &ranked_map)?;
index.main.put_number_of_documents(writer, |_| 0)?;
index.facets.clear(writer)?;
index.postings_lists.clear(writer)?;
index.docs_words.clear(writer)?;

@ -289,6 +245,11 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
let mut indexer = RawIndexer::new(stop_words);
let mut ram_store = HashMap::new();

if let Some(ref attributes_for_facetting) = index.main.attributes_for_faceting(writer)? {
let facet_map = facets::facet_map_from_docids(writer, &index, &documents_ids_to_reindex, &attributes_for_facetting)?;
index.facets.add(writer, facet_map)?;
}
// ^-- https://github.com/meilisearch/MeiliSearch/pull/631#issuecomment-626624470 --v
for document_id in documents_ids_to_reindex {
for result in index.documents_fields.document_fields(writer, document_id)? {
let (field_id, bytes) = result?;

@ -6,6 +6,7 @@ use sdset::{duo::DifferenceByKey, SetBuf, SetOperation};

use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::facets;
use crate::serde::extract_document_id;
use crate::store;
use crate::update::{next_update_id, compute_short_prefixes, Update};
@ -88,8 +89,6 @@ pub fn apply_documents_deletion(
index: &store::Index,
deletion: Vec<DocumentId>,
) -> MResult<()> {
let idset = SetBuf::from_dirty(deletion);

let schema = match index.main.schema(writer)? {
Some(schema) => schema,
None => return Err(Error::SchemaMissing),
@ -100,9 +99,16 @@ pub fn apply_documents_deletion(
None => RankedMap::default(),
};

// facet filters deletion
if let Some(attributes_for_facetting) = index.main.attributes_for_faceting(writer)? {
let facet_map = facets::facet_map_from_docids(writer, &index, &deletion, &attributes_for_facetting)?;
index.facets.remove(writer, facet_map)?;
}

// collect the ranked attributes according to the schema
let ranked_fields = schema.ranked();

let idset = SetBuf::from_dirty(deletion);
let mut words_document_ids = HashMap::new();
for id in idset {
// remove all the ranked attributes from the ranked_map

@ -102,6 +102,18 @@ pub fn apply_settings_update(
UpdateState::Nothing => (),
}

match settings.attributes_for_faceting {
UpdateState::Update(attrs) => {
apply_attributes_for_faceting_update(writer, index, &mut schema, &attrs)?;
must_reindex = true;
},
UpdateState::Clear => {
index.main.delete_attributes_for_faceting(writer)?;
index.facets.clear(writer)?;
},
UpdateState::Nothing => (),
}

index.main.put_schema(writer, &schema)?;

match settings.stop_words {
@ -131,6 +143,21 @@ pub fn apply_settings_update(
Ok(())
}

fn apply_attributes_for_faceting_update(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,
schema: &mut Schema,
attributes: &[String]
) -> MResult<()> {
let mut attribute_ids = Vec::new();
for name in attributes {
attribute_ids.push(schema.insert(name)?);
}
let attributes_for_faceting = SetBuf::from_dirty(attribute_ids);
index.main.put_attributes_for_faceting(writer, &attributes_for_faceting)?;
Ok(())
}

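As the match above shows, updating the faceted attributes registers them in the schema and forces a full reindex (must_reindex = true), while clearing them also wipes the facets store. The UpdateState::Update arm is the one exercised by settings payloads like the one used in the tests below (shown here only as an example):

{ "attributesForFaceting": ["color", "tags"] }
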
pub fn apply_stop_words_update(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,

@ -23,6 +23,7 @@ pub enum ResponseError {
FilterParsing(String),
RetrieveDocument(u64, String),
SearchDocuments(String),
FacetExpression(String),
}

impl ResponseError {
@ -106,6 +107,7 @@ impl fmt::Display for ResponseError {
Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e),
}
}
}
@ -118,13 +120,14 @@ impl aweb::error::ResponseError for ResponseError {
}

fn status_code(&self) -> StatusCode {
match *self {
match *self {
Self::BadParameter(_, _)
| Self::BadRequest(_)
| Self::CreateIndex(_)
| Self::InvalidIndexUid
| Self::OpenIndex(_)
| Self::RetrieveDocument(_, _)
| Self::FacetExpression(_)
| Self::SearchDocuments(_)
| Self::FilterParsing(_) => StatusCode::BAD_REQUEST,
Self::DocumentNotFound(_)
@ -151,6 +154,12 @@ impl From<meilisearch_core::FstError> for ResponseError {
}
}

impl From<meilisearch_core::FacetError> for ResponseError {
fn from(error: meilisearch_core::FacetError) -> ResponseError {
ResponseError::FacetExpression(error.to_string())
}
}

impl From<meilisearch_core::Error> for ResponseError {
fn from(err: meilisearch_core::Error) -> ResponseError {
use meilisearch_core::pest_error::LineColLocation::*;
@ -164,6 +173,7 @@ impl From<meilisearch_core::Error> for ResponseError {

ResponseError::FilterParsing(message)
},
meilisearch_core::Error::FacetError(e) => ResponseError::FacetExpression(e.to_string()),
_ => ResponseError::Internal(err.to_string()),
}
}

@ -6,6 +6,7 @@ use std::time::Instant;
use indexmap::IndexMap;
use log::error;
use meilisearch_core::Filter;
use meilisearch_core::facets::FacetFilter;
use meilisearch_core::criterion::*;
use meilisearch_core::settings::RankingRule;
use meilisearch_core::{Highlight, Index, MainT, RankedMap};
@ -34,6 +35,7 @@ impl IndexSearchExt for Index {
attributes_to_highlight: None,
filters: None,
matches: false,
facet_filters: None,
}
}
}
@ -48,6 +50,7 @@ pub struct SearchBuilder<'a> {
attributes_to_highlight: Option<HashSet<String>>,
filters: Option<String>,
matches: bool,
facet_filters: Option<FacetFilter>,
}

impl<'a> SearchBuilder<'a> {
@ -82,6 +85,11 @@ impl<'a> SearchBuilder<'a> {
self
}

pub fn add_facet_filters(&mut self, filters: FacetFilter) -> &SearchBuilder {
self.facet_filters = Some(filters);
self
}

pub fn filters(&mut self, value: String) -> &SearchBuilder {
self.filters = Some(value);
self
@ -138,6 +146,8 @@ impl<'a> SearchBuilder<'a> {
}
}

query_builder.set_facets(self.facet_filters.as_ref());

let start = Instant::now();
let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
let (docs, nb_hits) = result.map_err(ResponseError::search_documents)?;

@ -12,6 +12,8 @@ use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;

use meilisearch_core::facets::FacetFilter;

pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_url_query);
}
@ -28,6 +30,7 @@ struct SearchQuery {
attributes_to_highlight: Option<String>,
filters: Option<String>,
matches: Option<bool>,
facet_filters: Option<String>,
}

#[get("/indexes/{index_uid}/search", wrap = "Authentication::Public")]
@ -81,6 +84,13 @@ async fn search_with_url_query(
}
}

if let Some(ref facet_filters) = params.facet_filters {
match index.main.attributes_for_faceting(&reader)? {
Some(ref attrs) => { search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, attrs)?); },
None => return Err(ResponseError::FacetExpression("can't filter on facets, as no facet is set".to_string()))
}
}

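Wired to the route declared above, a faceted query is just an ordinary search request with an extra facetFilters query-string parameter; a sketch of such a request, written unencoded for readability (the tests below send the same value percent-encoded), against the "test" index created by the test helper:

GET /indexes/test/search?q=a&facetFilters=["color:blue", "tags:bug"]
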
if let Some(attributes_to_crop) = &params.attributes_to_crop {
let default_length = params.crop_length.unwrap_or(200);
let mut final_attributes: HashMap<String, usize> = HashMap::new();

@ -91,6 +91,17 @@ async fn get_all(

let schema = index.main.schema(&reader)?;

let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(&reader)?) {
(Some(schema), Some(attrs)) => {
Some(attrs
.iter()
.filter_map(|&id| schema.name(id))
.map(str::to_string)
.collect())
}
_ => None,
};

let searchable_attributes = schema.clone().map(|s| {
s.indexed_name()
.iter()
@ -115,6 +126,7 @@ async fn get_all(
stop_words: Some(Some(stop_words)),
synonyms: Some(Some(synonyms)),
accept_new_fields: Some(accept_new_fields),
attributes_for_faceting: Some(attributes_for_faceting),
};

Ok(HttpResponse::Ok().json(settings))
@ -140,6 +152,7 @@ async fn delete_all(
stop_words: UpdateState::Clear,
synonyms: UpdateState::Clear,
accept_new_fields: UpdateState::Clear,
attributes_for_faceting: UpdateState::Clear,
};

let update_id = index.settings_update(&mut writer, settings)?;

1613
meilisearch-http/tests/assets/test_set.json
Normal file
File diff suppressed because it is too large

@ -39,6 +39,74 @@ impl Server {
}
}

pub async fn test_server() -> Self {

let mut server = Self::with_uid("test");

let body = json!({
"uid": "test",
"primaryKey": "id",
});

server.create_index(body).await;

let body = json!({
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
],
"searchableAttributes": [
"balance",
"picture",
"age",
"color",
"name",
"gender",
"email",
"phone",
"address",
"about",
"registered",
"latitude",
"longitude",
"tags",
],
"displayedAttributes": [
"id",
"isActive",
"balance",
"picture",
"age",
"color",
"name",
"gender",
"email",
"phone",
"address",
"about",
"registered",
"latitude",
"longitude",
"tags",
],
"acceptNewFields": false,
});

server.update_all_settings(body).await;

let dataset = include_bytes!("assets/test_set.json");

let body: Value = serde_json::from_slice(dataset).unwrap();

server.add_or_replace_multiple_documents(body).await;
server
}


pub async fn wait_update_id(&mut self, update_id: u64) {
loop {
let (response, status_code) = self.get_update_status(update_id).await;
@ -90,6 +158,7 @@ impl Server {
eprintln!("post_request_async: {}", url);

let (response, status_code) = self.post_request(url, body).await;
eprintln!("response: {}", response);
assert_eq!(status_code, 202);
assert!(response["updateId"].as_u64().is_some());
self.wait_update_id(response["updateId"].as_u64().unwrap())

@ -2,6 +2,7 @@ use std::convert::Into;

use assert_json_diff::assert_json_eq;
use serde_json::json;
use serde_json::Value;

mod common;

@ -1133,3 +1134,161 @@ async fn search_with_differents_attributes_8() {
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}

#[actix_rt::test]
async fn test_faceted_search_valid() {
let mut server = common::Server::test_server().await;

// simple tests on attributes with string value
let body = json!({
"attributesForFaceting": ["color"]
});
server.update_all_settings(body).await;
let query = "q=a&facetFilters=%5B%22color%3Agreen%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "green"));
let query = "q=a&facetFilters=%5B%22color%3Ablue%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "blue"));

// test case insensitive : ["color:Blue"]
let query = "q=a&facetFilters=%5B%22color%3ABlue%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "blue"));

// test on arrays: ["tags:bug"]
let body = json!({
"attributesForFaceting": ["color", "tags"]
});
server.update_all_settings(body).await;

let query = "q=a&facetFilters=%5B%22tags%3Abug%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));

// test and: ["color:blue", "tags:bug"]
let query = "q=a&facetFilters=%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value
.get("color")
.unwrap() == "blue"
&& value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));

// test or: [["color:blue", "color:green"]]
let query = "q=a&facetFilters=%5B%5B%22color%3Ablue%22,%20%22color%3Agreen%22%5D%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value|
value
.get("color")
.unwrap() == "blue"
|| value
.get("color")
.unwrap() == "green"));
// test and-or: ["tags:bug", ["color:blue", "color:green"]]
let query = "q=a&facetFilters=%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value|
value
.get("tags")
.unwrap()
.as_array()
.unwrap()
.contains(&Value::String("bug".to_owned()))
&& (value
.get("color")
.unwrap() == "blue"
|| value
.get("color")
.unwrap() == "green")));
}

#[actix_rt::test]
async fn test_faceted_search_invalid() {
let mut server = common::Server::test_server().await;

//no faceted attributes set
let query = "q=a&facetFilters=%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);

let body = json!({
"attributesForFaceting": ["color", "tags"]
});
server.update_all_settings(body).await;
// empty arrays are error
// []
let query = "q=a&facetFilters=%5B%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// [[]]
let query = "q=a&facetFilters=%5B%5B%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// ["color:green", []]
let query = "q=a&facetFilters=%5B%22color%3Agreen%22,%20%5B%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);

// too much depth
// [[[]]]
let query = "q=a&facetFilters=%5B%5B%5B%5D%5D%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// [["color:green", ["color:blue"]]]
let query = "q=a&facetFilters=%5B%5B%22color%3Agreen%22,%20%5B%22color%3Ablue%22%5D%5D%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// "color:green"
let query = "q=a&facetFilters=%22color%3Agreen%22";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
}

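For reference, the percent-encoded facetFilters values driving the valid tests above decode to the following JSON arrays (top-level entries are ANDed together, entries inside a nested array are ORed, which is exactly what the assertions check):

%5B%22color%3Agreen%22%5D                             -> ["color:green"]
%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D       -> ["color:blue", "tags:bug" ]
%5B%5B%22color%3Ablue%22,%20%22color%3Agreen%22%5D%5D -> [["color:blue", "color:green"]]
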
@ -8,7 +8,6 @@ mod common;
async fn write_all_and_delete() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;

// 2 - Send the settings

let body = json!({
@ -48,9 +47,11 @@ async fn write_all_and_delete() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false,
});


server.update_all_settings(body.clone()).await;

// 3 - Get all settings and compare to the previous one
@ -119,6 +120,7 @@ async fn write_all_and_delete() {
],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": null,
"acceptNewFields": true,
});

@ -169,6 +171,7 @@ async fn write_all_and_update() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false,
});

@ -210,6 +213,7 @@ async fn write_all_and_update() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false,
});

@ -247,6 +251,7 @@ async fn write_all_and_update() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false
});

@ -277,6 +282,7 @@ async fn test_default_settings() {
"displayedAttributes": [],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": null,
"acceptNewFields": true,
});

@ -314,6 +320,7 @@ async fn test_default_settings_2() {
],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": null,
"acceptNewFields": true,
});

@ -421,6 +428,7 @@ async fn write_setting_and_update_partial() {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"attributesForFaceting": null,
"acceptNewFields": false,
});

@ -341,6 +341,7 @@ async fn accept_new_fields_does_not_take_into_account_the_primary_key() {
"displayedAttributes": ["title"],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": null,
"acceptNewFields": false,
});

@ -11,3 +11,4 @@ indexmap = { version = "1.3.2", features = ["serde-1"] }
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.50", features = ["preserve_order"] }
toml = { version = "0.5.6", features = ["preserve_order"] }
zerocopy = "0.3.0"

@ -6,6 +6,7 @@ pub use error::{Error, SResult};
pub use fields_map::FieldsMap;
pub use schema::Schema;
use serde::{Deserialize, Serialize};
use zerocopy::{AsBytes, FromBytes};

#[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct IndexedPos(pub u16);
@ -36,7 +37,10 @@ impl Into<u16> for IndexedPos {
}
}

#[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
#[derive(Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
#[derive(Serialize, Deserialize)]
#[derive(AsBytes, FromBytes)]
#[repr(C)]
pub struct FieldId(pub u16);

impl FieldId {
@ -63,8 +67,8 @@ impl From<u16> for FieldId {
}
}

impl Into<u16> for FieldId {
fn into(self) -> u16 {
self.0
impl From<FieldId> for u16 {
fn from(other: FieldId) -> u16 {
other.0
}
}