Merge pull request #631 from MarinPostma/facet-filters

Facet filters
Clément Renault 2020-05-11 18:16:34 +02:00 committed by GitHub
commit b215e9e848
27 changed files with 2690 additions and 274 deletions


@@ -1,5 +1,6 @@
 ## v0.10.2
+- Add support for faceted search (#631)
 - Add support for configuring the lmdb map size (#646, #647)
 - Add exposed port for Dockerfile (#654)
 - Add sentry probe

Cargo.lock (generated; 196 lines changed)

@@ -138,15 +138,16 @@ dependencies = [
 [[package]]
 name = "actix-rt"
-version = "1.1.0"
+version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20066d9200ef8d441ac156c76dd36c3f1e9a15976c34e69ae97f7f570b331882"
+checksum = "143fcc2912e0d1de2bcf4e2f720d2a60c28652ab4179685a1ee159e0fb3db227"
 dependencies = [
  "actix-macros",
  "actix-threadpool",
  "copyless",
  "futures-channel",
  "futures-util",
+ "smallvec",
  "tokio",
 ]

@@ -309,12 +310,9 @@ checksum = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2"
 [[package]]
 name = "ahash"
-version = "0.3.3"
+version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35b909d1c126f78ace756fc337133356c499eebeefcce930fa5fb018823f2b2d"
+checksum = "9c251dce3391a07b43218ca070203ecb8f9f520d35ab71312296a59dbceab154"
-dependencies = [
- "const-random",
-]

 [[package]]
 name = "aho-corasick"

@@ -436,9 +434,9 @@ dependencies = [
 [[package]]
 name = "backtrace-sys"
-version = "0.1.36"
+version = "0.1.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78848718ee1255a2485d1309ad9cdecfc2e7d0362dd11c6829364c6b35ae1bc7"
+checksum = "18fbebbe1c9d1f383a9cc7e8ccdb471b91c8d024ee9c2ca5b5346121fe8b4399"
 dependencies = [
  "cc",
  "libc",

@@ -452,9 +450,9 @@ checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7"
 [[package]]
 name = "base64"
-version = "0.12.0"
+version = "0.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d5ca2cd0adc3f48f9e9ea5a6bbdf9ccc0bfade884847e484d452414c7ccffb3"
+checksum = "53d1ccbaf7d9ec9537465a97bf19edc1a4e158ecb49fc16178202238c569cc42"

 [[package]]
 name = "bincode"

@@ -646,26 +644,6 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ab08c5bed92075075d5db5149887a477b2dc0318c40882a0dfbd34315ac6141"
-
-[[package]]
-name = "const-random"
-version = "0.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f1af9ac737b2dd2d577701e59fd09ba34822f6f2ebdb30a7647405d9e55e16a"
-dependencies = [
- "const-random-macro",
- "proc-macro-hack",
-]
-
-[[package]]
-name = "const-random-macro"
-version = "0.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25e4c606eb459dd29f7c57b2e0879f2b6f14ee130918c2b78ccb58a9624e6c7a"
-dependencies = [
- "getrandom",
- "proc-macro-hack",
-]

 [[package]]
 name = "copyless"
 version = "0.1.4"

@@ -688,6 +666,12 @@ version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b3a71ab494c0b5b860bdc8407ae08978052417070c2ced38573a9157ad75b8ac"
+
+[[package]]
+name = "cow-utils"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79bb3adfaf5f75d24b01aee375f7555907840fa2800e5ec8fa3b9e2031830173"

 [[package]]
 name = "crc32fast"
 version = "1.2.0"
@@ -699,16 +683,16 @@ dependencies = [
 [[package]]
 name = "criterion"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc755679c12bda8e5523a71e4d654b6bf2e14bd838dfc48cde6559a05caf7d1"
+checksum = "63f696897c88b57f4ffe3c69d8e1a0613c7d0e6c4833363c8560fbde9c47b966"
 dependencies = [
  "atty",
  "cast",
  "clap",
  "criterion-plot",
  "csv",
- "itertools 0.8.2",
+ "itertools",
  "lazy_static",
  "num-traits",
  "oorandom",

@@ -724,12 +708,12 @@ dependencies = [
 [[package]]
 name = "criterion-plot"
-version = "0.4.1"
+version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a01e15e0ea58e8234f96146b1f91fa9d0e4dd7a38da93ff7a75d42c0b9d3a545"
+checksum = "ddeaf7989f00f2e1d871a26a110f3ed713632feac17f65f03ca938c542618b60"
 dependencies = [
  "cast",
- "itertools 0.8.2",
+ "itertools",
 ]

@@ -926,9 +910,9 @@ dependencies = [
 [[package]]
 name = "failure"
-version = "0.1.7"
+version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8529c2421efa3066a5cbd8063d2244603824daccb6936b079010bb2aa89464b"
+checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
 dependencies = [
  "backtrace",
  "failure_derive",

@@ -936,9 +920,9 @@ dependencies = [
 [[package]]
 name = "failure_derive"
-version = "0.1.7"
+version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "030a733c8287d6213886dd487564ff5c8f6aae10278b3588ed177f9d18f8d231"
+checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -1153,9 +1137,9 @@ dependencies = [
 [[package]]
 name = "h2"
-version = "0.2.4"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "377038bf3c89d18d6ca1431e7a5027194fbd724ca10592b9487ede5e8e144f42"
+checksum = "79b7246d7e4b979c03fa093da39cfb3617a96bbeee6310af63991668d7e843ff"
 dependencies = [
  "bytes 0.5.4",
  "fnv",

@@ -1172,9 +1156,9 @@ dependencies = [
 [[package]]
 name = "hashbrown"
-version = "0.7.1"
+version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "479e9d9a1a3f8c489868a935b557ab5710e3e223836da2ecd52901d88935cb56"
+checksum = "96282e96bfcd3da0d3aa9938bedf1e50df3269b6db08b4876d2da0bb1a0841cf"
 dependencies = [
  "ahash",
  "autocfg",

@@ -1432,9 +1416,9 @@ dependencies = [
 [[package]]
 name = "ipconfig"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa79fa216fbe60834a9c0737d7fcd30425b32d1c58854663e24d4c4b328ed83f"
+checksum = "f7e2f18aece9709094573a9f24f483c4f65caa4298e2f7ae1b71cc65d853fad7"
 dependencies = [
  "socket2",
  "widestring",

@@ -1442,15 +1426,6 @@ dependencies = [
  "winreg",
 ]
-
-[[package]]
-name = "itertools"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484"
-dependencies = [
- "either",
-]

 [[package]]
 name = "itertools"
 version = "0.9.0"

@@ -1489,9 +1464,9 @@ dependencies = [
 [[package]]
 name = "js-sys"
-version = "0.3.37"
+version = "0.3.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a27d435371a2fa5b6d2b028a74bbdb1234f308da363226a2854ca3ff8ba7055"
+checksum = "fa5a448de267e7358beaf4a5d849518fe9a0c13fce7afd44b06e68550e5562a7"
 dependencies = [
  "wasm-bindgen",
 ]

@@ -1535,9 +1510,9 @@ checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005"
 [[package]]
 name = "linked-hash-map"
-version = "0.5.2"
+version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83"
+checksum = "8dd5a6d5999d9907cda8ed67bbd137d3af8085216c2ac62de5be860bd41f304a"

 [[package]]
 name = "lmdb-rkv-sys"
@@ -1617,17 +1592,19 @@ dependencies = [
  "byteorder",
  "chrono",
  "compact_arena",
+ "cow-utils",
  "criterion",
  "crossbeam-channel",
  "csv",
  "deunicode",
+ "either",
  "env_logger",
  "fst",
  "hashbrown",
  "heed",
  "indexmap",
  "intervaltree",
- "itertools 0.9.0",
+ "itertools",
  "jemallocator",
  "levenshtein_automata",
  "log",

@@ -1708,6 +1685,7 @@ dependencies = [
  "serde",
  "serde_json",
  "toml",
+ "zerocopy",
 ]

 [[package]]

@@ -1768,9 +1746,9 @@ dependencies = [
 [[package]]
 name = "mio"
-version = "0.6.21"
+version = "0.6.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f"
+checksum = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430"
 dependencies = [
  "cfg-if",
  "fuchsia-zircon",

@@ -1787,9 +1765,9 @@ dependencies = [
 [[package]]
 name = "mio-uds"
-version = "0.6.7"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "966257a94e196b11bb43aca423754d87429960a768de9414f3691d6957abf125"
+checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0"
 dependencies = [
  "iovec",
  "libc",

@@ -1828,9 +1806,9 @@ dependencies = [
 [[package]]
 name = "net2"
-version = "0.2.33"
+version = "0.2.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
+checksum = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7"
 dependencies = [
  "cfg-if",
  "libc",

@@ -1906,9 +1884,9 @@ checksum = "b1c601810575c99596d4afc46f78a678c80105117c379eb3650cf99b8a21ce5b"
 [[package]]
 name = "oorandom"
-version = "11.1.0"
+version = "11.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebcec7c9c2a95cacc7cd0ecb89d8a8454eca13906f6deb55258ffff0adeb9405"
+checksum = "94af325bc33c7f60191be4e2c984d48aaa21e2854f473b85398344b60c9b6358"

 [[package]]
 name = "opaque-debug"

@@ -2058,18 +2036,18 @@ dependencies = [
 [[package]]
 name = "pin-project"
-version = "0.4.9"
+version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f6a7f5eee6292c559c793430c55c00aea9d3b3d1905e855806ca4d7253426a2"
+checksum = "82c3bfbfb5bb42f99498c7234bbd768c220eb0cea6818259d0d18a1aa3d2595d"
 dependencies = [
  "pin-project-internal",
 ]

 [[package]]
 name = "pin-project-internal"
-version = "0.4.9"
+version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8988430ce790d8682672117bc06dda364c0be32d3abd738234f19f3240bad99a"
+checksum = "ccbf6449dcfb18562c015526b085b8df1aa3cdab180af8ec2ebd300a3bd28f63"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -2078,9 +2056,9 @@ dependencies = [
 [[package]]
 name = "pin-project-lite"
-version = "0.1.4"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "237844750cfbb86f67afe27eee600dfbbcb6188d734139b534cbfbf4f96792ae"
+checksum = "f7505eeebd78492e0f6108f7171c4948dbb120ee8119d9d77d0afa5469bef67f"

 [[package]]
 name = "pin-utils"

@@ -2096,9 +2074,9 @@ checksum = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677"
 [[package]]
 name = "plotters"
-version = "0.2.12"
+version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e3bb8da247d27ae212529352020f3e5ee16e83c0c258061d27b08ab92675eeb"
+checksum = "f9b1d9ca091d370ea3a78d5619145d1b59426ab0c9eedbad2514a4cee08bf389"
 dependencies = [
  "js-sys",
  "num-traits",

@@ -2162,9 +2140,9 @@ checksum = "8e946095f9d3ed29ec38de908c22f95d9ac008e424c7bcae54c75a79c527c694"
 [[package]]
 name = "proc-macro2"
-version = "1.0.10"
+version = "1.0.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df246d292ff63439fea9bc8c0a270bed0e390d5ebd4db4ba15aba81111b5abe3"
+checksum = "8872cf6f48eee44265156c111456a700ab3483686b3f96df4cf5481c89157319"
 dependencies = [
  "unicode-xid",
 ]

@@ -2186,9 +2164,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
 [[package]]
 name = "quote"
-version = "1.0.3"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f"
+checksum = "4c1f4b0efa5fc5e8ceb705136bfee52cfdb6a4e3509f770b478cd6ed434232a7"
 dependencies = [
  "proc-macro2",
 ]

@@ -2398,13 +2376,13 @@ dependencies = [
 [[package]]
 name = "ring"
-version = "0.16.12"
+version = "0.16.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ba5a8ec64ee89a76c98c549af81ff14813df09c3e6dc4766c3856da48597a0c"
+checksum = "703516ae74571f24b465b4a1431e81e2ad51336cb0ded733a55a1aa3eccac196"
 dependencies = [
  "cc",
- "lazy_static",
  "libc",
+ "once_cell",
  "spin",
  "untrusted",
  "web-sys",
@@ -2615,9 +2593,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.51"
+version = "1.0.52"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da07b57ee2623368351e9a0488bb0b261322a15a6e0ae53e243cbdc0f4208da9"
+checksum = "a7894c8ed05b7a3a279aeb79025fdec1d3158080b75b98a08faf2806bb799edd"
 dependencies = [
  "indexmap",
  "itoa",

@@ -2773,9 +2751,9 @@ dependencies = [
 [[package]]
 name = "syn"
-version = "1.0.18"
+version = "1.0.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "410a7488c0a728c7ceb4ad59b9567eb4053d02e8cc7f5c0e0eeeb39518369213"
+checksum = "e8e5aa70697bb26ee62214ae3288465ecec0000f05182f039b477001f08f5ae7"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -2902,9 +2880,9 @@ dependencies = [
 [[package]]
 name = "tokio"
-version = "0.2.19"
+version = "0.2.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d9c43f1bb96970e153bcbae39a65e249ccb942bd9d36dbdf086024920417c9c"
+checksum = "05c1d570eb1a36f0345a5ce9c6c6e665b70b73d11236912c0b477616aeec47b1"
 dependencies = [
  "bytes 0.5.4",
  "fnv",

@@ -3113,9 +3091,9 @@ checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
 [[package]]
 name = "untrusted"
-version = "0.7.0"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60369ef7a31de49bcb3f6ca728d4ba7300d9a1658f94c727d4cab8c8d9f4aece"
+checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"

 [[package]]
 name = "ureq"

@@ -3123,7 +3101,7 @@ version = "0.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cd754afd5f60388b4188210c3795392c5f2fd69a1cc947ec4505dbfee955b902"
 dependencies = [
- "base64 0.12.0",
+ "base64 0.12.1",
  "chunked_transfer",
  "lazy_static",
  "qstring",

@@ -3200,9 +3178,9 @@ checksum = "3fc439f2794e98976c88a2a2dafce96b930fe8010b0a256b3c2199a773933168"
 [[package]]
 name = "vec_map"
-version = "0.8.1"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
+checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"

 [[package]]
 name = "vergen"

@@ -3261,9 +3239,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.60"
+version = "0.2.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cc57ce05287f8376e998cbddfb4c8cb43b84a7ec55cf4551d7c00eef317a47f"
+checksum = "e3c7d40d09cdbf0f4895ae58cf57d92e1e57a9dd8ed2e8390514b54a47cc5551"
 dependencies = [
  "cfg-if",
  "serde",

@@ -3273,9 +3251,9 @@ dependencies = [
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.60"
+version = "0.2.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d967d37bf6c16cca2973ca3af071d0a2523392e4a594548155d89a678f4237cd"
+checksum = "c3972e137ebf830900db522d6c8fd74d1900dcfc733462e9a12e942b00b4ac94"
 dependencies = [
  "bumpalo",
  "lazy_static",

@@ -3288,9 +3266,9 @@ dependencies = [
 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.10"
+version = "0.4.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7add542ea1ac7fdaa9dc25e031a6af33b7d63376292bd24140c637d00d1c312a"
+checksum = "8a369c5e1dfb7569e14d62af4da642a3cbc2f9a3652fe586e26ac22222aa4b04"
 dependencies = [
  "cfg-if",
  "js-sys",

@@ -3300,9 +3278,9 @@ dependencies = [
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.60"
+version = "0.2.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bd151b63e1ea881bb742cd20e1d6127cef28399558f3b5d415289bc41eee3a4"
+checksum = "2cd85aa2c579e8892442954685f0d801f9129de24fa2136b2c6a539c76b65776"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",

@@ -3310,9 +3288,9 @@ dependencies = [
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.60"
+version = "0.2.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d68a5b36eef1be7868f668632863292e37739656a80fc4b9acec7b0bd35a4931"
+checksum = "8eb197bd3a47553334907ffd2f16507b4f4f01bbec3ac921a7719e0decdfe72a"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -3323,15 +3301,15 @@ dependencies = [
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.60"
+version = "0.2.62"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "daf76fe7d25ac79748a37538b7daeed1c7a6867c92d3245c12c6222e4a20d639"
+checksum = "a91c2916119c17a8e316507afaaa2dd94b47646048014bbdf6bef098c1bb58ad"

 [[package]]
 name = "web-sys"
-version = "0.3.37"
+version = "0.3.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d6f51648d8c56c366144378a33290049eafdd784071077f6fe37dae64c1c4cb"
+checksum = "8bc359e5dd3b46cb9687a051d50a2fdd228e4ba7cf6fcf861a5365c3d671a642"
 dependencies = [
  "js-sys",
  "wasm-bindgen",


@@ -11,8 +11,10 @@ bincode = "1.2.1"
 byteorder = "1.3.4"
 chrono = { version = "0.4.11", features = ["serde"] }
 compact_arena = "0.4.0"
+cow-utils = "0.1.2"
 crossbeam-channel = "0.4.2"
 deunicode = "1.1.0"
+either = "1.5.3"
 env_logger = "0.7.1"
 fst = { version = "0.3.5", default-features = false }
 hashbrown = { version = "0.7.1", features = ["serde"] }


@@ -11,7 +11,7 @@ use std::fmt;
 use compact_arena::{SmallArena, Idx32, mk_arena};
 use log::debug;
 use meilisearch_types::DocIndex;
-use sdset::{Set, SetBuf, exponential_search};
+use sdset::{Set, SetBuf, exponential_search, SetOperation};
 use slice_group_by::{GroupBy, GroupByMut};

 use crate::error::Error;

@@ -28,6 +28,7 @@ pub fn bucket_sort<'c, FI>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
     range: Range<usize>,
+    facets_docids: Option<SetBuf<DocumentId>>,
     filter: Option<FI>,
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,

@@ -50,6 +51,7 @@ where
             reader,
             query,
             range,
+            facets_docids,
             filter,
             distinct,
             distinct_size,

@@ -94,10 +96,17 @@ where
     let mut queries_kinds = HashMap::new();
     recurs_operation(&mut queries_kinds, &operation);

-    let QueryResult { docids, queries } = traverse_query_tree(reader, &context, &operation)?;
+    let QueryResult { mut docids, queries } = traverse_query_tree(reader, &context, &operation)?;
     debug!("found {} documents", docids.len());
     debug!("number of postings {:?}", queries.len());

+    if let Some(facets_docids) = facets_docids {
+        let intersection = sdset::duo::OpBuilder::new(docids.as_ref(), facets_docids.as_set())
+            .intersection()
+            .into_set_buf();
+        docids = Cow::Owned(intersection);
+    }
+
     let before = Instant::now();
     mk_arena!(arena);
     let mut bare_matches = cleanup_bare_matches(&mut arena, &docids, queries);

@@ -179,6 +188,7 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
     range: Range<usize>,
+    facets_docids: Option<SetBuf<DocumentId>>,
     filter: Option<FI>,
     distinct: FD,
     distinct_size: usize,

@@ -225,10 +235,17 @@ where
     let mut queries_kinds = HashMap::new();
     recurs_operation(&mut queries_kinds, &operation);

-    let QueryResult { docids, queries } = traverse_query_tree(reader, &context, &operation)?;
+    let QueryResult { mut docids, queries } = traverse_query_tree(reader, &context, &operation)?;
     debug!("found {} documents", docids.len());
     debug!("number of postings {:?}", queries.len());

+    if let Some(facets_docids) = facets_docids {
+        let intersection = sdset::duo::OpBuilder::new(docids.as_ref(), facets_docids.as_set())
+            .intersection()
+            .into_set_buf();
+        docids = Cow::Owned(intersection);
+    }
+
     let before = Instant::now();
     mk_arena!(arena);
     let mut bare_matches = cleanup_bare_matches(&mut arena, &docids, queries);
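The facet candidates computed by the caller are folded into the query results with a plain sorted-set intersection. A minimal standalone sketch of the sdset call used above, with made-up document ids:

use sdset::{SetBuf, SetOperation};

fn main() {
    // Both operands must already be sorted, deduplicated sets.
    let query_docids = SetBuf::new(vec![1u64, 3, 5, 7]).unwrap();
    let facet_docids = SetBuf::new(vec![3u64, 4, 5, 9]).unwrap();

    let intersection = sdset::duo::OpBuilder::new(query_docids.as_set(), facet_docids.as_set())
        .intersection()
        .into_set_buf();

    assert_eq!(intersection.as_slice(), &[3u64, 5][..]);
}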


@@ -28,7 +28,8 @@ pub enum Error {
     Serializer(SerializerError),
     Deserializer(DeserializerError),
     UnsupportedOperation(UnsupportedOperation),
-    FilterParseError(PestError<Rule>)
+    FilterParseError(PestError<Rule>),
+    FacetError(FacetError),
 }

 impl From<io::Error> for Error {

@@ -57,7 +58,13 @@ impl From<PestError<Rule>> for Error {
             s.to_string()
         }))
     }
 }

+impl From<FacetError> for Error {
+    fn from(error: FacetError) -> Error {
+        Error::FacetError(error)
+    }
+}
+
 impl From<meilisearch_schema::Error> for Error {
     fn from(error: meilisearch_schema::Error) -> Error {

@@ -127,6 +134,7 @@ impl fmt::Display for Error {
             Deserializer(e) => write!(f, "deserializer error; {}", e),
             UnsupportedOperation(op) => write!(f, "unsupported operation; {}", op),
             FilterParseError(e) => write!(f, "error parsing filter; {}", e),
+            FacetError(e) => write!(f, "error processing facet filter: {}", e),
         }
     }
 }

@@ -156,3 +164,40 @@ impl fmt::Display for UnsupportedOperation {
         }
     }
 }
+
+#[derive(Debug)]
+pub enum FacetError {
+    EmptyArray,
+    ParsingError(String),
+    UnexpectedToken { expected: &'static [&'static str], found: String },
+    InvalidFormat(String),
+    AttributeNotFound(String),
+    AttributeNotSet { expected: Vec<String>, found: String },
+    InvalidDocumentAttribute(String),
+}
+
+impl FacetError {
+    pub fn unexpected_token(expected: &'static [&'static str], found: impl ToString) -> FacetError {
+        FacetError::UnexpectedToken { expected, found: found.to_string() }
+    }
+
+    pub fn attribute_not_set(expected: Vec<String>, found: impl ToString) -> FacetError {
+        FacetError::AttributeNotSet { expected, found: found.to_string() }
+    }
+}
+
+impl fmt::Display for FacetError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        use FacetError::*;
+
+        match self {
+            EmptyArray => write!(f, "empty array in facet filter is unspecified behavior"),
+            ParsingError(msg) => write!(f, "parsing error: {}", msg),
+            UnexpectedToken { expected, found } => write!(f, "unexpected token {}, expected {}", found, expected.join(" or ")),
+            InvalidFormat(found) => write!(f, "invalid facet: {}, facets should be \"facetName:facetValue\"", found),
+            AttributeNotFound(attr) => write!(f, "unknown {:?} attribute", attr),
+            AttributeNotSet { found, expected } => write!(f, "`{}` is not set as a faceted attribute. available facet attributes: {}", found, expected.join(", ")),
+            InvalidDocumentAttribute(attr) => write!(f, "invalid document attribute {}, accepted types: String and [String]", attr),
+        }
+    }
+}


@@ -0,0 +1,353 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::hash::Hash;
use std::ops::Deref;

use cow_utils::CowUtils;
use either::Either;
use heed::types::{Str, OwnedType};
use indexmap::IndexMap;
use serde_json::Value;

use meilisearch_schema::{FieldId, Schema};
use meilisearch_types::DocumentId;

use crate::database::MainT;
use crate::error::{FacetError, Error};
use crate::store::BEU16;

/// Data structure used to represent a boolean expression in the form of nested arrays.
/// Values in the outer array are and-ed together, values in the inner arrays are or-ed together.
#[derive(Debug, PartialEq)]
pub struct FacetFilter(Vec<Either<Vec<FacetKey>, FacetKey>>);

impl Deref for FacetFilter {
    type Target = Vec<Either<Vec<FacetKey>, FacetKey>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl FacetFilter {
    pub fn from_str(
        s: &str,
        schema: &Schema,
        attributes_for_faceting: &[FieldId],
    ) -> Result<Self, FacetError> {
        let parsed = serde_json::from_str::<Value>(s).map_err(|e| FacetError::ParsingError(e.to_string()))?;
        let mut filter = Vec::new();
        match parsed {
            Value::Array(and_exprs) => {
                if and_exprs.is_empty() {
                    return Err(FacetError::EmptyArray);
                }
                for expr in and_exprs {
                    match expr {
                        Value::String(s) => {
                            let key = FacetKey::from_str(&s, schema, attributes_for_faceting)?;
                            filter.push(Either::Right(key));
                        }
                        Value::Array(or_exprs) => {
                            if or_exprs.is_empty() {
                                return Err(FacetError::EmptyArray);
                            }
                            let mut inner = Vec::new();
                            for expr in or_exprs {
                                match expr {
                                    Value::String(s) => {
                                        let key = FacetKey::from_str(&s, schema, attributes_for_faceting)?;
                                        inner.push(key);
                                    }
                                    bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value)),
                                }
                            }
                            filter.push(Either::Left(inner));
                        }
                        bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value)),
                    }
                }
                return Ok(Self(filter));
            }
            bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value)),
        }
    }
}
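To make the and/or nesting concrete, here is a sketch of how a filter string maps onto the `Either` representation, written as if it lived alongside this module and mirroring the unit tests further down; the `genre` attribute is a made-up stand-in:

use either::Either;
use meilisearch_schema::Schema;

fn demo() -> Result<(), FacetError> {
    let mut schema = Schema::new();
    schema.insert_and_index("genre").unwrap();
    let faceted = [schema.id("genre").unwrap()];

    // (genre:comedy OR genre:horror) AND genre:cult
    let filter = FacetFilter::from_str(
        r#"[["genre:comedy", "genre:horror"], "genre:cult"]"#,
        &schema,
        &faceted,
    )?;

    assert_eq!(filter.len(), 2);                    // two and-ed clauses
    assert!(matches!(filter[0], Either::Left(_)));  // the or-ed group
    assert!(matches!(filter[1], Either::Right(_))); // a single key
    Ok(())
}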
#[derive(Debug, Eq, PartialEq, Hash)]
#[repr(C)]
pub struct FacetKey(FieldId, String);

impl FacetKey {
    pub fn new(field_id: FieldId, value: String) -> Self {
        let value = match value.cow_to_lowercase() {
            Cow::Borrowed(_) => value,
            Cow::Owned(s) => s,
        };
        Self(field_id, value)
    }

    pub fn key(&self) -> FieldId {
        self.0
    }

    pub fn value(&self) -> &str {
        &self.1
    }

    // TODO improve parser
    fn from_str(
        s: &str,
        schema: &Schema,
        attributes_for_faceting: &[FieldId],
    ) -> Result<Self, FacetError> {
        let mut split = s.splitn(2, ':');
        let key = split
            .next()
            .ok_or_else(|| FacetError::InvalidFormat(s.to_string()))?
            .trim();
        let field_id = schema
            .id(key)
            .ok_or_else(|| FacetError::AttributeNotFound(key.to_string()))?;
        if !attributes_for_faceting.contains(&field_id) {
            return Err(FacetError::attribute_not_set(
                attributes_for_faceting
                    .iter()
                    .filter_map(|&id| schema.name(id))
                    .map(str::to_string)
                    .collect::<Vec<_>>(),
                key))
        }
        let value = split
            .next()
            .ok_or_else(|| FacetError::InvalidFormat(s.to_string()))?
            .trim();
        // unquoting the string if need be:
        let mut indices = value.char_indices();
        let value = match (indices.next(), indices.last()) {
            (Some((s, '\'')), Some((e, '\''))) |
            (Some((s, '\"')), Some((e, '\"'))) => value[s + 1..e].to_string(),
            _ => value.to_string(),
        };
        Ok(Self::new(field_id, value))
    }
}

impl<'a> heed::BytesEncode<'a> for FacetKey {
    type EItem = FacetKey;

    fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<'a, [u8]>> {
        let mut buffer = Vec::with_capacity(2 + item.1.len());
        let id = BEU16::new(item.key().into());
        let id_bytes = OwnedType::bytes_encode(&id)?;
        let value_bytes = Str::bytes_encode(item.value())?;
        buffer.extend_from_slice(id_bytes.as_ref());
        buffer.extend_from_slice(value_bytes.as_ref());
        Some(Cow::Owned(buffer))
    }
}

impl<'a> heed::BytesDecode<'a> for FacetKey {
    type DItem = FacetKey;

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        let (id_bytes, value_bytes) = bytes.split_at(2);
        let id = OwnedType::<BEU16>::bytes_decode(id_bytes)?;
        let id = id.get().into();
        let string = Str::bytes_decode(&value_bytes)?;
        Some(FacetKey(id, string.to_string()))
    }
}
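A round-trip sketch of the key layout with made-up values: the first two bytes are the field id as a big-endian u16, and the rest is the lowercased value as raw UTF-8. `FieldId` is assumed to be the u16 newtype the tests below construct as `FieldId(0)`:

use heed::{BytesEncode, BytesDecode};
use meilisearch_schema::FieldId;

fn demo() {
    // `FacetKey::new` lowercases the value, so "Comedy" is stored as "comedy".
    let key = FacetKey::new(FieldId(42), "Comedy".to_string());

    let bytes = FacetKey::bytes_encode(&key).unwrap();
    assert_eq!(&bytes[..2], &42u16.to_be_bytes()[..]); // big-endian field id
    assert_eq!(&bytes[2..], &b"comedy"[..]);           // raw UTF-8 value

    let decoded = FacetKey::bytes_decode(&bytes).unwrap();
    assert_eq!(decoded, key);
}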
pub fn add_to_facet_map(
    facet_map: &mut HashMap<FacetKey, Vec<DocumentId>>,
    field_id: FieldId,
    value: Value,
    document_id: DocumentId,
) -> Result<(), FacetError> {
    let value = match value {
        Value::String(s) => s,
        // ignore null
        Value::Null => return Ok(()),
        value => return Err(FacetError::InvalidDocumentAttribute(value.to_string())),
    };
    let key = FacetKey::new(field_id, value);
    facet_map.entry(key).or_insert_with(Vec::new).push(document_id);
    Ok(())
}

pub fn facet_map_from_docids(
    rtxn: &heed::RoTxn<MainT>,
    index: &crate::Index,
    document_ids: &[DocumentId],
    attributes_for_facetting: &[FieldId],
) -> Result<HashMap<FacetKey, Vec<DocumentId>>, Error> {
    let mut facet_map = HashMap::new();
    for document_id in document_ids {
        for result in index
            .documents_fields
            .document_fields(rtxn, *document_id)?
        {
            let (field_id, bytes) = result?;
            if attributes_for_facetting.contains(&field_id) {
                match serde_json::from_slice(bytes)? {
                    Value::Array(values) => {
                        for v in values {
                            add_to_facet_map(&mut facet_map, field_id, v, *document_id)?;
                        }
                    }
                    v => add_to_facet_map(&mut facet_map, field_id, v, *document_id)?,
                };
            }
        }
    }
    Ok(facet_map)
}

pub fn facet_map_from_docs(
    schema: &Schema,
    documents: &HashMap<DocumentId, IndexMap<String, Value>>,
    attributes_for_facetting: &[FieldId],
) -> Result<HashMap<FacetKey, Vec<DocumentId>>, Error> {
    let mut facet_map = HashMap::new();
    let attributes_for_facetting = attributes_for_facetting
        .iter()
        .filter_map(|&id| schema.name(id).map(|name| (id, name)))
        .collect::<Vec<_>>();
    for (id, document) in documents {
        for (field_id, name) in &attributes_for_facetting {
            if let Some(value) = document.get(*name) {
                match value {
                    Value::Array(values) => {
                        for v in values {
                            add_to_facet_map(&mut facet_map, *field_id, v.clone(), *id)?;
                        }
                    }
                    v => add_to_facet_map(&mut facet_map, *field_id, v.clone(), *id)?,
                }
            }
        }
    }
    Ok(facet_map)
}

#[cfg(test)]
mod test {
    use super::*;
    use meilisearch_schema::Schema;

    #[test]
    fn test_facet_key() {
        let mut schema = Schema::new();
        let id = schema.insert_and_index("hello").unwrap();
        let facet_list = [schema.id("hello").unwrap()];
        assert_eq!(
            FacetKey::from_str("hello:12", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "12".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:\"foo bar\"", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "foo bar".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:'foo bar'", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "foo bar".to_string())
        );
        // weird case
        assert_eq!(
            FacetKey::from_str("hello:blabla:machin", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "blabla:machin".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:\"\"", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:'", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "'".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:''", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "".to_string())
        );
        assert!(FacetKey::from_str("hello", &schema, &facet_list).is_err());
        assert!(FacetKey::from_str("toto:12", &schema, &facet_list).is_err());
    }

    #[test]
    fn test_parse_facet_array() {
        use either::Either::{Left, Right};
        let mut schema = Schema::new();
        let _id = schema.insert_and_index("hello").unwrap();
        let facet_list = [schema.id("hello").unwrap()];
        assert_eq!(
            FacetFilter::from_str("[[\"hello:12\"]]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Left(vec![FacetKey(FieldId(0), "12".to_string())])])
        );
        assert_eq!(
            FacetFilter::from_str("[\"hello:12\"]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Right(FacetKey(FieldId(0), "12".to_string()))])
        );
        assert_eq!(
            FacetFilter::from_str("[\"hello:12\", \"hello:13\"]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![
                Right(FacetKey(FieldId(0), "12".to_string())),
                Right(FacetKey(FieldId(0), "13".to_string()))
            ])
        );
        assert_eq!(
            FacetFilter::from_str("[[\"hello:12\", \"hello:13\"]]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Left(vec![
                FacetKey(FieldId(0), "12".to_string()),
                FacetKey(FieldId(0), "13".to_string())
            ])])
        );
        assert_eq!(
            FacetFilter::from_str(
                "[[\"hello:12\", \"hello:13\"], \"hello:14\"]",
                &schema,
                &facet_list
            )
            .unwrap(),
            FacetFilter(vec![
                Left(vec![
                    FacetKey(FieldId(0), "12".to_string()),
                    FacetKey(FieldId(0), "13".to_string())
                ]),
                Right(FacetKey(FieldId(0), "14".to_string()))
            ])
        );

        // invalid array depths
        assert!(FacetFilter::from_str(
            "[[[\"hello:12\", \"hello:13\"], \"hello:14\"]]",
            &schema,
            &facet_list
        )
        .is_err());
        assert!(FacetFilter::from_str(
            "[[[\"hello:12\", \"hello:13\"]], \"hello:14\"]]",
            &schema,
            &facet_list
        )
        .is_err());
        assert!(FacetFilter::from_str("\"hello:14\"", &schema, &facet_list).is_err());

        // unexisting key
        assert!(FacetFilter::from_str("[\"foo:12\"]", &schema, &facet_list).is_err());

        // invalid facet key
        assert!(FacetFilter::from_str("[\"foo=12\"]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"foo12\"]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"\"]", &schema, &facet_list).is_err());

        // empty array error
        assert!(FacetFilter::from_str("[]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"hello:12\", []]", &schema, &facet_list).is_err());
    }
}


@@ -19,14 +19,15 @@ mod ranked_map;
 mod raw_document;
 mod reordered_attrs;
 mod update;

-pub mod settings;
 pub mod criterion;
+pub mod facets;
 pub mod raw_indexer;
+pub mod settings;
 pub mod serde;
 pub mod store;

 pub use self::database::{BoxUpdateFn, Database, DatabaseOptions, MainT, UpdateT};
-pub use self::error::{Error, HeedError, FstError, MResult, pest_error};
+pub use self::error::{Error, HeedError, FstError, MResult, pest_error, FacetError};
 pub use self::filters::Filter;
 pub use self::number::{Number, ParseNumberError};
 pub use self::ranked_map::RankedMap;


@@ -1,66 +1,50 @@
-use std::ops::Range;
+use std::borrow::Cow;
+use std::ops::{Range, Deref};
 use std::time::Duration;

 use crate::database::MainT;
 use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct};
 use crate::{criterion::Criteria, Document, DocumentId};
 use crate::{reordered_attrs::ReorderedAttrs, store, MResult};
+use crate::facets::FacetFilter;
+use either::Either;
+use sdset::SetOperation;

-pub struct QueryBuilder<'c, 'f, 'd> {
+pub struct QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,
     filter: Option<Box<dyn Fn(DocumentId) -> bool + 'f>>,
     distinct: Option<(Box<dyn Fn(DocumentId) -> Option<u64> + 'd>, usize)>,
     timeout: Option<Duration>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    synonyms_store: store::Synonyms,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &'i store::Index,
+    facets: Option<&'fa FacetFilter>,
 }

-impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
-    pub fn new(
-        main: store::Main,
-        postings_lists: store::PostingsLists,
-        documents_fields_counts: store::DocumentsFieldsCounts,
-        synonyms: store::Synonyms,
-        prefix_documents_cache: store::PrefixDocumentsCache,
-        prefix_postings_lists_cache: store::PrefixPostingsListsCache,
-    ) -> QueryBuilder<'c, 'f, 'd> {
+impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
+    pub fn new(index: &'i store::Index) -> Self {
         QueryBuilder::with_criteria(
-            main,
-            postings_lists,
-            documents_fields_counts,
-            synonyms,
-            prefix_documents_cache,
-            prefix_postings_lists_cache,
+            index,
             Criteria::default(),
         )
     }

+    pub fn set_facets(&mut self, facets: Option<&'fa FacetFilter>) {
+        self.facets = facets;
+    }
+
     pub fn with_criteria(
-        main: store::Main,
-        postings_lists: store::PostingsLists,
-        documents_fields_counts: store::DocumentsFieldsCounts,
-        synonyms: store::Synonyms,
-        prefix_documents_cache: store::PrefixDocumentsCache,
-        prefix_postings_lists_cache: store::PrefixPostingsListsCache,
+        index: &'i store::Index,
         criteria: Criteria<'c>,
-    ) -> QueryBuilder<'c, 'f, 'd> {
+    ) -> Self {
         QueryBuilder {
             criteria,
             searchable_attrs: None,
             filter: None,
             distinct: None,
             timeout: None,
-            main_store: main,
-            postings_lists_store: postings_lists,
-            documents_fields_counts_store: documents_fields_counts,
-            synonyms_store: synonyms,
-            prefix_documents_cache_store: prefix_documents_cache,
-            prefix_postings_lists_cache_store: prefix_postings_lists_cache,
+            index,
+            facets: None,
         }
     }

@@ -93,36 +77,70 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
         query: &str,
         range: Range<usize>,
     ) -> MResult<(Vec<Document>, usize)> {
+        let facets_docids = match self.facets {
+            Some(facets) => {
+                let mut ands = Vec::with_capacity(facets.len());
+                let mut ors = Vec::new();
+                for f in facets.deref() {
+                    match f {
+                        Either::Left(keys) => {
+                            ors.reserve(keys.len());
+                            for key in keys {
+                                let docids = self.index.facets.facet_document_ids(reader, &key)?.unwrap_or_default();
+                                ors.push(docids);
+                            }
+                            let sets: Vec<_> = ors.iter().map(Cow::deref).collect();
+                            let or_result = sdset::multi::OpBuilder::from_vec(sets).union().into_set_buf();
+                            ands.push(Cow::Owned(or_result));
+                            ors.clear();
+                        }
+                        Either::Right(key) => {
+                            match self.index.facets.facet_document_ids(reader, &key)? {
+                                Some(docids) => ands.push(docids),
+                                // no candidates for search, early return.
+                                None => return Ok((vec![], 0)),
+                            }
+                        }
+                    };
+                }
+                let ands: Vec<_> = ands.iter().map(Cow::deref).collect();
+                Some(sdset::multi::OpBuilder::from_vec(ands).intersection().into_set_buf())
+            }
+            None => None,
+        };
+
         match self.distinct {
             Some((distinct, distinct_size)) => bucket_sort_with_distinct(
                 reader,
                 query,
                 range,
+                facets_docids,
                 self.filter,
                 distinct,
                 distinct_size,
                 self.criteria,
                 self.searchable_attrs,
-                self.main_store,
-                self.postings_lists_store,
-                self.documents_fields_counts_store,
-                self.synonyms_store,
-                self.prefix_documents_cache_store,
-                self.prefix_postings_lists_cache_store,
+                self.index.main,
+                self.index.postings_lists,
+                self.index.documents_fields_counts,
+                self.index.synonyms,
+                self.index.prefix_documents_cache,
+                self.index.prefix_postings_lists_cache,
             ),
             None => bucket_sort(
                 reader,
                 query,
                 range,
+                facets_docids,
                 self.filter,
                 self.criteria,
                 self.searchable_attrs,
-                self.main_store,
-                self.postings_lists_store,
-                self.documents_fields_counts_store,
-                self.synonyms_store,
-                self.prefix_documents_cache_store,
-                self.prefix_postings_lists_cache_store,
+                self.index.main,
+                self.index.postings_lists,
+                self.index.documents_fields_counts,
+                self.index.synonyms,
+                self.index.prefix_documents_cache,
+                self.index.prefix_postings_lists_cache,
             ),
         }
     }
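Inner (or-ed) groups are resolved with a multi-set union and the outer (and-ed) clauses with a multi-set intersection. A standalone sketch of those two sdset operations, with made-up docid sets:

use sdset::{Set, SetBuf, SetOperation};

fn main() {
    let comedy = SetBuf::new(vec![1u64, 2, 3]).unwrap();
    let horror = SetBuf::new(vec![3u64, 4]).unwrap();
    let cult = SetBuf::new(vec![2u64, 3, 4]).unwrap();

    // inner OR group: comedy ∪ horror = {1, 2, 3, 4}
    let ors: Vec<&Set<u64>> = vec![comedy.as_set(), horror.as_set()];
    let or_result = sdset::multi::OpBuilder::from_vec(ors).union().into_set_buf();

    // outer AND: (comedy ∪ horror) ∩ cult = {2, 3, 4}
    let ands: Vec<&Set<u64>> = vec![or_result.as_set(), cult.as_set()];
    let candidates = sdset::multi::OpBuilder::from_vec(ands)
        .intersection()
        .into_set_buf();

    assert_eq!(candidates.as_slice(), &[2u64, 3, 4][..]);
}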


@@ -31,6 +31,8 @@ pub struct Settings {
     pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
     pub accept_new_fields: Option<Option<bool>>,
+    #[serde(default, deserialize_with = "deserialize_some")]
+    pub attributes_for_faceting: Option<Option<Vec<String>>>,
 }

 // Any value that is present is considered Some value, including null.

@@ -60,6 +62,7 @@ impl Settings {
             stop_words: settings.stop_words.into(),
             synonyms: settings.synonyms.into(),
             accept_new_fields: settings.accept_new_fields.into(),
+            attributes_for_faceting: settings.attributes_for_faceting.into(),
         })
     }
 }

@@ -166,6 +169,7 @@ pub struct SettingsUpdate {
     pub stop_words: UpdateState<BTreeSet<String>>,
     pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
     pub accept_new_fields: UpdateState<bool>,
+    pub attributes_for_faceting: UpdateState<Vec<String>>,
 }

 impl Default for SettingsUpdate {

@@ -179,6 +183,7 @@ impl Default for SettingsUpdate {
             stop_words: UpdateState::Nothing,
             synonyms: UpdateState::Nothing,
             accept_new_fields: UpdateState::Nothing,
+            attributes_for_faceting: UpdateState::Nothing,
         }
     }
 }
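The double `Option` combined with `deserialize_some` distinguishes an absent field (outer `None`: leave the setting unchanged) from an explicit null (`Some(None)`: clear the setting). A minimal sketch of the pattern with a hypothetical standalone struct; the `deserialize_some` body shown here is the usual implementation of this idiom, not copied from this hunk:

use serde::{Deserialize, Deserializer};

// Any value that is present is considered Some, including null.
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    T::deserialize(deserializer).map(Some)
}

#[derive(Deserialize, Debug)]
struct PartialSettings {
    #[serde(default, deserialize_with = "deserialize_some")]
    attributes_for_faceting: Option<Option<Vec<String>>>,
}

fn main() {
    // field absent -> None: leave the setting unchanged
    let absent: PartialSettings = serde_json::from_str("{}").unwrap();
    assert_eq!(absent.attributes_for_faceting, None);

    // explicit null -> Some(None): clear the setting
    let cleared: PartialSettings =
        serde_json::from_str(r#"{"attributes_for_faceting": null}"#).unwrap();
    assert_eq!(cleared.attributes_for_faceting, Some(None));

    // value -> Some(Some(_)): replace the setting
    let set: PartialSettings =
        serde_json::from_str(r#"{"attributes_for_faceting": ["genre"]}"#).unwrap();
    assert_eq!(set.attributes_for_faceting, Some(Some(vec!["genre".to_string()])));
}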


@@ -0,0 +1,32 @@
use std::borrow::Cow;

use heed::{types::CowSlice, BytesEncode, BytesDecode};
use sdset::{Set, SetBuf};
use zerocopy::{AsBytes, FromBytes};

pub struct CowSet<T>(std::marker::PhantomData<T>);

impl<'a, T: 'a> BytesEncode<'a> for CowSet<T>
where
    T: AsBytes,
{
    type EItem = Set<T>;

    fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<[u8]>> {
        CowSlice::bytes_encode(item.as_slice())
    }
}

impl<'a, T: 'a> BytesDecode<'a> for CowSet<T>
where
    T: FromBytes + Copy,
{
    type DItem = Cow<'a, Set<T>>;

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        match CowSlice::<T>::bytes_decode(bytes)? {
            Cow::Owned(vec) => Some(Cow::Owned(SetBuf::new_unchecked(vec))),
            Cow::Borrowed(slice) => Some(Cow::Borrowed(Set::new_unchecked(slice))),
        }
    }
}
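A round-trip sketch of the codec, using u32 ids as a stand-in for `DocumentId` and assuming `CowSlice` persists the slice as the raw bytes of its elements through zerocopy; decoding can then borrow the stored buffer when it is suitably aligned and falls back to an owned copy otherwise:

use heed::{BytesEncode, BytesDecode};
use sdset::SetBuf;

fn demo() {
    let ids = SetBuf::new(vec![1u32, 5, 9]).unwrap();

    // encode: just the raw element bytes
    let bytes = CowSet::<u32>::bytes_encode(ids.as_set()).unwrap();
    assert_eq!(bytes.len(), 3 * std::mem::size_of::<u32>());

    // decode: Cow::Borrowed when aligned, Cow::Owned otherwise;
    // new_unchecked is sound because encoding started from a valid Set
    let decoded = CowSet::<u32>::bytes_decode(&bytes).unwrap();
    assert_eq!(decoded.as_slice(), ids.as_slice());
}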


@@ -0,0 +1,53 @@
use std::borrow::Cow;
use std::collections::HashMap;

use heed::{RwTxn, RoTxn, Result as ZResult};
use sdset::{SetBuf, Set, SetOperation};

use meilisearch_types::DocumentId;

use crate::database::MainT;
use crate::facets::FacetKey;
use super::cow_set::CowSet;

/// contains facet info
#[derive(Clone, Copy)]
pub struct Facets {
    pub(crate) facets: heed::Database<FacetKey, CowSet<DocumentId>>,
}

impl Facets {
    // we use sdset::SetBuf to ensure the docids are sorted.
    pub fn put_facet_document_ids(&self, writer: &mut RwTxn<MainT>, facet_key: FacetKey, doc_ids: &Set<DocumentId>) -> ZResult<()> {
        self.facets.put(writer, &facet_key, doc_ids)
    }

    pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> ZResult<Option<Cow<'txn, Set<DocumentId>>>> {
        self.facets.get(reader, &facet_key)
    }

    /// updates the facets store, removing the documents from the facets provided in the
    /// `facet_map` argument
    pub fn remove(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
        for (key, document_ids) in facet_map {
            if let Some(old) = self.facets.get(writer, &key)? {
                let to_remove = SetBuf::from_dirty(document_ids);
                let new = sdset::duo::OpBuilder::new(old.as_ref(), to_remove.as_set()).difference().into_set_buf();
                self.facets.put(writer, &key, new.as_set())?;
            }
        }
        Ok(())
    }

    pub fn add(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
        for (key, document_ids) in facet_map {
            let set = SetBuf::from_dirty(document_ids);
            self.put_facet_document_ids(writer, key, set.as_set())?;
        }
        Ok(())
    }

    pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
        self.facets.clear(writer)
    }
}
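`SetBuf::from_dirty` sorts and deduplicates the incoming ids, which is what makes the plain set difference in `remove` (and the sorted-set invariant of the stored values) valid. A standalone sketch with made-up ids:

use sdset::{SetBuf, SetOperation};

fn demo() {
    // from_dirty sorts and deduplicates
    let old = SetBuf::from_dirty(vec![4u64, 1, 3, 1]);
    assert_eq!(old.as_slice(), &[1u64, 3, 4][..]);

    let to_remove = SetBuf::from_dirty(vec![3u64, 3]);
    let new = sdset::duo::OpBuilder::new(old.as_set(), to_remove.as_set())
        .difference()
        .into_set_buf();
    assert_eq!(new.as_slice(), &[1u64, 4][..]);
}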


@@ -1,16 +1,20 @@
+use std::borrow::Cow;
 use std::sync::Arc;
 use std::collections::HashMap;

 use chrono::{DateTime, Utc};
 use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
 use heed::Result as ZResult;
-use meilisearch_schema::Schema;
+use meilisearch_schema::{FieldId, Schema};
+use sdset::Set;

 use crate::database::MainT;
 use crate::RankedMap;
 use crate::settings::RankingRule;
+use super::cow_set::CowSet;

 const CREATED_AT_KEY: &str = "created-at";
+const ATTRIBUTES_FOR_FACETING: &str = "attributes-for-faceting";
 const RANKING_RULES_KEY: &str = "ranking-rules";
 const DISTINCT_ATTRIBUTE_KEY: &str = "distinct-attribute";
 const STOP_WORDS_KEY: &str = "stop-words";

@@ -188,6 +192,18 @@ impl Main {
         }
     }

+    pub fn attributes_for_faceting<'txn>(&self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<Cow<'txn, Set<FieldId>>>> {
+        self.main.get::<_, Str, CowSet<FieldId>>(reader, ATTRIBUTES_FOR_FACETING)
+    }
+
+    pub fn put_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>, attributes: &Set<FieldId>) -> ZResult<()> {
+        self.main.put::<_, Str, CowSet<FieldId>>(writer, ATTRIBUTES_FOR_FACETING, attributes)
+    }
+
+    pub fn delete_attributes_for_faceting(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
+        self.main.delete::<_, Str>(writer, ATTRIBUTES_FOR_FACETING)
+    }
+
     pub fn ranking_rules(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Vec<RankingRule>>> {
         self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY)
     }


@ -1,3 +1,4 @@
mod cow_set;
mod docs_words; mod docs_words;
mod prefix_documents_cache; mod prefix_documents_cache;
mod prefix_postings_lists_cache; mod prefix_postings_lists_cache;
@ -8,8 +9,10 @@ mod postings_lists;
mod synonyms; mod synonyms;
mod updates; mod updates;
mod updates_results; mod updates_results;
mod facets;
pub use self::docs_words::DocsWords; pub use self::docs_words::DocsWords;
pub use self::facets::Facets;
pub use self::prefix_documents_cache::PrefixDocumentsCache; pub use self::prefix_documents_cache::PrefixDocumentsCache;
pub use self::prefix_postings_lists_cache::PrefixPostingsListsCache; pub use self::prefix_postings_lists_cache::PrefixPostingsListsCache;
pub use self::documents_fields::{DocumentFieldsIter, DocumentsFields}; pub use self::documents_fields::{DocumentFieldsIter, DocumentsFields};
@ -42,7 +45,7 @@ use crate::settings::SettingsUpdate;
use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MResult}; use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MResult};
type BEU64 = zerocopy::U64<byteorder::BigEndian>; type BEU64 = zerocopy::U64<byteorder::BigEndian>;
type BEU16 = zerocopy::U16<byteorder::BigEndian>; pub type BEU16 = zerocopy::U16<byteorder::BigEndian>;
#[derive(Debug, Copy, Clone, AsBytes, FromBytes)] #[derive(Debug, Copy, Clone, AsBytes, FromBytes)]
#[repr(C)] #[repr(C)]
@ -197,12 +200,17 @@ fn updates_results_name(name: &str) -> String {
format!("store-{}-updates-results", name) format!("store-{}-updates-results", name)
} }
fn facets_name(name: &str) -> String {
format!("store-{}-facets", name)
}
#[derive(Clone)] #[derive(Clone)]
pub struct Index { pub struct Index {
pub main: Main, pub main: Main,
pub postings_lists: PostingsLists, pub postings_lists: PostingsLists,
pub documents_fields: DocumentsFields, pub documents_fields: DocumentsFields,
pub documents_fields_counts: DocumentsFieldsCounts, pub documents_fields_counts: DocumentsFieldsCounts,
pub facets: Facets,
pub synonyms: Synonyms, pub synonyms: Synonyms,
pub docs_words: DocsWords, pub docs_words: DocsWords,
pub prefix_documents_cache: PrefixDocumentsCache, pub prefix_documents_cache: PrefixDocumentsCache,
@ -352,29 +360,14 @@ impl Index {
} }
pub fn query_builder(&self) -> QueryBuilder { pub fn query_builder(&self) -> QueryBuilder {
QueryBuilder::new( QueryBuilder::new(self)
self.main,
self.postings_lists,
self.documents_fields_counts,
self.synonyms,
self.prefix_documents_cache,
self.prefix_postings_lists_cache,
)
} }
pub fn query_builder_with_criteria<'c, 'f, 'd>( pub fn query_builder_with_criteria<'c, 'f, 'd, 'fa, 'i>(
&self, &'i self,
criteria: Criteria<'c>, criteria: Criteria<'c>,
) -> QueryBuilder<'c, 'f, 'd> { ) -> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
QueryBuilder::with_criteria( QueryBuilder::with_criteria(self, criteria)
self.main,
self.postings_lists,
self.documents_fields_counts,
self.synonyms,
self.prefix_documents_cache,
self.prefix_postings_lists_cache,
criteria,
)
} }
} }
@@ -395,12 +388,14 @@ pub fn create(
    let prefix_postings_lists_cache_name = prefix_postings_lists_cache_name(name);
    let updates_name = updates_name(name);
    let updates_results_name = updates_results_name(name);
+   let facets_name = facets_name(name);

    // open all the stores
    let main = env.create_poly_database(Some(&main_name))?;
    let postings_lists = env.create_database(Some(&postings_lists_name))?;
    let documents_fields = env.create_database(Some(&documents_fields_name))?;
    let documents_fields_counts = env.create_database(Some(&documents_fields_counts_name))?;
+   let facets = env.create_database(Some(&facets_name))?;
    let synonyms = env.create_database(Some(&synonyms_name))?;
    let docs_words = env.create_database(Some(&docs_words_name))?;
    let prefix_documents_cache = env.create_database(Some(&prefix_documents_cache_name))?;

@@ -417,6 +412,8 @@ pub fn create(
        docs_words: DocsWords { docs_words },
        prefix_postings_lists_cache: PrefixPostingsListsCache { prefix_postings_lists_cache },
        prefix_documents_cache: PrefixDocumentsCache { prefix_documents_cache },
+       facets: Facets { facets },
        updates: Updates { updates },
        updates_results: UpdatesResults { updates_results },
        updates_notifier,

@@ -437,6 +434,7 @@ pub fn open(
    let synonyms_name = synonyms_name(name);
    let docs_words_name = docs_words_name(name);
    let prefix_documents_cache_name = prefix_documents_cache_name(name);
+   let facets_name = facets_name(name);
    let prefix_postings_lists_cache_name = prefix_postings_lists_cache_name(name);
    let updates_name = updates_name(name);
    let updates_results_name = updates_results_name(name);

@@ -470,6 +468,10 @@ pub fn open(
        Some(prefix_documents_cache) => prefix_documents_cache,
        None => return Ok(None),
    };
+   let facets = match env.open_database(Some(&facets_name))? {
+       Some(facets) => facets,
+       None => return Ok(None),
+   };
    let prefix_postings_lists_cache = match env.open_database(Some(&prefix_postings_lists_cache_name))? {
        Some(prefix_postings_lists_cache) => prefix_postings_lists_cache,
        None => return Ok(None),

@@ -491,6 +493,7 @@ pub fn open(
        synonyms: Synonyms { synonyms },
        docs_words: DocsWords { docs_words },
        prefix_documents_cache: PrefixDocumentsCache { prefix_documents_cache },
+       facets: Facets { facets },
        prefix_postings_lists_cache: PrefixPostingsListsCache { prefix_postings_lists_cache },
        updates: Updates { updates },
        updates_results: UpdatesResults { updates_results },
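A note on the pattern above: create builds every store eagerly, while open returns Ok(None) as soon as one named database is missing. A minimal sketch of heed's open-or-create behaviour (the store name and key/value types below are placeholders, not from this diff):

use heed::types::ByteSlice;

fn open_or_create(env: &heed::Env) -> heed::Result<heed::Database<ByteSlice, ByteSlice>> {
    // open_database returns Ok(None) when the database does not exist yet;
    // create_database makes it on the fly
    match env.open_database(Some("store-test-facets"))? {
        Some(db) => Ok(db),
        None => env.create_database(Some("store-test-facets")),
    }
}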


@@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize};

use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter};
+use crate::facets;
use crate::raw_indexer::RawIndexer;
use crate::serde::{extract_document_id, serialize_value_with_id, Deserializer, Serializer};
use crate::store;

@@ -103,10 +104,11 @@ pub fn push_documents_addition<D: serde::Serialize>(
    Ok(last_update_id)
}

-pub fn apply_documents_addition<'a, 'b>(
+pub fn apply_addition<'a, 'b>(
    writer: &'a mut heed::RwTxn<'b, MainT>,
    index: &store::Index,
    addition: Vec<IndexMap<String, serde_json::Value>>,
+   partial: bool
) -> MResult<()> {
    let mut documents_additions = HashMap::new();
@@ -118,12 +120,30 @@ pub fn apply_documents_addition<'a, 'b>(
    let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;

    // 1. store documents ids for future deletion
-   for document in addition {
+   for mut document in addition {
        let document_id = match extract_document_id(&primary_key, &document)? {
            Some(id) => id,
            None => return Err(Error::MissingDocumentId),
        };

+       if partial {
+           let mut deserializer = Deserializer {
+               document_id,
+               reader: writer,
+               documents_fields: index.documents_fields,
+               schema: &schema,
+               fields: None,
+           };
+
+           // retrieve the old document and
+           // update the new one with missing keys found in the old one
+           let result = Option::<HashMap<String, serde_json::Value>>::deserialize(&mut deserializer)?;
+           if let Some(old_document) = result {
+               for (key, value) in old_document {
+                   document.entry(key).or_insert(value);
+               }
+           }
+       }
        documents_additions.insert(document_id, document);
    }

@@ -143,6 +163,11 @@ pub fn apply_documents_addition<'a, 'b>(
    };

    // 3. index the documents fields in the stores
+   if let Some(attributes_for_facetting) = index.main.attributes_for_faceting(writer)? {
+       let facet_map = facets::facet_map_from_docs(&schema, &documents_additions, attributes_for_facetting.as_ref())?;
+       index.facets.add(writer, facet_map)?;
+   }
+
    let mut indexer = RawIndexer::new(stop_words);

    for (document_id, document) in documents_additions {
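The partial branch above backfills fields from the stored document before reinserting. A self-contained sketch of that merge rule, using the same IndexMap entry API (the helper name and data are mine, not from this diff):

use indexmap::IndexMap;
use serde_json::{json, Value};

// fields present in the partial update win; missing ones are backfilled
fn merge_partial(mut new: IndexMap<String, Value>, old: IndexMap<String, Value>) -> IndexMap<String, Value> {
    for (key, value) in old {
        new.entry(key).or_insert(value);
    }
    new
}

fn main() {
    let old: IndexMap<String, Value> =
        [("id".into(), json!(1)), ("color".into(), json!("blue"))].into_iter().collect();
    let new: IndexMap<String, Value> =
        [("id".into(), json!(1)), ("tags".into(), json!(["bug"]))].into_iter().collect();
    let merged = merge_partial(new, old);
    assert_eq!(merged["color"], json!("blue")); // kept from the old document
    assert_eq!(merged["tags"], json!(["bug"])); // taken from the partial update
}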
@@ -177,85 +202,15 @@ pub fn apply_documents_partial_addition<'a, 'b>(
    index: &store::Index,
    addition: Vec<IndexMap<String, serde_json::Value>>,
) -> MResult<()> {
-   let mut documents_additions = HashMap::new();
-
-   let mut schema = match index.main.schema(writer)? {
-       Some(schema) => schema,
-       None => return Err(Error::SchemaMissing),
-   };
-
-   let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;
-
-   // 1. store documents ids for future deletion
-   for mut document in addition {
-       let document_id = match extract_document_id(&primary_key, &document)? {
-           Some(id) => id,
-           None => return Err(Error::MissingDocumentId),
-       };
-
-       let mut deserializer = Deserializer {
-           document_id,
-           reader: writer,
-           documents_fields: index.documents_fields,
-           schema: &schema,
-           fields: None,
-       };
-
-       // retrieve the old document and
-       // update the new one with missing keys found in the old one
-       let result = Option::<HashMap<String, serde_json::Value>>::deserialize(&mut deserializer)?;
-       if let Some(old_document) = result {
-           for (key, value) in old_document {
-               document.entry(key).or_insert(value);
-           }
-       }
-
-       documents_additions.insert(document_id, document);
-   }
-
-   // 2. remove the documents posting lists
-   let number_of_inserted_documents = documents_additions.len();
-   let documents_ids = documents_additions.iter().map(|(id, _)| *id).collect();
-   apply_documents_deletion(writer, index, documents_ids)?;
-
-   let mut ranked_map = match index.main.ranked_map(writer)? {
-       Some(ranked_map) => ranked_map,
-       None => RankedMap::default(),
-   };
-
-   let stop_words = match index.main.stop_words_fst(writer)? {
-       Some(stop_words) => stop_words,
-       None => fst::Set::default(),
-   };
-
-   // 3. index the documents fields in the stores
-   let mut indexer = RawIndexer::new(stop_words);
-
-   for (document_id, document) in documents_additions {
-       let serializer = Serializer {
-           txn: writer,
-           schema: &mut schema,
-           document_store: index.documents_fields,
-           document_fields_counts: index.documents_fields_counts,
-           indexer: &mut indexer,
-           ranked_map: &mut ranked_map,
-           document_id,
-       };
-
-       document.serialize(serializer)?;
-   }
-
-   write_documents_addition_index(
-       writer,
-       index,
-       &ranked_map,
-       number_of_inserted_documents,
-       indexer,
-   )?;
-
-   index.main.put_schema(writer, &schema)?;
-
-   Ok(())
+   apply_addition(writer, index, addition, true)
+}
+
+pub fn apply_documents_addition<'a, 'b>(
+   writer: &'a mut heed::RwTxn<'b, MainT>,
+   index: &store::Index,
+   addition: Vec<IndexMap<String, serde_json::Value>>,
+) -> MResult<()> {
+   apply_addition(writer, index, addition, false)
}

pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
@@ -277,6 +232,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
    index.main.put_words_fst(writer, &fst::Set::default())?;
    index.main.put_ranked_map(writer, &ranked_map)?;
    index.main.put_number_of_documents(writer, |_| 0)?;
+   index.facets.clear(writer)?;
    index.postings_lists.clear(writer)?;
    index.docs_words.clear(writer)?;

@@ -289,6 +245,11 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
    let mut indexer = RawIndexer::new(stop_words);
    let mut ram_store = HashMap::new();

+   if let Some(ref attributes_for_facetting) = index.main.attributes_for_faceting(writer)? {
+       let facet_map = facets::facet_map_from_docids(writer, &index, &documents_ids_to_reindex, &attributes_for_facetting)?;
+       index.facets.add(writer, facet_map)?;
+   }
+   // ^-- https://github.com/meilisearch/MeiliSearch/pull/631#issuecomment-626624470 --v
    for document_id in documents_ids_to_reindex {
        for result in index.documents_fields.document_fields(writer, document_id)? {
            let (field_id, bytes) = result?;


@@ -6,6 +6,7 @@ use sdset::{duo::DifferenceByKey, SetBuf, SetOperation};

use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter};
+use crate::facets;
use crate::serde::extract_document_id;
use crate::store;
use crate::update::{next_update_id, compute_short_prefixes, Update};

@@ -88,8 +89,6 @@ pub fn apply_documents_deletion(
    index: &store::Index,
    deletion: Vec<DocumentId>,
) -> MResult<()> {
-   let idset = SetBuf::from_dirty(deletion);
-
    let schema = match index.main.schema(writer)? {
        Some(schema) => schema,
        None => return Err(Error::SchemaMissing),

@@ -100,9 +99,16 @@ pub fn apply_documents_deletion(
        None => RankedMap::default(),
    };

+   // facet filters deletion
+   if let Some(attributes_for_facetting) = index.main.attributes_for_faceting(writer)? {
+       let facet_map = facets::facet_map_from_docids(writer, &index, &deletion, &attributes_for_facetting)?;
+       index.facets.remove(writer, facet_map)?;
+   }
+
    // collect the ranked attributes according to the schema
    let ranked_fields = schema.ranked();

+   let idset = SetBuf::from_dirty(deletion);
    let mut words_document_ids = HashMap::new();
    for id in idset {
        // remove all the ranked attributes from the ranked_map
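The idset is now built after the facet pass because facet_map_from_docids borrows the deletion Vec, which SetBuf::from_dirty then consumes. A small standalone sketch of from_dirty's sort-and-dedup behaviour (the example values are mine):

use sdset::SetBuf;

fn main() {
    // from_dirty sorts and deduplicates, so deletion ids may arrive in any order
    let idset = SetBuf::from_dirty(vec![3u32, 1, 2, 3]);
    assert_eq!(idset.as_slice(), &[1, 2, 3]);
}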


@@ -102,6 +102,18 @@ pub fn apply_settings_update(
        UpdateState::Nothing => (),
    }

+   match settings.attributes_for_faceting {
+       UpdateState::Update(attrs) => {
+           apply_attributes_for_faceting_update(writer, index, &mut schema, &attrs)?;
+           must_reindex = true;
+       },
+       UpdateState::Clear => {
+           index.main.delete_attributes_for_faceting(writer)?;
+           index.facets.clear(writer)?;
+       },
+       UpdateState::Nothing => (),
+   }
+
    index.main.put_schema(writer, &schema)?;

    match settings.stop_words {

@@ -131,6 +143,21 @@ pub fn apply_settings_update(
    Ok(())
}

+fn apply_attributes_for_faceting_update(
+   writer: &mut heed::RwTxn<MainT>,
+   index: &store::Index,
+   schema: &mut Schema,
+   attributes: &[String]
+) -> MResult<()> {
+   let mut attribute_ids = Vec::new();
+   for name in attributes {
+       attribute_ids.push(schema.insert(name)?);
+   }
+   let attributes_for_faceting = SetBuf::from_dirty(attribute_ids);
+   index.main.put_attributes_for_faceting(writer, &attributes_for_faceting)?;
+   Ok(())
+}
+
pub fn apply_stop_words_update(
    writer: &mut heed::RwTxn<MainT>,
    index: &store::Index,
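For reference, a settings payload that takes the Update branch above, sketched with serde_json; sending null presumably maps to Clear, which is consistent with the "attributesForFaceting": null defaults asserted in the tests further down:

use serde_json::json;

fn main() {
    // UpdateState::Update(["color", "tags"]); also forces a reindex
    let update = json!({ "attributesForFaceting": ["color", "tags"] });
    // presumably UpdateState::Clear: drop the attribute list and clear the facets store
    let clear = json!({ "attributesForFaceting": null });
    println!("{} {}", update, clear);
}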


@@ -23,6 +23,7 @@ pub enum ResponseError {
    FilterParsing(String),
    RetrieveDocument(u64, String),
    SearchDocuments(String),
+   FacetExpression(String),
}

impl ResponseError {

@@ -106,6 +107,7 @@ impl fmt::Display for ResponseError {
            Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
            Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
            Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
+           Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e),
        }
    }
@@ -118,13 +120,14 @@ impl aweb::error::ResponseError for ResponseError {
    }

    fn status_code(&self) -> StatusCode {
        match *self {
            Self::BadParameter(_, _)
            | Self::BadRequest(_)
            | Self::CreateIndex(_)
            | Self::InvalidIndexUid
            | Self::OpenIndex(_)
            | Self::RetrieveDocument(_, _)
+           | Self::FacetExpression(_)
            | Self::SearchDocuments(_)
            | Self::FilterParsing(_) => StatusCode::BAD_REQUEST,
            Self::DocumentNotFound(_)
@@ -151,6 +154,12 @@ impl From<meilisearch_core::FstError> for ResponseError {
    }
}

+impl From<meilisearch_core::FacetError> for ResponseError {
+   fn from(error: meilisearch_core::FacetError) -> ResponseError {
+       ResponseError::FacetExpression(error.to_string())
+   }
+}
+
impl From<meilisearch_core::Error> for ResponseError {
    fn from(err: meilisearch_core::Error) -> ResponseError {
        use meilisearch_core::pest_error::LineColLocation::*;

@@ -164,6 +173,7 @@ impl From<meilisearch_core::Error> for ResponseError {
                ResponseError::FilterParsing(message)
            },
+           meilisearch_core::Error::FacetError(e) => ResponseError::FacetExpression(e.to_string()),
            _ => ResponseError::Internal(err.to_string()),
        }
    }
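These conversions are what let route handlers bubble facet errors up with the ? operator alone. A toy, self-contained illustration of the mechanism (both types here are stand-ins, not the real ones):

struct FacetError(String);
struct ResponseError(String);

impl From<FacetError> for ResponseError {
    fn from(e: FacetError) -> ResponseError {
        ResponseError(format!("error parsing facet filter expression: {}", e.0))
    }
}

// `?` inserts the From conversion automatically when the error types differ
fn handler(r: Result<(), FacetError>) -> Result<(), ResponseError> {
    r?;
    Ok(())
}

fn main() {
    assert!(handler(Err(FacetError("bad".into()))).is_err());
}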


@@ -6,6 +6,7 @@ use std::time::Instant;

use indexmap::IndexMap;
use log::error;
use meilisearch_core::Filter;
+use meilisearch_core::facets::FacetFilter;
use meilisearch_core::criterion::*;
use meilisearch_core::settings::RankingRule;
use meilisearch_core::{Highlight, Index, MainT, RankedMap};

@@ -34,6 +35,7 @@ impl IndexSearchExt for Index {
            attributes_to_highlight: None,
            filters: None,
            matches: false,
+           facet_filters: None,
        }
    }
}

@@ -48,6 +50,7 @@ pub struct SearchBuilder<'a> {
    attributes_to_highlight: Option<HashSet<String>>,
    filters: Option<String>,
    matches: bool,
+   facet_filters: Option<FacetFilter>,
}

impl<'a> SearchBuilder<'a> {

@@ -82,6 +85,11 @@ impl<'a> SearchBuilder<'a> {
        self
    }

+   pub fn add_facet_filters(&mut self, filters: FacetFilter) -> &SearchBuilder {
+       self.facet_filters = Some(filters);
+       self
+   }
+
    pub fn filters(&mut self, value: String) -> &SearchBuilder {
        self.filters = Some(value);
        self

@@ -138,6 +146,8 @@ impl<'a> SearchBuilder<'a> {
        }
    }

+   query_builder.set_facets(self.facet_filters.as_ref());
+
    let start = Instant::now();
    let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
    let (docs, nb_hits) = result.map_err(ResponseError::search_documents)?;


@@ -12,6 +12,8 @@ use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;

+use meilisearch_core::facets::FacetFilter;
+
pub fn services(cfg: &mut web::ServiceConfig) {
    cfg.service(search_with_url_query);
}

@@ -28,6 +30,7 @@ struct SearchQuery {
    attributes_to_highlight: Option<String>,
    filters: Option<String>,
    matches: Option<bool>,
+   facet_filters: Option<String>,
}

#[get("/indexes/{index_uid}/search", wrap = "Authentication::Public")]

@@ -81,6 +84,13 @@ async fn search_with_url_query(
        }
    }

+   if let Some(ref facet_filters) = params.facet_filters {
+       match index.main.attributes_for_faceting(&reader)? {
+           Some(ref attrs) => { search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, attrs)?); },
+           None => return Err(ResponseError::FacetExpression("can't filter on facets, as no facet is set".to_string()))
+       }
+   }
+
    if let Some(attributes_to_crop) = &params.attributes_to_crop {
        let default_length = params.crop_length.unwrap_or(200);
        let mut final_attributes: HashMap<String, usize> = HashMap::new();
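The facetFilters parameter is a JSON string handed to FacetFilter::from_str. Judging from the tests below (an inference from this diff, not official documentation), the outer array is an AND, a nested array is an OR, and deeper nesting is rejected; the accepted shapes, sketched with serde_json:

use serde_json::json;

fn main() {
    let single = json!(["color:blue"]);                                  // one condition
    let and = json!(["color:blue", "tags:bug"]);                         // AND
    let or = json!([["color:blue", "color:green"]]);                     // OR
    let and_of_or = json!(["tags:bug", ["color:blue", "color:green"]]);  // AND over an OR
    println!("{} {} {} {}", single, and, or, and_of_or);
}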


@@ -91,6 +91,17 @@ async fn get_all(

    let schema = index.main.schema(&reader)?;

+   let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(&reader)?) {
+       (Some(schema), Some(attrs)) => {
+           Some(attrs
+               .iter()
+               .filter_map(|&id| schema.name(id))
+               .map(str::to_string)
+               .collect())
+       }
+       _ => None,
+   };
+
    let searchable_attributes = schema.clone().map(|s| {
        s.indexed_name()
            .iter()

@@ -115,6 +126,7 @@ async fn get_all(
        stop_words: Some(Some(stop_words)),
        synonyms: Some(Some(synonyms)),
        accept_new_fields: Some(accept_new_fields),
+       attributes_for_faceting: Some(attributes_for_faceting),
    };

    Ok(HttpResponse::Ok().json(settings))

@@ -140,6 +152,7 @@ async fn delete_all(
        stop_words: UpdateState::Clear,
        synonyms: UpdateState::Clear,
        accept_new_fields: UpdateState::Clear,
+       attributes_for_faceting: UpdateState::Clear,
    };

    let update_id = index.settings_update(&mut writer, settings)?;

File diff suppressed because it is too large


@@ -39,6 +39,74 @@ impl Server {
        }
    }
pub async fn test_server() -> Self {
let mut server = Self::with_uid("test");
let body = json!({
"uid": "test",
"primaryKey": "id",
});
server.create_index(body).await;
let body = json!({
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
],
"searchableAttributes": [
"balance",
"picture",
"age",
"color",
"name",
"gender",
"email",
"phone",
"address",
"about",
"registered",
"latitude",
"longitude",
"tags",
],
"displayedAttributes": [
"id",
"isActive",
"balance",
"picture",
"age",
"color",
"name",
"gender",
"email",
"phone",
"address",
"about",
"registered",
"latitude",
"longitude",
"tags",
],
"acceptNewFields": false,
});
server.update_all_settings(body).await;
let dataset = include_bytes!("assets/test_set.json");
let body: Value = serde_json::from_slice(dataset).unwrap();
server.add_or_replace_multiple_documents(body).await;
server
}
    pub async fn wait_update_id(&mut self, update_id: u64) {
        loop {
            let (response, status_code) = self.get_update_status(update_id).await;

@@ -90,6 +158,7 @@ impl Server {
        eprintln!("post_request_async: {}", url);

        let (response, status_code) = self.post_request(url, body).await;
+       eprintln!("response: {}", response);
        assert_eq!(status_code, 202);
        assert!(response["updateId"].as_u64().is_some());
        self.wait_update_id(response["updateId"].as_u64().unwrap())


@@ -2,6 +2,7 @@ use std::convert::Into;

use assert_json_diff::assert_json_eq;
use serde_json::json;
+use serde_json::Value;

mod common;

@@ -1133,3 +1134,161 @@ async fn search_with_differents_attributes_8() {
    let (response, _status_code) = server.search(query).await;
    assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
#[actix_rt::test]
async fn test_faceted_search_valid() {
let mut server = common::Server::test_server().await;
// simple tests on attributes with string value
let body = json!({
"attributesForFaceting": ["color"]
});
server.update_all_settings(body).await;
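    // the percent-encoded queries below decode to plain JSON, e.g. ["color:green"]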
let query = "q=a&facetFilters=%5B%22color%3Agreen%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "green"));
let query = "q=a&facetFilters=%5B%22color%3Ablue%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "blue"));
    // test case insensitive: ["color:Blue"]
let query = "q=a&facetFilters=%5B%22color%3ABlue%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "blue"));
// test on arrays: ["tags:bug"]
let body = json!({
"attributesForFaceting": ["color", "tags"]
});
server.update_all_settings(body).await;
let query = "q=a&facetFilters=%5B%22tags%3Abug%22%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
// test and: ["color:blue", "tags:bug"]
let query = "q=a&facetFilters=%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value| value
.get("color")
.unwrap() == "blue"
&& value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
// test or: [["color:blue", "color:green"]]
let query = "q=a&facetFilters=%5B%5B%22color%3Ablue%22,%20%22color%3Agreen%22%5D%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value|
value
.get("color")
.unwrap() == "blue"
|| value
.get("color")
.unwrap() == "green"));
// test and-or: ["tags:bug", ["color:blue", "color:green"]]
let query = "q=a&facetFilters=%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D";
let (response, _status_code) = server.search(query).await;
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
assert!(response
.get("hits")
.unwrap()
.as_array()
.unwrap()
.iter()
.all(|value|
value
.get("tags")
.unwrap()
.as_array()
.unwrap()
.contains(&Value::String("bug".to_owned()))
&& (value
.get("color")
.unwrap() == "blue"
|| value
.get("color")
.unwrap() == "green")));
}
#[actix_rt::test]
async fn test_faceted_search_invalid() {
let mut server = common::Server::test_server().await;
    // no faceted attributes set
let query = "q=a&facetFilters=%5B%22color%3Ablue%22,%20%22tags%3Abug%22%20%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
let body = json!({
"attributesForFaceting": ["color", "tags"]
});
server.update_all_settings(body).await;
    // empty arrays are an error
// []
let query = "q=a&facetFilters=%5B%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// [[]]
let query = "q=a&facetFilters=%5B%5B%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// ["color:green", []]
let query = "q=a&facetFilters=%5B%22color%3Agreen%22,%20%5B%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// too much depth
// [[[]]]
let query = "q=a&facetFilters=%5B%5B%5B%5D%5D%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// [["color:green", ["color:blue"]]]
let query = "q=a&facetFilters=%5B%5B%22color%3Agreen%22,%20%5B%22color%3Ablue%22%5D%5D%5D";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
// "color:green"
let query = "q=a&facetFilters=%22color%3Agreen%22";
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
}


@ -8,7 +8,6 @@ mod common;
async fn write_all_and_delete() { async fn write_all_and_delete() {
let mut server = common::Server::with_uid("movies"); let mut server = common::Server::with_uid("movies");
server.populate_movies().await; server.populate_movies().await;
// 2 - Send the settings // 2 - Send the settings
let body = json!({ let body = json!({
@ -48,9 +47,11 @@ async fn write_all_and_delete() {
"wolverine": ["xmen", "logan"], "wolverine": ["xmen", "logan"],
"logan": ["wolverine"], "logan": ["wolverine"],
}, },
"attributesForFaceting": ["title"],
"acceptNewFields": false, "acceptNewFields": false,
}); });
server.update_all_settings(body.clone()).await; server.update_all_settings(body.clone()).await;
// 3 - Get all settings and compare to the previous one // 3 - Get all settings and compare to the previous one
@@ -119,6 +120,7 @@ async fn write_all_and_delete() {
        ],
        "stopWords": [],
        "synonyms": {},
+       "attributesForFaceting": null,
        "acceptNewFields": true,
    });

@@ -169,6 +171,7 @@ async fn write_all_and_update() {
            "wolverine": ["xmen", "logan"],
            "logan": ["wolverine"],
        },
+       "attributesForFaceting": ["title"],
        "acceptNewFields": false,
    });

@@ -210,6 +213,7 @@ async fn write_all_and_update() {
            "wolverine": ["xmen", "logan"],
            "logan": ["wolverine", "xmen"],
        },
+       "attributesForFaceting": ["title"],
        "acceptNewFields": false,
    });

@@ -247,6 +251,7 @@ async fn write_all_and_update() {
            "wolverine": ["xmen", "logan"],
            "logan": ["wolverine", "xmen"],
        },
+       "attributesForFaceting": ["title"],
        "acceptNewFields": false
    });

@@ -277,6 +282,7 @@ async fn test_default_settings() {
        "displayedAttributes": [],
        "stopWords": [],
        "synonyms": {},
+       "attributesForFaceting": null,
        "acceptNewFields": true,
    });

@@ -314,6 +320,7 @@ async fn test_default_settings_2() {
        ],
        "stopWords": [],
        "synonyms": {},
+       "attributesForFaceting": null,
        "acceptNewFields": true,
    });

@@ -421,6 +428,7 @@ async fn write_setting_and_update_partial() {
            "wolverine": ["xmen", "logan"],
            "logan": ["wolverine"],
        },
+       "attributesForFaceting": null,
        "acceptNewFields": false,
    });


@@ -341,6 +341,7 @@ async fn accept_new_fields_does_not_take_into_account_the_primary_key() {
        "displayedAttributes": ["title"],
        "stopWords": [],
        "synonyms": {},
+       "attributesForFaceting": null,
        "acceptNewFields": false,
    });


@@ -11,3 +11,4 @@ indexmap = { version = "1.3.2", features = ["serde-1"] }
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.50", features = ["preserve_order"] }
toml = { version = "0.5.6", features = ["preserve_order"] }
+zerocopy = "0.3.0"


@@ -6,6 +6,7 @@ pub use error::{Error, SResult};
pub use fields_map::FieldsMap;
pub use schema::Schema;
use serde::{Deserialize, Serialize};
+use zerocopy::{AsBytes, FromBytes};

#[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct IndexedPos(pub u16);

@@ -36,7 +37,10 @@ impl Into<u16> for IndexedPos {
    }
}

-#[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
+#[derive(Serialize, Deserialize)]
+#[derive(AsBytes, FromBytes)]
+#[repr(C)]
pub struct FieldId(pub u16);

impl FieldId {

@@ -63,8 +67,8 @@ impl From<u16> for FieldId {
    }
}

-impl Into<u16> for FieldId {
-   fn into(self) -> u16 {
-       self.0
+impl From<FieldId> for u16 {
+   fn from(other: FieldId) -> u16 {
+       other.0
    }
}
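FieldId now derives zerocopy's AsBytes and FromBytes under repr(C), presumably so field ids can serve directly as raw LMDB key bytes without an encoding step. A minimal standalone sketch of what AsBytes provides (zerocopy 0.3 era API; the example is mine):

use zerocopy::AsBytes;

#[derive(AsBytes)]
#[repr(C)]
struct FieldId(u16);

fn main() {
    // view the id as its raw native-endian bytes, with no copy or serialization
    let id = FieldId(42);
    let bytes: &[u8] = id.as_bytes();
    assert_eq!(bytes, &42u16.to_ne_bytes());
}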