Merge pull request #103 from meilisearch/plane-sweep-proximity

Plane-Sweep proximity
Clément Renault 2021-03-08 16:58:34 +01:00 committed by GitHub
commit f9be3ad3fd
9 changed files with 531 additions and 144 deletions
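
The PR carries no description beyond its title, so here is a minimal, hedged sketch of the plane-sweep idea that the new resolve_plane_sweep_candidates function below implements: for each candidate document, walk the sorted position lists of the query words in lockstep, keep the smallest gap found, and bucket documents by that best proximity. The function name and the exact gap rule below are illustrative only, not the PR's code.

// Toy two-word version of the sweep (illustrative, not milli's exact rule):
// advance whichever pointer sits at the smaller position and remember the
// smallest gap seen, clamped to 7 like the proximity criterion does.
fn best_pair_proximity(left: &[u32], right: &[u32]) -> Option<u32> {
    let (mut i, mut j) = (0, 0);
    let mut best: Option<u32> = None;
    while i < left.len() && j < right.len() {
        let (l, r) = (left[i], right[j]);
        // a reversed pair (right word appearing before the left one) costs one extra
        let gap = if l < r { r - l } else { l - r + 1 };
        let gap = gap.min(7);
        best = Some(best.map_or(gap, |b| b.min(gap)));
        if l <= r { i += 1 } else { j += 1 }
    }
    best
}

fn main() {
    // "new" at positions 0 and 6, "york" at position 7 => best proximity 1
    assert_eq!(best_pair_proximity(&[0, 6], &[7]), Some(1));
}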

Cargo.lock (generated, 161 lines changed)

@ -53,8 +53,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2925c4c290382f9d2fa3d1c1b6a63fa1427099721ecca4749b154cc9c25522" checksum = "ca2925c4c290382f9d2fa3d1c1b6a63fa1427099721ecca4749b154cc9c25522"
dependencies = [ dependencies = [
"askama_shared", "askama_shared",
"proc-macro2", "proc-macro2 1.0.24",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -74,10 +74,10 @@ dependencies = [
"nom", "nom",
"num-traits", "num-traits",
"percent-encoding", "percent-encoding",
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"serde", "serde",
"syn", "syn 1.0.60",
"toml", "toml",
] ]
@ -622,9 +622,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c287d25add322d9f9abdcdc5927ca398917996600182178774032e9f8258fedd" checksum = "c287d25add322d9f9abdcdc5927ca398917996600182178774032e9f8258fedd"
dependencies = [ dependencies = [
"proc-macro-hack", "proc-macro-hack",
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -1184,6 +1184,28 @@ dependencies = [
"cfg-if 1.0.0", "cfg-if 1.0.0",
] ]
[[package]]
name = "logging_timer"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40d0c249955c17c2f8f86b5f501b16d2509ebbe775f7b1d1d2b1ba85ade2a793"
dependencies = [
"log",
"logging_timer_proc_macros",
]
[[package]]
name = "logging_timer_proc_macros"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "482c2c28e6bcfe7c4274f82f701774d755e6aa873edfd619460fcd0966e0eb07"
dependencies = [
"log",
"proc-macro2 0.4.30",
"quote 0.6.13",
"syn 0.15.44",
]
[[package]] [[package]]
name = "loom" name = "loom"
version = "0.4.0" version = "0.4.0"
@ -1269,6 +1291,7 @@ dependencies = [
"levenshtein_automata", "levenshtein_automata",
"linked-hash-map", "linked-hash-map",
"log", "log",
"logging_timer",
"maplit", "maplit",
"meilisearch-tokenizer", "meilisearch-tokenizer",
"memmap", "memmap",
@ -1288,6 +1311,7 @@ dependencies = [
"smallstr", "smallstr",
"smallvec", "smallvec",
"tempfile", "tempfile",
"tinytemplate",
"uuid", "uuid",
] ]
@ -1553,9 +1577,9 @@ checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55"
dependencies = [ dependencies = [
"pest 2.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "pest 2.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"pest_meta", "pest_meta",
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -1631,9 +1655,9 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895" checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -1642,9 +1666,9 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -1712,9 +1736,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [ dependencies = [
"proc-macro-error-attr", "proc-macro-error-attr",
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
"version_check", "version_check",
] ]
@ -1724,8 +1748,8 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"version_check", "version_check",
] ]
@ -1741,13 +1765,22 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086"
[[package]]
name = "proc-macro2"
version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
dependencies = [
"unicode-xid 0.1.0",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.24" version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
dependencies = [ dependencies = [
"unicode-xid", "unicode-xid 0.2.1",
] ]
[[package]] [[package]]
@ -1756,13 +1789,22 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
dependencies = [
"proc-macro2 0.4.30",
]
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.9" version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
] ]
[[package]] [[package]]
@ -2046,9 +2088,9 @@ version = "1.0.123"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9391c295d64fc0abb2c556bad848f33cb8296276b1ad2677d1ae1ace4f258f31" checksum = "9391c295d64fc0abb2c556bad848f33cb8296276b1ad2677d1ae1ace4f258f31"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -2198,9 +2240,20 @@ checksum = "5ba9cdfda491b814720b6b06e0cac513d922fc407582032e8706e9f137976f90"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro-error", "proc-macro-error",
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
]
[[package]]
name = "syn"
version = "0.15.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"unicode-xid 0.1.0",
] ]
[[package]] [[package]]
@ -2209,9 +2262,9 @@ version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081" checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"unicode-xid", "unicode-xid 0.2.1",
] ]
[[package]] [[package]]
@ -2229,10 +2282,10 @@ version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701" checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
"unicode-xid", "unicode-xid 0.2.1",
] ]
[[package]] [[package]]
@ -2305,9 +2358,9 @@ dependencies = [
[[package]] [[package]]
name = "tinytemplate" name = "tinytemplate"
version = "1.2.0" version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2ada8616fad06a2d0c455adc530de4ef57605a8120cc65da9653e0e9623ca74" checksum = "6d3dc76004a03cec1c5932bca4cdc2e39aaa798e3f82363dd94f9adf6098c12f"
dependencies = [ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
@ -2358,9 +2411,9 @@ version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a" checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
] ]
[[package]] [[package]]
@ -2521,6 +2574,12 @@ version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
version = "0.2.1" version = "0.2.1"
@ -2652,9 +2711,9 @@ dependencies = [
"bumpalo", "bumpalo",
"lazy_static", "lazy_static",
"log", "log",
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -2664,7 +2723,7 @@ version = "0.2.70"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b8853882eef39593ad4174dd26fc9865a64e84026d223f63bb2c42affcbba2c" checksum = "3b8853882eef39593ad4174dd26fc9865a64e84026d223f63bb2c42affcbba2c"
dependencies = [ dependencies = [
"quote", "quote 1.0.9",
"wasm-bindgen-macro-support", "wasm-bindgen-macro-support",
] ]
@ -2674,9 +2733,9 @@ version = "0.2.70"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4133b5e7f2a531fa413b3a1695e925038a05a71cf67e87dafa295cb645a01385" checksum = "4133b5e7f2a531fa413b3a1695e925038a05a71cf67e87dafa295cb645a01385"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"quote", "quote 1.0.9",
"syn", "syn 1.0.60",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -2781,8 +2840,8 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb" checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2 1.0.24",
"syn", "syn 1.0.60",
"synstructure", "synstructure",
] ]


@ -45,6 +45,11 @@ itertools = "0.10.0"
# logging # logging
log = "0.4.14" log = "0.4.14"
logging_timer = "1.0.0"
# We temporarily depend on this crate just to fix this issue
# https://github.com/bheisler/TinyTemplate/pull/17
tinytemplate = "=1.1.0"
[dev-dependencies] [dev-dependencies]
criterion = "0.3.4" criterion = "0.3.4"
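
As context for the new logging_timer dependency above: it provides an attribute macro that wraps a function and reports its elapsed time through the log facade, which is how this PR instruments every criterion's next() call (see the #[logging_timer::time("AscDesc::{}")] attributes below). A small standalone illustration, assuming only the attribute form that the diff itself uses:

// The "{}" in the pattern is replaced with the function name, mirroring the
// "AscDesc::{}" / "Proximity::{}" patterns added in this PR. With a logger
// such as env_logger installed, a debug-level timer record is emitted when
// the function returns.
#[logging_timer::time("demo::{}")]
fn slow_sum(n: u64) -> u64 {
    (0..n).sum()
}

fn main() {
    println!("{}", slow_sum(1_000_000));
}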


@ -15,6 +15,7 @@ use crate::heed_codec::facet::{FieldDocIdFacetI64Codec, FieldDocIdFacetF64Codec}
use crate::search::criteria::{resolve_query_tree, CriteriaBuilder}; use crate::search::criteria::{resolve_query_tree, CriteriaBuilder};
use crate::search::facet::FacetIter; use crate::search::facet::FacetIter;
use crate::search::query_tree::Operation; use crate::search::query_tree::Operation;
use crate::search::WordDerivationsCache;
use crate::{FieldsIdsMap, FieldId, Index}; use crate::{FieldsIdsMap, FieldId, Index};
use super::{Criterion, CriterionResult}; use super::{Criterion, CriterionResult};
@ -92,7 +93,7 @@ impl<'t> AscDesc<'t> {
let candidates = match &query_tree { let candidates = match &query_tree {
Some(qt) => { Some(qt) => {
let context = CriteriaBuilder::new(rtxn, index)?; let context = CriteriaBuilder::new(rtxn, index)?;
let mut qt_candidates = resolve_query_tree(&context, qt, &mut HashMap::new())?; let mut qt_candidates = resolve_query_tree(&context, qt, &mut HashMap::new(), &mut WordDerivationsCache::new())?;
if let Some(candidates) = candidates { if let Some(candidates) = candidates {
qt_candidates.intersect_with(&candidates); qt_candidates.intersect_with(&candidates);
} }
@ -145,7 +146,8 @@ impl<'t> AscDesc<'t> {
} }
impl<'t> Criterion for AscDesc<'t> { impl<'t> Criterion for AscDesc<'t> {
fn next(&mut self) -> anyhow::Result<Option<CriterionResult>> { #[logging_timer::time("AscDesc::{}")]
fn next(&mut self, wdcache: &mut WordDerivationsCache) -> anyhow::Result<Option<CriterionResult>> {
loop { loop {
debug!("Facet {}({}) iteration", debug!("Facet {}({}) iteration",
if self.ascending { "Asc" } else { "Desc" }, self.field_name if self.ascending { "Asc" } else { "Desc" }, self.field_name
@ -157,7 +159,7 @@ impl<'t> Criterion for AscDesc<'t> {
let bucket_candidates = take(&mut self.bucket_candidates); let bucket_candidates = take(&mut self.bucket_candidates);
match self.parent.as_mut() { match self.parent.as_mut() {
Some(parent) => { Some(parent) => {
match parent.next()? { match parent.next(wdcache)? {
Some(CriterionResult { query_tree, mut candidates, bucket_candidates }) => { Some(CriterionResult { query_tree, mut candidates, bucket_candidates }) => {
self.query_tree = query_tree; self.query_tree = query_tree;
candidates.intersect_with(&self.faceted_candidates); candidates.intersect_with(&self.faceted_candidates);


@ -5,6 +5,7 @@ use log::debug;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use crate::search::query_tree::Operation; use crate::search::query_tree::Operation;
use crate::search::WordDerivationsCache;
use super::{resolve_query_tree, Candidates, Criterion, CriterionResult, Context}; use super::{resolve_query_tree, Candidates, Criterion, CriterionResult, Context};
pub struct Fetcher<'t> { pub struct Fetcher<'t> {
@ -47,7 +48,8 @@ impl<'t> Fetcher<'t> {
} }
impl<'t> Criterion for Fetcher<'t> { impl<'t> Criterion for Fetcher<'t> {
fn next(&mut self) -> anyhow::Result<Option<CriterionResult>> { #[logging_timer::time("Fetcher::{}")]
fn next(&mut self, wdcache: &mut WordDerivationsCache) -> anyhow::Result<Option<CriterionResult>> {
use Candidates::{Allowed, Forbidden}; use Candidates::{Allowed, Forbidden};
loop { loop {
debug!("Fetcher iteration (should_get_documents_ids: {}) ({:?})", debug!("Fetcher iteration (should_get_documents_ids: {}) ({:?})",
@ -60,7 +62,7 @@ impl<'t> Criterion for Fetcher<'t> {
let candidates = take(&mut self.candidates).into_inner(); let candidates = take(&mut self.candidates).into_inner();
let candidates = match &self.query_tree { let candidates = match &self.query_tree {
Some(qt) if should_get_documents_ids => { Some(qt) if should_get_documents_ids => {
let mut docids = resolve_query_tree(self.ctx, &qt, &mut HashMap::new())?; let mut docids = resolve_query_tree(self.ctx, &qt, &mut HashMap::new(), wdcache)?;
docids.intersect_with(&candidates); docids.intersect_with(&candidates);
docids docids
}, },
@ -76,11 +78,11 @@ impl<'t> Criterion for Fetcher<'t> {
Forbidden(_) => { Forbidden(_) => {
match self.parent.as_mut() { match self.parent.as_mut() {
Some(parent) => { Some(parent) => {
match parent.next()? { match parent.next(wdcache)? {
Some(result) => return Ok(Some(result)), Some(result) => return Ok(Some(result)),
None => if should_get_documents_ids { None => if should_get_documents_ids {
let candidates = match &self.query_tree { let candidates = match &self.query_tree {
Some(qt) => resolve_query_tree(self.ctx, &qt, &mut HashMap::new())?, Some(qt) => resolve_query_tree(self.ctx, &qt, &mut HashMap::new(), wdcache)?,
None => self.ctx.documents_ids()?, None => self.ctx.documents_ids()?,
}; };
@ -94,7 +96,7 @@ impl<'t> Criterion for Fetcher<'t> {
}, },
None => if should_get_documents_ids { None => if should_get_documents_ids {
let candidates = match &self.query_tree { let candidates = match &self.query_tree {
Some(qt) => resolve_query_tree(self.ctx, &qt, &mut HashMap::new())?, Some(qt) => resolve_query_tree(self.ctx, &qt, &mut HashMap::new(), wdcache)?,
None => self.ctx.documents_ids()?, None => self.ctx.documents_ids()?,
}; };


@ -4,8 +4,8 @@ use std::borrow::Cow;
use anyhow::bail; use anyhow::bail;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use crate::search::word_derivations; use crate::search::{word_derivations, WordDerivationsCache};
use crate::Index; use crate::{Index, DocumentId};
use super::query_tree::{Operation, Query, QueryKind}; use super::query_tree::{Operation, Query, QueryKind};
use self::typo::Typo; use self::typo::Typo;
@ -21,7 +21,7 @@ pub mod proximity;
pub mod fetcher; pub mod fetcher;
pub trait Criterion { pub trait Criterion {
fn next(&mut self) -> anyhow::Result<Option<CriterionResult>>; fn next(&mut self, wdcache: &mut WordDerivationsCache) -> anyhow::Result<Option<CriterionResult>>;
} }
/// The result of a call to the parent criterion. /// The result of a call to the parent criterion.
@ -66,6 +66,7 @@ pub trait Context {
fn word_prefix_pair_proximity_docids(&self, left: &str, right: &str, proximity: u8) -> heed::Result<Option<RoaringBitmap>>; fn word_prefix_pair_proximity_docids(&self, left: &str, right: &str, proximity: u8) -> heed::Result<Option<RoaringBitmap>>;
fn words_fst<'t>(&self) -> &'t fst::Set<Cow<[u8]>>; fn words_fst<'t>(&self) -> &'t fst::Set<Cow<[u8]>>;
fn in_prefix_cache(&self, word: &str) -> bool; fn in_prefix_cache(&self, word: &str) -> bool;
fn docid_word_positions(&self, docid: DocumentId, word: &str) -> heed::Result<Option<RoaringBitmap>>;
} }
pub struct CriteriaBuilder<'t> { pub struct CriteriaBuilder<'t> {
rtxn: &'t heed::RoTxn<'t>, rtxn: &'t heed::RoTxn<'t>,
@ -104,6 +105,11 @@ impl<'a> Context for CriteriaBuilder<'a> {
fn in_prefix_cache(&self, word: &str) -> bool { fn in_prefix_cache(&self, word: &str) -> bool {
self.words_prefixes_fst.contains(word) self.words_prefixes_fst.contains(word)
} }
fn docid_word_positions(&self, docid: DocumentId, word: &str) -> heed::Result<Option<RoaringBitmap>> {
let key = (docid, word);
self.index.docid_word_positions.get(self.rtxn, &key)
}
} }
impl<'t> CriteriaBuilder<'t> { impl<'t> CriteriaBuilder<'t> {
@ -158,12 +164,14 @@ pub fn resolve_query_tree<'t>(
ctx: &'t dyn Context, ctx: &'t dyn Context,
query_tree: &Operation, query_tree: &Operation,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap> ) -> anyhow::Result<RoaringBitmap>
{ {
fn resolve_operation<'t>( fn resolve_operation<'t>(
ctx: &'t dyn Context, ctx: &'t dyn Context,
query_tree: &Operation, query_tree: &Operation,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap> ) -> anyhow::Result<RoaringBitmap>
{ {
use Operation::{And, Consecutive, Or, Query}; use Operation::{And, Consecutive, Or, Query};
@ -171,7 +179,7 @@ pub fn resolve_query_tree<'t>(
match query_tree { match query_tree {
And(ops) => { And(ops) => {
let mut ops = ops.iter().map(|op| { let mut ops = ops.iter().map(|op| {
resolve_operation(ctx, op, cache) resolve_operation(ctx, op, cache, wdcache)
}).collect::<anyhow::Result<Vec<_>>>()?; }).collect::<anyhow::Result<Vec<_>>>()?;
ops.sort_unstable_by_key(|cds| cds.len()); ops.sort_unstable_by_key(|cds| cds.len());
@ -194,7 +202,7 @@ pub fn resolve_query_tree<'t>(
for slice in ops.windows(2) { for slice in ops.windows(2) {
match (&slice[0], &slice[1]) { match (&slice[0], &slice[1]) {
(Operation::Query(left), Operation::Query(right)) => { (Operation::Query(left), Operation::Query(right)) => {
match query_pair_proximity_docids(ctx, left, right, 1)? { match query_pair_proximity_docids(ctx, left, right, 1, wdcache)? {
pair_docids if pair_docids.is_empty() => { pair_docids if pair_docids.is_empty() => {
return Ok(RoaringBitmap::new()) return Ok(RoaringBitmap::new())
}, },
@ -215,16 +223,16 @@ pub fn resolve_query_tree<'t>(
Or(_, ops) => { Or(_, ops) => {
let mut candidates = RoaringBitmap::new(); let mut candidates = RoaringBitmap::new();
for op in ops { for op in ops {
let docids = resolve_operation(ctx, op, cache)?; let docids = resolve_operation(ctx, op, cache, wdcache)?;
candidates.union_with(&docids); candidates.union_with(&docids);
} }
Ok(candidates) Ok(candidates)
}, },
Query(q) => Ok(query_docids(ctx, q)?), Query(q) => Ok(query_docids(ctx, q, wdcache)?),
} }
} }
resolve_operation(ctx, query_tree, cache) resolve_operation(ctx, query_tree, cache, wdcache)
} }
@ -233,7 +241,8 @@ fn all_word_pair_proximity_docids<T: AsRef<str>, U: AsRef<str>>(
left_words: &[(T, u8)], left_words: &[(T, u8)],
right_words: &[(U, u8)], right_words: &[(U, u8)],
proximity: u8 proximity: u8
) -> anyhow::Result<RoaringBitmap> { ) -> anyhow::Result<RoaringBitmap>
{
let mut docids = RoaringBitmap::new(); let mut docids = RoaringBitmap::new();
for (left, _l_typo) in left_words { for (left, _l_typo) in left_words {
for (right, _r_typo) in right_words { for (right, _r_typo) in right_words {
@ -244,13 +253,18 @@ fn all_word_pair_proximity_docids<T: AsRef<str>, U: AsRef<str>>(
Ok(docids) Ok(docids)
} }
fn query_docids(ctx: &dyn Context, query: &Query) -> anyhow::Result<RoaringBitmap> { fn query_docids(
ctx: &dyn Context,
query: &Query,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap>
{
match &query.kind { match &query.kind {
QueryKind::Exact { word, .. } => { QueryKind::Exact { word, .. } => {
if query.prefix && ctx.in_prefix_cache(&word) { if query.prefix && ctx.in_prefix_cache(&word) {
Ok(ctx.word_prefix_docids(&word)?.unwrap_or_default()) Ok(ctx.word_prefix_docids(&word)?.unwrap_or_default())
} else if query.prefix { } else if query.prefix {
let words = word_derivations(&word, true, 0, ctx.words_fst())?; let words = word_derivations(&word, true, 0, ctx.words_fst(), wdcache)?;
let mut docids = RoaringBitmap::new(); let mut docids = RoaringBitmap::new();
for (word, _typo) in words { for (word, _typo) in words {
let current_docids = ctx.word_docids(&word)?.unwrap_or_default(); let current_docids = ctx.word_docids(&word)?.unwrap_or_default();
@ -262,7 +276,7 @@ fn query_docids(ctx: &dyn Context, query: &Query) -> anyhow::Result<RoaringBitma
} }
}, },
QueryKind::Tolerant { typo, word } => { QueryKind::Tolerant { typo, word } => {
let words = word_derivations(&word, query.prefix, *typo, ctx.words_fst())?; let words = word_derivations(&word, query.prefix, *typo, ctx.words_fst(), wdcache)?;
let mut docids = RoaringBitmap::new(); let mut docids = RoaringBitmap::new();
for (word, _typo) in words { for (word, _typo) in words {
let current_docids = ctx.word_docids(&word)?.unwrap_or_default(); let current_docids = ctx.word_docids(&word)?.unwrap_or_default();
@ -273,10 +287,17 @@ fn query_docids(ctx: &dyn Context, query: &Query) -> anyhow::Result<RoaringBitma
} }
} }
fn query_pair_proximity_docids(ctx: &dyn Context, left: &Query, right: &Query, proximity: u8) -> anyhow::Result<RoaringBitmap> { fn query_pair_proximity_docids(
ctx: &dyn Context,
left: &Query,
right: &Query,
proximity: u8,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap>
{
if proximity >= 8 { if proximity >= 8 {
let mut candidates = query_docids(ctx, left)?; let mut candidates = query_docids(ctx, left, wdcache)?;
let right_candidates = query_docids(ctx, right)?; let right_candidates = query_docids(ctx, right, wdcache)?;
candidates.intersect_with(&right_candidates); candidates.intersect_with(&right_candidates);
return Ok(candidates); return Ok(candidates);
} }
@ -287,14 +308,14 @@ fn query_pair_proximity_docids(ctx: &dyn Context, left: &Query, right: &Query, p
if prefix && ctx.in_prefix_cache(&right) { if prefix && ctx.in_prefix_cache(&right) {
Ok(ctx.word_prefix_pair_proximity_docids(left.as_str(), right.as_str(), proximity)?.unwrap_or_default()) Ok(ctx.word_prefix_pair_proximity_docids(left.as_str(), right.as_str(), proximity)?.unwrap_or_default())
} else if prefix { } else if prefix {
let r_words = word_derivations(&right, true, 0, ctx.words_fst())?; let r_words = word_derivations(&right, true, 0, ctx.words_fst(), wdcache)?;
all_word_pair_proximity_docids(ctx, &[(left, 0)], &r_words, proximity) all_word_pair_proximity_docids(ctx, &[(left, 0)], &r_words, proximity)
} else { } else {
Ok(ctx.word_pair_proximity_docids(left.as_str(), right.as_str(), proximity)?.unwrap_or_default()) Ok(ctx.word_pair_proximity_docids(left.as_str(), right.as_str(), proximity)?.unwrap_or_default())
} }
}, },
(QueryKind::Tolerant { typo, word: left }, QueryKind::Exact { word: right, .. }) => { (QueryKind::Tolerant { typo, word: left }, QueryKind::Exact { word: right, .. }) => {
let l_words = word_derivations(&left, false, *typo, ctx.words_fst())?; let l_words = word_derivations(&left, false, *typo, ctx.words_fst(), wdcache)?.to_owned();
if prefix && ctx.in_prefix_cache(&right) { if prefix && ctx.in_prefix_cache(&right) {
let mut docids = RoaringBitmap::new(); let mut docids = RoaringBitmap::new();
for (left, _) in l_words { for (left, _) in l_words {
@ -303,19 +324,19 @@ fn query_pair_proximity_docids(ctx: &dyn Context, left: &Query, right: &Query, p
} }
Ok(docids) Ok(docids)
} else if prefix { } else if prefix {
let r_words = word_derivations(&right, true, 0, ctx.words_fst())?; let r_words = word_derivations(&right, true, 0, ctx.words_fst(), wdcache)?;
all_word_pair_proximity_docids(ctx, &l_words, &r_words, proximity) all_word_pair_proximity_docids(ctx, &l_words, &r_words, proximity)
} else { } else {
all_word_pair_proximity_docids(ctx, &l_words, &[(right, 0)], proximity) all_word_pair_proximity_docids(ctx, &l_words, &[(right, 0)], proximity)
} }
}, },
(QueryKind::Exact { word: left, .. }, QueryKind::Tolerant { typo, word: right }) => { (QueryKind::Exact { word: left, .. }, QueryKind::Tolerant { typo, word: right }) => {
let r_words = word_derivations(&right, prefix, *typo, ctx.words_fst())?; let r_words = word_derivations(&right, prefix, *typo, ctx.words_fst(), wdcache)?;
all_word_pair_proximity_docids(ctx, &[(left, 0)], &r_words, proximity) all_word_pair_proximity_docids(ctx, &[(left, 0)], &r_words, proximity)
}, },
(QueryKind::Tolerant { typo: l_typo, word: left }, QueryKind::Tolerant { typo: r_typo, word: right }) => { (QueryKind::Tolerant { typo: l_typo, word: left }, QueryKind::Tolerant { typo: r_typo, word: right }) => {
let l_words = word_derivations(&left, false, *l_typo, ctx.words_fst())?; let l_words = word_derivations(&left, false, *l_typo, ctx.words_fst(), wdcache)?.to_owned();
let r_words = word_derivations(&right, prefix, *r_typo, ctx.words_fst())?; let r_words = word_derivations(&right, prefix, *r_typo, ctx.words_fst(), wdcache)?;
all_word_pair_proximity_docids(ctx, &l_words, &r_words, proximity) all_word_pair_proximity_docids(ctx, &l_words, &r_words, proximity)
}, },
} }
@ -368,6 +389,10 @@ pub mod test {
fn in_prefix_cache(&self, word: &str) -> bool { fn in_prefix_cache(&self, word: &str) -> bool {
self.word_prefix_docids.contains_key(&word.to_string()) self.word_prefix_docids.contains_key(&word.to_string())
} }
fn docid_word_positions(&self, _docid: DocumentId, _word: &str) -> heed::Result<Option<RoaringBitmap>> {
todo!()
}
} }
impl<'a> Default for TestContext<'a> { impl<'a> Default for TestContext<'a> {
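
The WordDerivationsCache type threaded through Criterion::next() and word_derivations() is defined outside this diff (in search/mod.rs). A plausible shape, consistent with how it is used here (word_derivations takes &mut cache, returns a borrowable slice of (derived_word, number_of_typos), and callers key derivations by word, prefix flag and typo budget), is sketched below; the field layout and the get_or_insert_with helper are assumptions, not the crate's actual API:

use std::collections::HashMap;

// Assumed sketch: cache word derivations per (word, is_prefix, max_typo) so
// every criterion in the chain reuses them instead of re-walking the FST.
pub struct WordDerivationsCache(HashMap<(String, bool, u8), Vec<(String, u8)>>);

impl WordDerivationsCache {
    pub fn new() -> Self {
        WordDerivationsCache(HashMap::new())
    }

    // Hypothetical helper: compute the derivations once, then hand back a
    // borrowed slice, which is why callers sometimes .to_vec()/.to_owned() it.
    pub fn get_or_insert_with<F>(&mut self, key: (String, bool, u8), f: F) -> &[(String, u8)]
    where
        F: FnOnce() -> Vec<(String, u8)>,
    {
        self.0.entry(key).or_insert_with(f).as_slice()
    }
}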


@ -1,10 +1,14 @@
use std::collections::HashMap; use std::borrow::Cow;
use std::collections::btree_map::{self, BTreeMap};
use std::collections::hash_map::{HashMap, Entry};
use std::mem::take; use std::mem::take;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use log::debug; use log::debug;
use crate::{DocumentId, Position, search::{query_tree::QueryKind, word_derivations}};
use crate::search::query_tree::{maximum_proximity, Operation, Query}; use crate::search::query_tree::{maximum_proximity, Operation, Query};
use crate::search::WordDerivationsCache;
use super::{Candidates, Criterion, CriterionResult, Context, query_docids, query_pair_proximity_docids}; use super::{Candidates, Criterion, CriterionResult, Context, query_docids, query_pair_proximity_docids};
pub struct Proximity<'t> { pub struct Proximity<'t> {
@ -15,6 +19,7 @@ pub struct Proximity<'t> {
bucket_candidates: RoaringBitmap, bucket_candidates: RoaringBitmap,
parent: Option<Box<dyn Criterion + 't>>, parent: Option<Box<dyn Criterion + 't>>,
candidates_cache: HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>, candidates_cache: HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>,
plane_sweep_cache: Option<btree_map::IntoIter<u8, RoaringBitmap>>,
} }
impl<'t> Proximity<'t> { impl<'t> Proximity<'t> {
@ -32,6 +37,7 @@ impl<'t> Proximity<'t> {
bucket_candidates: RoaringBitmap::new(), bucket_candidates: RoaringBitmap::new(),
parent: None, parent: None,
candidates_cache: HashMap::new(), candidates_cache: HashMap::new(),
plane_sweep_cache: None,
} }
} }
@ -44,12 +50,14 @@ impl<'t> Proximity<'t> {
bucket_candidates: RoaringBitmap::new(), bucket_candidates: RoaringBitmap::new(),
parent: Some(parent), parent: Some(parent),
candidates_cache: HashMap::new(), candidates_cache: HashMap::new(),
plane_sweep_cache: None,
} }
} }
} }
impl<'t> Criterion for Proximity<'t> { impl<'t> Criterion for Proximity<'t> {
fn next(&mut self) -> anyhow::Result<Option<CriterionResult>> { #[logging_timer::time("Proximity::{}")]
fn next(&mut self, wdcache: &mut WordDerivationsCache) -> anyhow::Result<Option<CriterionResult>> {
use Candidates::{Allowed, Forbidden}; use Candidates::{Allowed, Forbidden};
loop { loop {
debug!("Proximity at iteration {} (max {:?}) ({:?})", debug!("Proximity at iteration {} (max {:?}) ({:?})",
@ -68,15 +76,44 @@ impl<'t> Criterion for Proximity<'t> {
}, },
(Some((max_prox, query_tree)), Allowed(candidates)) => { (Some((max_prox, query_tree)), Allowed(candidates)) => {
if self.proximity as usize > *max_prox { if self.proximity as usize > *max_prox {
// reset state to (None, Forbidden(_))
self.query_tree = None; self.query_tree = None;
self.candidates = Candidates::default(); self.candidates = Candidates::default();
} else { } else {
let mut new_candidates = resolve_candidates( let mut new_candidates = if candidates.len() <= 1000 {
if let Some(cache) = self.plane_sweep_cache.as_mut() {
match cache.next() {
Some((p, candidates)) => {
self.proximity = p;
candidates
},
None => {
// reset state to (None, Forbidden(_))
self.query_tree = None;
self.candidates = Candidates::default();
continue
},
}
} else {
let cache = resolve_plane_sweep_candidates(
self.ctx,
query_tree,
candidates,
wdcache,
)?;
self.plane_sweep_cache = Some(cache.into_iter());
continue
}
} else { // use set theory based algorithm
resolve_candidates(
self.ctx, self.ctx,
&query_tree, &query_tree,
self.proximity, self.proximity,
&mut self.candidates_cache, &mut self.candidates_cache,
)?; wdcache,
)?
};
new_candidates.intersect_with(&candidates); new_candidates.intersect_with(&candidates);
candidates.difference_with(&new_candidates); candidates.difference_with(&new_candidates);
@ -104,6 +141,7 @@ impl<'t> Criterion for Proximity<'t> {
&query_tree, &query_tree,
self.proximity, self.proximity,
&mut self.candidates_cache, &mut self.candidates_cache,
wdcache,
)?; )?;
new_candidates.difference_with(&candidates); new_candidates.difference_with(&candidates);
@ -133,12 +171,13 @@ impl<'t> Criterion for Proximity<'t> {
(None, Forbidden(_)) => { (None, Forbidden(_)) => {
match self.parent.as_mut() { match self.parent.as_mut() {
Some(parent) => { Some(parent) => {
match parent.next()? { match parent.next(wdcache)? {
Some(CriterionResult { query_tree, candidates, bucket_candidates }) => { Some(CriterionResult { query_tree, candidates, bucket_candidates }) => {
self.query_tree = query_tree.map(|op| (maximum_proximity(&op), op)); self.query_tree = query_tree.map(|op| (maximum_proximity(&op), op));
self.proximity = 0; self.proximity = 0;
self.candidates = Candidates::Allowed(candidates); self.candidates = Candidates::Allowed(candidates);
self.bucket_candidates.union_with(&bucket_candidates); self.bucket_candidates.union_with(&bucket_candidates);
self.plane_sweep_cache = None;
}, },
None => return Ok(None), None => return Ok(None),
} }
@ -156,6 +195,7 @@ fn resolve_candidates<'t>(
query_tree: &Operation, query_tree: &Operation,
proximity: u8, proximity: u8,
cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>, cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap> ) -> anyhow::Result<RoaringBitmap>
{ {
fn resolve_operation<'t>( fn resolve_operation<'t>(
@ -163,27 +203,28 @@ fn resolve_candidates<'t>(
query_tree: &Operation, query_tree: &Operation,
proximity: u8, proximity: u8,
cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>, cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<Vec<(Query, Query, RoaringBitmap)>> ) -> anyhow::Result<Vec<(Query, Query, RoaringBitmap)>>
{ {
use Operation::{And, Consecutive, Or, Query}; use Operation::{And, Consecutive, Or, Query};
let result = match query_tree { let result = match query_tree {
And(ops) => mdfs(ctx, ops, proximity, cache)?, And(ops) => mdfs(ctx, ops, proximity, cache, wdcache)?,
Consecutive(ops) => if proximity == 0 { Consecutive(ops) => if proximity == 0 {
mdfs(ctx, ops, 0, cache)? mdfs(ctx, ops, 0, cache, wdcache)?
} else { } else {
Default::default() Default::default()
}, },
Or(_, ops) => { Or(_, ops) => {
let mut output = Vec::new(); let mut output = Vec::new();
for op in ops { for op in ops {
let result = resolve_operation(ctx, op, proximity, cache)?; let result = resolve_operation(ctx, op, proximity, cache, wdcache)?;
output.extend(result); output.extend(result);
} }
output output
}, },
Query(q) => if proximity == 0 { Query(q) => if proximity == 0 {
let candidates = query_docids(ctx, q)?; let candidates = query_docids(ctx, q, wdcache)?;
vec![(q.clone(), q.clone(), candidates)] vec![(q.clone(), q.clone(), candidates)]
} else { } else {
Default::default() Default::default()
@ -199,6 +240,7 @@ fn resolve_candidates<'t>(
right: &Operation, right: &Operation,
proximity: u8, proximity: u8,
cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>, cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<Vec<(Query, Query, RoaringBitmap)>> ) -> anyhow::Result<Vec<(Query, Query, RoaringBitmap)>>
{ {
fn pair_combinations(mana: u8, left_max: u8) -> impl Iterator<Item = (u8, u8)> { fn pair_combinations(mana: u8, left_max: u8) -> impl Iterator<Item = (u8, u8)> {
@ -213,13 +255,13 @@ fn resolve_candidates<'t>(
for (left_p, right_p) in pair_combinations(left_right_p, left_right_p) { for (left_p, right_p) in pair_combinations(left_right_p, left_right_p) {
let left_key = (left.clone(), left_p); let left_key = (left.clone(), left_p);
if !cache.contains_key(&left_key) { if !cache.contains_key(&left_key) {
let candidates = resolve_operation(ctx, left, left_p, cache)?; let candidates = resolve_operation(ctx, left, left_p, cache, wdcache)?;
cache.insert(left_key.clone(), candidates); cache.insert(left_key.clone(), candidates);
} }
let right_key = (right.clone(), right_p); let right_key = (right.clone(), right_p);
if !cache.contains_key(&right_key) { if !cache.contains_key(&right_key) {
let candidates = resolve_operation(ctx, right, right_p, cache)?; let candidates = resolve_operation(ctx, right, right_p, cache, wdcache)?;
cache.insert(right_key.clone(), candidates); cache.insert(right_key.clone(), candidates);
} }
@ -228,7 +270,7 @@ fn resolve_candidates<'t>(
for (ll, lr, lcandidates) in lefts { for (ll, lr, lcandidates) in lefts {
for (rl, rr, rcandidates) in rights { for (rl, rr, rcandidates) in rights {
let mut candidates = query_pair_proximity_docids(ctx, lr, rl, pair_p + 1)?; let mut candidates = query_pair_proximity_docids(ctx, lr, rl, pair_p + 1, wdcache)?;
if lcandidates.len() < rcandidates.len() { if lcandidates.len() < rcandidates.len() {
candidates.intersect_with(lcandidates); candidates.intersect_with(lcandidates);
candidates.intersect_with(rcandidates); candidates.intersect_with(rcandidates);
@ -252,6 +294,7 @@ fn resolve_candidates<'t>(
branches: &[Operation], branches: &[Operation],
proximity: u8, proximity: u8,
cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>, cache: &mut HashMap<(Operation, u8), Vec<(Query, Query, RoaringBitmap)>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<Vec<(Query, Query, RoaringBitmap)>> ) -> anyhow::Result<Vec<(Query, Query, RoaringBitmap)>>
{ {
// Extract the first two elements but gives the tail // Extract the first two elements but gives the tail
@ -261,13 +304,13 @@ fn resolve_candidates<'t>(
}); });
match next { match next {
Some((head1, Some((head2, [_])))) => mdfs_pair(ctx, head1, head2, proximity, cache), Some((head1, Some((head2, [_])))) => mdfs_pair(ctx, head1, head2, proximity, cache, wdcache),
Some((head1, Some((head2, tail)))) => { Some((head1, Some((head2, tail)))) => {
let mut output = Vec::new(); let mut output = Vec::new();
for p in 0..=proximity { for p in 0..=proximity {
for (lhead, _, head_candidates) in mdfs_pair(ctx, head1, head2, p, cache)? { for (lhead, _, head_candidates) in mdfs_pair(ctx, head1, head2, p, cache, wdcache)? {
if !head_candidates.is_empty() { if !head_candidates.is_empty() {
for (_, rtail, mut candidates) in mdfs(ctx, tail, proximity - p, cache)? { for (_, rtail, mut candidates) in mdfs(ctx, tail, proximity - p, cache, wdcache)? {
candidates.intersect_with(&head_candidates); candidates.intersect_with(&head_candidates);
if !candidates.is_empty() { if !candidates.is_empty() {
output.push((lhead.clone(), rtail, candidates)); output.push((lhead.clone(), rtail, candidates));
@ -278,14 +321,230 @@ fn resolve_candidates<'t>(
} }
Ok(output) Ok(output)
}, },
Some((head1, None)) => resolve_operation(ctx, head1, proximity, cache), Some((head1, None)) => resolve_operation(ctx, head1, proximity, cache, wdcache),
None => return Ok(Default::default()), None => return Ok(Default::default()),
} }
} }
let mut candidates = RoaringBitmap::new(); let mut candidates = RoaringBitmap::new();
for (_, _, cds) in resolve_operation(ctx, query_tree, proximity, cache)? { for (_, _, cds) in resolve_operation(ctx, query_tree, proximity, cache, wdcache)? {
candidates.union_with(&cds); candidates.union_with(&cds);
} }
Ok(candidates) Ok(candidates)
} }
fn resolve_plane_sweep_candidates(
ctx: &dyn Context,
query_tree: &Operation,
allowed_candidates: &RoaringBitmap,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<BTreeMap<u8, RoaringBitmap>>
{
/// FIXME: may be buggy with queries like "new new york"
fn plane_sweep<'a>(
ctx: &dyn Context,
operations: &'a [Operation],
docid: DocumentId,
consecutive: bool,
rocache: &mut HashMap<&'a Operation, Vec<(Position, u8, Position)>>,
dwpcache: &mut HashMap<String, Option<RoaringBitmap>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<Vec<(Position, u8, Position)>>
{
fn compute_groups_proximity(
groups: &[(usize, (Position, u8, Position))],
consecutive: bool,
) -> Option<(Position, u8, Position)>
{
// take the inner proximity of the first group as the initial value
let (_, (_, mut proximity, _)) = groups.first()?;
let (_, (left_most_pos, _, _)) = groups.first()?;
let (_, (_, _, right_most_pos)) = groups.last()?;
for pair in groups.windows(2) {
if let [(i1, (_, _, rpos1)), (i2, (lpos2, prox2, _))] = pair {
// if a pair overlaps, meaning the two groups share at least one word, we return None
if rpos1 >= lpos2 { return None }
// if the groups are in query order we subtract 1 from the proximity
// the proximity is clamped to 7
let pair_proximity = if i1 < i2 {
(*lpos2 - *rpos1 - 1).min(7)
} else {
(*lpos2 - *rpos1).min(7)
};
proximity += pair_proximity as u8 + prox2;
}
}
// if the groups should be consecutive, we only accept a total proximity of 0
if !consecutive || proximity == 0 {
Some((*left_most_pos, proximity, *right_most_pos))
} else {
None
}
}
let groups_len = operations.len();
let mut groups_positions = Vec::with_capacity(groups_len);
for operation in operations {
let positions = resolve_operation(ctx, operation, docid, rocache, dwpcache, wdcache)?;
groups_positions.push(positions.into_iter());
}
// Pop the first element of each list.
let mut current = Vec::with_capacity(groups_len);
for (i, positions) in groups_positions.iter_mut().enumerate() {
match positions.next() {
Some(p) => current.push((i, p)),
// if a list is already empty, the document does not contain all of the query words,
// we return an empty result.
None => return Ok(Vec::new()),
}
}
// Sort k elements by their positions.
current.sort_unstable_by_key(|(_, p)| *p);
// Find leftmost and rightmost group and their positions.
let mut leftmost = *current.first().unwrap();
let mut rightmost = *current.last().unwrap();
let mut output = Vec::new();
loop {
// Find the position p of the next element in the leftmost group's list.
// If the list is empty, break the loop.
let p = groups_positions[leftmost.0].next().map(|p| (leftmost.0, p));
// Let q be the position of the second group in the interval.
let q = current[1];
let mut leftmost_index = 0;
// If p > r, then the interval [l, r] is minimal and we push
// the group it covers (with its proximity) to the output.
if p.map_or(true, |p| p.1 > rightmost.1) {
leftmost_index = current[0].0;
if let Some(group) = compute_groups_proximity(&current, consecutive) {
output.push(group);
}
}
// TODO not sure about breaking here or when the p list is found empty.
let p = match p {
Some(p) => p,
None => break,
};
// Remove the leftmost group P in the interval,
// and pop the same group from a list.
current[leftmost_index] = p;
if p.1 > rightmost.1 {
// if [l, r] is minimal, let r = p and l = q.
rightmost = p;
leftmost = q;
} else {
// Otherwise, let l = min{p, q}.
leftmost = if p.1 < q.1 { p } else { q };
}
// Then update the interval and re-sort the groups it contains by position.
current.sort_unstable_by_key(|(_, p)| *p);
}
// Sort the output by position and proximity.
output.sort_unstable();
Ok(output)
}
fn resolve_operation<'a>(
ctx: &dyn Context,
query_tree: &'a Operation,
docid: DocumentId,
rocache: &mut HashMap<&'a Operation, Vec<(Position, u8, Position)>>,
dwpcache: &mut HashMap<String, Option<RoaringBitmap>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<Vec<(Position, u8, Position)>>
{
use Operation::{And, Consecutive, Or};
if let Some(result) = rocache.get(query_tree) {
return Ok(result.clone());
}
let result = match query_tree {
And(ops) => plane_sweep(ctx, ops, docid, false, rocache, dwpcache, wdcache)?,
Consecutive(ops) => plane_sweep(ctx, ops, docid, true, rocache, dwpcache, wdcache)?,
Or(_, ops) => {
let mut result = Vec::new();
for op in ops {
result.extend(resolve_operation(ctx, op, docid, rocache, dwpcache, wdcache)?)
}
result.sort_unstable();
result
},
Operation::Query(Query {prefix, kind}) => {
let fst = ctx.words_fst();
let words = match kind {
QueryKind::Exact { word, .. } => {
if *prefix {
Cow::Borrowed(word_derivations(word, true, 0, fst, wdcache)?)
} else {
Cow::Owned(vec![(word.to_string(), 0)])
}
},
QueryKind::Tolerant { typo, word } => {
Cow::Borrowed(word_derivations(word, *prefix, *typo, fst, wdcache)?)
}
};
let mut result = Vec::new();
for (word, _) in words.as_ref() {
let positions = match dwpcache.entry(word.to_string()) {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => {
let positions = ctx.docid_word_positions(docid, word)?;
entry.insert(positions)
}
};
if let Some(positions) = positions {
let iter = positions.iter().map(|p| (p, 0, p));
result.extend(iter);
}
}
result.sort_unstable();
result
}
};
rocache.insert(query_tree, result.clone());
Ok(result)
}
let mut word_positions_cache = HashMap::new();
let mut resolve_operation_cache = HashMap::new();
let mut candidates = BTreeMap::new();
for docid in allowed_candidates {
word_positions_cache.clear();
resolve_operation_cache.clear();
let positions = resolve_operation(
ctx,
query_tree,
docid,
&mut resolve_operation_cache,
&mut word_positions_cache,
wdcache,
)?;
let best_proximity = positions.into_iter().min_by_key(|(_, proximity, _)| *proximity);
let best_proximity = best_proximity.map(|(_, proximity, _)| proximity).unwrap_or(7);
candidates.entry(best_proximity).or_insert_with(RoaringBitmap::new).insert(docid);
}
Ok(candidates)
}
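
Design note: the criterion above only runs this per-document plane sweep when the candidate set is small (candidates.len() <= 1000 in the diff); larger sets fall back to the set-theory resolve_candidates path. The function returns a BTreeMap from "best proximity reached in the document" to the documents reaching it, which Proximity::next() then drains bucket by bucket through plane_sweep_cache. A toy, self-contained walkthrough of that bucketing (a plain Vec<u32> stands in for RoaringBitmap; the ids and proximities are made up):

use std::collections::BTreeMap;

fn main() {
    let mut buckets: BTreeMap<u8, Vec<u32>> = BTreeMap::new();
    // docid 12 has the query words adjacent (proximity 0), docids 3 and 7
    // only within a two-word window, docid 40 never closer than the clamp.
    for (docid, best_proximity) in [(12u32, 0u8), (3, 2), (7, 2), (40, 7)] {
        buckets.entry(best_proximity).or_insert_with(Vec::new).push(docid);
    }
    // Iterating the BTreeMap yields buckets in ascending proximity, i.e. the
    // order in which the criterion hands them to its consumer.
    for (proximity, docids) in buckets {
        println!("proximity {}: {:?}", proximity, docids);
    }
}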


@ -5,7 +5,7 @@ use log::debug;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use crate::search::query_tree::{maximum_typo, Operation, Query, QueryKind}; use crate::search::query_tree::{maximum_typo, Operation, Query, QueryKind};
use crate::search::word_derivations; use crate::search::{word_derivations, WordDerivationsCache};
use super::{Candidates, Criterion, CriterionResult, Context, query_docids, query_pair_proximity_docids}; use super::{Candidates, Criterion, CriterionResult, Context, query_docids, query_pair_proximity_docids};
pub struct Typo<'t> { pub struct Typo<'t> {
@ -53,7 +53,8 @@ impl<'t> Typo<'t> {
} }
impl<'t> Criterion for Typo<'t> { impl<'t> Criterion for Typo<'t> {
fn next(&mut self) -> anyhow::Result<Option<CriterionResult>> { #[logging_timer::time("Typo::{}")]
fn next(&mut self, wdcache: &mut WordDerivationsCache) -> anyhow::Result<Option<CriterionResult>> {
use Candidates::{Allowed, Forbidden}; use Candidates::{Allowed, Forbidden};
loop { loop {
debug!("Typo at iteration {} ({:?})", self.number_typos, self.candidates); debug!("Typo at iteration {} ({:?})", self.number_typos, self.candidates);
@ -73,15 +74,21 @@ impl<'t> Criterion for Typo<'t> {
} else { } else {
let fst = self.ctx.words_fst(); let fst = self.ctx.words_fst();
let new_query_tree = if self.number_typos < 2 { let new_query_tree = if self.number_typos < 2 {
alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache)? alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache, wdcache)?
} else if self.number_typos == 2 { } else if self.number_typos == 2 {
*query_tree = alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache)?; *query_tree = alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache, wdcache)?;
query_tree.clone() query_tree.clone()
} else { } else {
query_tree.clone() query_tree.clone()
}; };
let mut new_candidates = resolve_candidates(self.ctx, &new_query_tree, self.number_typos, &mut self.candidates_cache)?; let mut new_candidates = resolve_candidates(
self.ctx,
&new_query_tree,
self.number_typos,
&mut self.candidates_cache,
wdcache,
)?;
new_candidates.intersect_with(&candidates); new_candidates.intersect_with(&candidates);
candidates.difference_with(&new_candidates); candidates.difference_with(&new_candidates);
self.number_typos += 1; self.number_typos += 1;
@ -105,15 +112,21 @@ impl<'t> Criterion for Typo<'t> {
} else { } else {
let fst = self.ctx.words_fst(); let fst = self.ctx.words_fst();
let new_query_tree = if self.number_typos < 2 { let new_query_tree = if self.number_typos < 2 {
alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache)? alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache, wdcache)?
} else if self.number_typos == 2 { } else if self.number_typos == 2 {
*query_tree = alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache)?; *query_tree = alterate_query_tree(&fst, query_tree.clone(), self.number_typos, &mut self.typo_cache, wdcache)?;
query_tree.clone() query_tree.clone()
} else { } else {
query_tree.clone() query_tree.clone()
}; };
let mut new_candidates = resolve_candidates(self.ctx, &new_query_tree, self.number_typos, &mut self.candidates_cache)?; let mut new_candidates = resolve_candidates(
self.ctx,
&new_query_tree,
self.number_typos,
&mut self.candidates_cache,
wdcache,
)?;
new_candidates.difference_with(&candidates); new_candidates.difference_with(&candidates);
candidates.union_with(&new_candidates); candidates.union_with(&new_candidates);
self.number_typos += 1; self.number_typos += 1;
@ -141,7 +154,7 @@ impl<'t> Criterion for Typo<'t> {
(None, Forbidden(_)) => { (None, Forbidden(_)) => {
match self.parent.as_mut() { match self.parent.as_mut() {
Some(parent) => { Some(parent) => {
match parent.next()? { match parent.next(wdcache)? {
Some(CriterionResult { query_tree, candidates, bucket_candidates }) => { Some(CriterionResult { query_tree, candidates, bucket_candidates }) => {
self.query_tree = query_tree.map(|op| (maximum_typo(&op), op)); self.query_tree = query_tree.map(|op| (maximum_typo(&op), op));
self.number_typos = 0; self.number_typos = 0;
@ -167,6 +180,7 @@ fn alterate_query_tree(
mut query_tree: Operation, mut query_tree: Operation,
number_typos: u8, number_typos: u8,
typo_cache: &mut HashMap<(String, bool, u8), Vec<(String, u8)>>, typo_cache: &mut HashMap<(String, bool, u8), Vec<(String, u8)>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<Operation> ) -> anyhow::Result<Operation>
{ {
fn recurse( fn recurse(
@ -174,13 +188,14 @@ fn alterate_query_tree(
operation: &mut Operation, operation: &mut Operation,
number_typos: u8, number_typos: u8,
typo_cache: &mut HashMap<(String, bool, u8), Vec<(String, u8)>>, typo_cache: &mut HashMap<(String, bool, u8), Vec<(String, u8)>>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<()> ) -> anyhow::Result<()>
{ {
use Operation::{And, Consecutive, Or}; use Operation::{And, Consecutive, Or};
match operation { match operation {
And(ops) | Consecutive(ops) | Or(_, ops) => { And(ops) | Consecutive(ops) | Or(_, ops) => {
ops.iter_mut().try_for_each(|op| recurse(words_fst, op, number_typos, typo_cache)) ops.iter_mut().try_for_each(|op| recurse(words_fst, op, number_typos, typo_cache, wdcache))
}, },
Operation::Query(q) => { Operation::Query(q) => {
// TODO may be optimized when number_typos == 0 // TODO may be optimized when number_typos == 0
@ -198,7 +213,7 @@ fn alterate_query_tree(
let words = if let Some(derivations) = typo_cache.get(&cache_key) { let words = if let Some(derivations) = typo_cache.get(&cache_key) {
derivations.clone() derivations.clone()
} else { } else {
let derivations = word_derivations(word, q.prefix, typo, words_fst)?; let derivations = word_derivations(word, q.prefix, typo, words_fst, wdcache)?.to_vec();
typo_cache.insert(cache_key, derivations.clone()); typo_cache.insert(cache_key, derivations.clone());
derivations derivations
}; };
@ -219,7 +234,7 @@ fn alterate_query_tree(
} }
} }
recurse(words_fst, &mut query_tree, number_typos, typo_cache)?; recurse(words_fst, &mut query_tree, number_typos, typo_cache, wdcache)?;
Ok(query_tree) Ok(query_tree)
} }
@ -228,6 +243,7 @@ fn resolve_candidates<'t>(
query_tree: &Operation, query_tree: &Operation,
number_typos: u8, number_typos: u8,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap> ) -> anyhow::Result<RoaringBitmap>
{ {
fn resolve_operation<'t>( fn resolve_operation<'t>(
@ -235,13 +251,14 @@ fn resolve_candidates<'t>(
query_tree: &Operation, query_tree: &Operation,
number_typos: u8, number_typos: u8,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap> ) -> anyhow::Result<RoaringBitmap>
{ {
use Operation::{And, Consecutive, Or, Query}; use Operation::{And, Consecutive, Or, Query};
match query_tree { match query_tree {
And(ops) => { And(ops) => {
mdfs(ctx, ops, number_typos, cache) mdfs(ctx, ops, number_typos, cache, wdcache)
}, },
Consecutive(ops) => { Consecutive(ops) => {
let mut candidates = RoaringBitmap::new(); let mut candidates = RoaringBitmap::new();
@ -249,7 +266,7 @@ fn resolve_candidates<'t>(
for slice in ops.windows(2) { for slice in ops.windows(2) {
match (&slice[0], &slice[1]) { match (&slice[0], &slice[1]) {
(Operation::Query(left), Operation::Query(right)) => { (Operation::Query(left), Operation::Query(right)) => {
match query_pair_proximity_docids(ctx, left, right, 1)? { match query_pair_proximity_docids(ctx, left, right, 1, wdcache)? {
pair_docids if pair_docids.is_empty() => { pair_docids if pair_docids.is_empty() => {
return Ok(RoaringBitmap::new()) return Ok(RoaringBitmap::new())
}, },
@ -270,13 +287,13 @@ fn resolve_candidates<'t>(
Or(_, ops) => { Or(_, ops) => {
let mut candidates = RoaringBitmap::new(); let mut candidates = RoaringBitmap::new();
for op in ops { for op in ops {
let docids = resolve_operation(ctx, op, number_typos, cache)?; let docids = resolve_operation(ctx, op, number_typos, cache, wdcache)?;
candidates.union_with(&docids); candidates.union_with(&docids);
} }
Ok(candidates) Ok(candidates)
}, },
Query(q) => if q.kind.typo() == number_typos { Query(q) => if q.kind.typo() == number_typos {
Ok(query_docids(ctx, q)?) Ok(query_docids(ctx, q, wdcache)?)
} else { } else {
Ok(RoaringBitmap::new()) Ok(RoaringBitmap::new())
}, },
@ -288,6 +305,7 @@ fn resolve_candidates<'t>(
branches: &[Operation], branches: &[Operation],
mana: u8, mana: u8,
cache: &mut HashMap<(Operation, u8), RoaringBitmap>, cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
wdcache: &mut WordDerivationsCache,
) -> anyhow::Result<RoaringBitmap> ) -> anyhow::Result<RoaringBitmap>
{ {
match branches.split_first() { match branches.split_first() {
@ -296,7 +314,7 @@ fn resolve_candidates<'t>(
if let Some(candidates) = cache.get(&cache_key) { if let Some(candidates) = cache.get(&cache_key) {
Ok(candidates.clone()) Ok(candidates.clone())
} else { } else {
let candidates = resolve_operation(ctx, head, mana, cache)?; let candidates = resolve_operation(ctx, head, mana, cache, wdcache)?;
cache.insert(cache_key, candidates.clone()); cache.insert(cache_key, candidates.clone());
Ok(candidates) Ok(candidates)
} }
@ -310,13 +328,13 @@ fn resolve_candidates<'t>(
if let Some(candidates) = cache.get(&cache_key) { if let Some(candidates) = cache.get(&cache_key) {
candidates.clone() candidates.clone()
} else { } else {
let candidates = resolve_operation(ctx, head, m, cache)?; let candidates = resolve_operation(ctx, head, m, cache, wdcache)?;
cache.insert(cache_key, candidates.clone()); cache.insert(cache_key, candidates.clone());
candidates candidates
} }
}; };
if !head_candidates.is_empty() { if !head_candidates.is_empty() {
let tail_candidates = mdfs(ctx, tail, mana - m, cache)?; let tail_candidates = mdfs(ctx, tail, mana - m, cache, wdcache)?;
head_candidates.intersect_with(&tail_candidates); head_candidates.intersect_with(&tail_candidates);
candidates.union_with(&head_candidates); candidates.union_with(&head_candidates);
} }
@ -328,7 +346,7 @@ fn resolve_candidates<'t>(
} }
} }
resolve_operation(ctx, query_tree, number_typos, cache) resolve_operation(ctx, query_tree, number_typos, cache, wdcache)
} }
#[cfg(test)] #[cfg(test)]
@ -343,9 +361,10 @@ mod test {
let query_tree = None; let query_tree = None;
let facet_candidates = None; let facet_candidates = None;
let mut wdcache = WordDerivationsCache::new();
let mut criteria = Typo::initial(&context, query_tree, facet_candidates); let mut criteria = Typo::initial(&context, query_tree, facet_candidates);
assert!(criteria.next().unwrap().is_none()); assert!(criteria.next(&mut wdcache).unwrap().is_none());
} }
#[test] #[test]
@ -361,6 +380,7 @@ mod test {
let facet_candidates = None; let facet_candidates = None;
let mut wdcache = WordDerivationsCache::new();
let mut criteria = Typo::initial(&context, Some(query_tree), facet_candidates); let mut criteria = Typo::initial(&context, Some(query_tree), facet_candidates);
let candidates_1 = context.word_docids("split").unwrap().unwrap() let candidates_1 = context.word_docids("split").unwrap().unwrap()
@ -378,7 +398,7 @@ mod test {
bucket_candidates: candidates_1, bucket_candidates: candidates_1,
}; };
assert_eq!(criteria.next().unwrap(), Some(expected_1)); assert_eq!(criteria.next(&mut wdcache).unwrap(), Some(expected_1));
let candidates_2 = ( let candidates_2 = (
context.word_docids("split").unwrap().unwrap() context.word_docids("split").unwrap().unwrap()
@ -400,7 +420,7 @@ mod test {
bucket_candidates: candidates_2, bucket_candidates: candidates_2,
}; };
assert_eq!(criteria.next().unwrap(), Some(expected_2)); assert_eq!(criteria.next(&mut wdcache).unwrap(), Some(expected_2));
} }
#[test] #[test]
@ -409,6 +429,7 @@ mod test {
let query_tree = None; let query_tree = None;
let facet_candidates = context.word_docids("earth").unwrap().unwrap(); let facet_candidates = context.word_docids("earth").unwrap().unwrap();
let mut wdcache = WordDerivationsCache::new();
let mut criteria = Typo::initial(&context, query_tree, Some(facet_candidates.clone())); let mut criteria = Typo::initial(&context, query_tree, Some(facet_candidates.clone()));
let expected = CriterionResult { let expected = CriterionResult {
@ -418,10 +439,10 @@ mod test {
}; };
// first iteration, returns the facet candidates // first iteration, returns the facet candidates
assert_eq!(criteria.next().unwrap(), Some(expected)); assert_eq!(criteria.next(&mut wdcache).unwrap(), Some(expected));
// second iteration, returns None because there are no more things to do // second iteration, returns None because there are no more things to do
assert!(criteria.next().unwrap().is_none()); assert!(criteria.next(&mut wdcache).unwrap().is_none());
} }
#[test] #[test]
@ -437,6 +458,7 @@ mod test {
let facet_candidates = context.word_docids("earth").unwrap().unwrap(); let facet_candidates = context.word_docids("earth").unwrap().unwrap();
let mut wdcache = WordDerivationsCache::new();
let mut criteria = Typo::initial(&context, Some(query_tree), Some(facet_candidates.clone())); let mut criteria = Typo::initial(&context, Some(query_tree), Some(facet_candidates.clone()));
let candidates_1 = context.word_docids("split").unwrap().unwrap() let candidates_1 = context.word_docids("split").unwrap().unwrap()
@ -454,7 +476,7 @@ mod test {
bucket_candidates: candidates_1 & &facet_candidates, bucket_candidates: candidates_1 & &facet_candidates,
}; };
assert_eq!(criteria.next().unwrap(), Some(expected_1)); assert_eq!(criteria.next(&mut wdcache).unwrap(), Some(expected_1));
let candidates_2 = ( let candidates_2 = (
context.word_docids("split").unwrap().unwrap() context.word_docids("split").unwrap().unwrap()
@ -476,7 +498,7 @@ mod test {
bucket_candidates: candidates_2 & &facet_candidates, bucket_candidates: candidates_2 & &facet_candidates,
}; };
assert_eq!(criteria.next().unwrap(), Some(expected_2)); assert_eq!(criteria.next(&mut wdcache).unwrap(), Some(expected_2));
} }
} }
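As the updated tests show, the caller now owns a single WordDerivationsCache and passes a mutable borrow into every next call, so derivations computed while resolving one bucket are reused by the following ones. A hedged sketch of that calling convention with a dummy criterion (the names and the Option<Vec<u32>> return type are illustrative, not the real CriterionResult API):

use std::collections::HashMap;

// Same shape as the cache introduced in this commit.
type WordDerivationsCache = HashMap<(String, bool, u8), Vec<(String, u8)>>;

struct DummyCriterion {
    buckets: Vec<Vec<u32>>,
}

impl DummyCriterion {
    // Every call receives the same cache the caller created up front.
    fn next(&mut self, wdcache: &mut WordDerivationsCache) -> Option<Vec<u32>> {
        // A real criterion would call word_derivations(word, .., wdcache) here.
        wdcache.entry(("split".to_string(), false, 1)).or_default();
        self.buckets.pop()
    }
}

fn main() {
    // One cache per search request, shared by every bucket iteration.
    let mut wdcache = WordDerivationsCache::new();
    let mut criterion = DummyCriterion { buckets: vec![vec![3], vec![1, 2]] };
    while let Some(bucket) = criterion.next(&mut wdcache) {
        println!("bucket: {:?}", bucket);
    }
    assert!(criterion.next(&mut wdcache).is_none());
}

Creating the cache outside the loop, rather than inside each criterion, is what lets every criterion in the chain share the same derivations.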

View File

@ -5,6 +5,7 @@ use log::debug;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use crate::search::query_tree::Operation; use crate::search::query_tree::Operation;
use crate::search::WordDerivationsCache;
use super::{resolve_query_tree, Candidates, Criterion, CriterionResult, Context}; use super::{resolve_query_tree, Candidates, Criterion, CriterionResult, Context};
pub struct Words<'t> { pub struct Words<'t> {
@ -46,7 +47,8 @@ impl<'t> Words<'t> {
} }
impl<'t> Criterion for Words<'t> { impl<'t> Criterion for Words<'t> {
fn next(&mut self) -> anyhow::Result<Option<CriterionResult>> { #[logging_timer::time("Words::{}")]
fn next(&mut self, wdcache: &mut WordDerivationsCache) -> anyhow::Result<Option<CriterionResult>> {
use Candidates::{Allowed, Forbidden}; use Candidates::{Allowed, Forbidden};
loop { loop {
debug!("Words at iteration {} ({:?})", self.query_trees.len(), self.candidates); debug!("Words at iteration {} ({:?})", self.query_trees.len(), self.candidates);
@ -61,7 +63,7 @@ impl<'t> Criterion for Words<'t> {
})); }));
}, },
(Some(qt), Allowed(candidates)) => { (Some(qt), Allowed(candidates)) => {
let mut found_candidates = resolve_query_tree(self.ctx, &qt, &mut self.candidates_cache)?; let mut found_candidates = resolve_query_tree(self.ctx, &qt, &mut self.candidates_cache, wdcache)?;
found_candidates.intersect_with(&candidates); found_candidates.intersect_with(&candidates);
candidates.difference_with(&found_candidates); candidates.difference_with(&found_candidates);
@ -77,7 +79,7 @@ impl<'t> Criterion for Words<'t> {
})); }));
}, },
(Some(qt), Forbidden(candidates)) => { (Some(qt), Forbidden(candidates)) => {
let mut found_candidates = resolve_query_tree(self.ctx, &qt, &mut self.candidates_cache)?; let mut found_candidates = resolve_query_tree(self.ctx, &qt, &mut self.candidates_cache, wdcache)?;
found_candidates.difference_with(&candidates); found_candidates.difference_with(&candidates);
candidates.union_with(&found_candidates); candidates.union_with(&found_candidates);
@ -103,7 +105,7 @@ impl<'t> Criterion for Words<'t> {
(None, Forbidden(_)) => { (None, Forbidden(_)) => {
match self.parent.as_mut() { match self.parent.as_mut() {
Some(parent) => { Some(parent) => {
match parent.next()? { match parent.next(wdcache)? {
Some(CriterionResult { query_tree, candidates, bucket_candidates }) => { Some(CriterionResult { query_tree, candidates, bucket_candidates }) => {
self.query_trees = query_tree.map(explode_query_tree).unwrap_or_default(); self.query_trees = query_tree.map(explode_query_tree).unwrap_or_default();
self.candidates = Candidates::Allowed(candidates); self.candidates = Candidates::Allowed(candidates);
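Words::next also gains a #[logging_timer::time("Words::{}")] attribute from the logging_timer crate added in this PR; it wraps the function and emits its elapsed wall-clock time through the log facade, substituting the function name for "{}". A minimal sketch of the attribute on a free function, assuming logging_timer, log, and env_logger (the latter is not part of this PR, just a convenient backend) as dependencies:

// Cargo.toml (assumed): logging_timer = "1.0", log = "0.4", env_logger = "0.8"

// The attribute expands to a timer that logs each call's duration,
// labelled "Words::next" here because "{}" is replaced by the function name.
#[logging_timer::time("Words::{}")]
fn next(iteration: usize) -> Option<usize> {
    // Pretend work so the timer has something to measure.
    std::thread::sleep(std::time::Duration::from_millis(5));
    if iteration < 3 { Some(iteration + 1) } else { None }
}

fn main() {
    // Install a `log` backend so the timer output is visible (e.g. RUST_LOG=debug).
    env_logger::init();
    let mut i = 0;
    while let Some(n) = next(i) {
        i = n;
    }
}

In the real code the attribute sits on the trait impl method, so each bucket iteration of the Words criterion shows up in the logs with its own timing.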

View File

@ -1,5 +1,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::hash_map::{HashMap, Entry};
use std::fmt; use std::fmt;
use std::str::Utf8Error;
use std::time::Instant; use std::time::Instant;
use fst::{IntoStreamer, Streamer, Set}; use fst::{IntoStreamer, Streamer, Set};
@ -97,8 +99,9 @@ impl<'a> Search<'a> {
let mut offset = self.offset; let mut offset = self.offset;
let mut limit = self.limit; let mut limit = self.limit;
let mut documents_ids = Vec::new(); let mut documents_ids = Vec::new();
let mut words_derivations_cache = WordDerivationsCache::new();
let mut initial_candidates = RoaringBitmap::new(); let mut initial_candidates = RoaringBitmap::new();
while let Some(CriterionResult { candidates, bucket_candidates, .. }) = criteria.next()? { while let Some(CriterionResult { candidates, bucket_candidates, .. }) = criteria.next(&mut words_derivations_cache)? {
debug!("Number of candidates found {}", candidates.len()); debug!("Number of candidates found {}", candidates.len());
@ -145,13 +148,19 @@ pub struct SearchResult {
pub documents_ids: Vec<DocumentId>, pub documents_ids: Vec<DocumentId>,
} }
pub fn word_derivations( pub type WordDerivationsCache = HashMap<(String, bool, u8), Vec<(String, u8)>>;
pub fn word_derivations<'c>(
word: &str, word: &str,
is_prefix: bool, is_prefix: bool,
max_typo: u8, max_typo: u8,
fst: &fst::Set<Cow<[u8]>>, fst: &fst::Set<Cow<[u8]>>,
) -> anyhow::Result<Vec<(String, u8)>> cache: &'c mut WordDerivationsCache,
) -> Result<&'c [(String, u8)], Utf8Error>
{ {
match cache.entry((word.to_string(), is_prefix, max_typo)) {
Entry::Occupied(entry) => Ok(entry.into_mut()),
Entry::Vacant(entry) => {
let mut derived_words = Vec::new(); let mut derived_words = Vec::new();
let dfa = build_dfa(word, max_typo, is_prefix); let dfa = build_dfa(word, max_typo, is_prefix);
let mut stream = fst.search_with_state(&dfa).into_stream(); let mut stream = fst.search_with_state(&dfa).into_stream();
@ -162,7 +171,9 @@ pub fn word_derivations(
derived_words.push((word.to_string(), distance.to_u8())); derived_words.push((word.to_string(), distance.to_u8()));
} }
Ok(derived_words) Ok(entry.insert(derived_words))
},
}
} }
pub fn build_dfa(word: &str, typos: u8, is_prefix: bool) -> DFA { pub fn build_dfa(word: &str, typos: u8, is_prefix: bool) -> DFA {
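The last hunk turns word_derivations into a memoized lookup: the new WordDerivationsCache alias keys derived words by (word, is_prefix, max_typo), and the Entry API either hands back the cached slice or inserts the freshly computed one. A simplified sketch of that pattern, with the fst/Levenshtein-DFA search replaced by a dummy derivation so it stays self-contained:

use std::collections::hash_map::{Entry, HashMap};

// Same shape as the cache introduced in this commit: (word, is_prefix, max_typo)
// maps to the derived words with their typo distance.
pub type WordDerivationsCache = HashMap<(String, bool, u8), Vec<(String, u8)>>;

pub fn word_derivations<'c>(
    word: &str,
    is_prefix: bool,
    max_typo: u8,
    cache: &'c mut WordDerivationsCache,
) -> &'c [(String, u8)] {
    match cache.entry((word.to_string(), is_prefix, max_typo)) {
        // Already computed for this triple: hand back a borrow into the cache.
        Entry::Occupied(entry) => entry.into_mut(),
        // First request: run the (here faked) derivation, store it, and
        // return a borrow to the freshly inserted slice.
        Entry::Vacant(entry) => {
            let derived_words = vec![(word.to_string(), 0), (format!("{}s", word), max_typo)];
            entry.insert(derived_words)
        }
    }
}

fn main() {
    let mut cache = WordDerivationsCache::new();
    let first = word_derivations("world", false, 1, &mut cache).to_vec();
    let second = word_derivations("world", false, 1, &mut cache); // cache hit
    assert_eq!(first.as_slice(), second);
    println!("{:?}", second);
}

Returning a borrow tied to the cache's lifetime (&'c [(String, u8)]) is what forces the cache to be threaded as &mut through every criterion, which is exactly what the earlier hunks in this commit do.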