use std::hash::Hash;
use std::ops::Range;
use std::rc::Rc;
use std::time::{Instant, Duration};
use std::{mem, cmp, cmp::Reverse};

use fst::{Streamer, IntoStreamer};
use hashbrown::HashMap;
use levenshtein_automata::DFA;
use log::info;
use meilidb_tokenizer::{is_cjk, split_query_string};
use rayon::slice::ParallelSliceMut;
use rayon::iter::{ParallelIterator, ParallelBridge};
use sdset::SetBuf;
use slice_group_by::{GroupBy, GroupByMut};

use crate::automaton::{build_dfa, build_prefix_dfa};
use crate::criterion::Criteria;
use crate::distinct_map::{DistinctMap, BufferedDistinctMap};
use crate::query_enhancer::{QueryEnhancerBuilder, QueryEnhancer};
use crate::raw_documents_from;
use crate::reordered_attrs::ReorderedAttrs;
use crate::{TmpMatch, Highlight, DocumentId, Store, RawDocument, Document};
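
// The maximum number of consecutive query words grouped together (as
// windows) when looking for synonyms and concatenations below.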
const NGRAMS: usize = 3;
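
// A word to search as a Levenshtein automaton: either an original query
// word, a synonym of an ngram, or a concatenation of query words. `index`
// is the position of this automaton in the generated list; `ngram` is the
// number of original query words it stands for.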
struct Automaton {
    index: usize,
    ngram: usize,
    query_len: usize,
    is_exact: bool,
    is_prefix: bool,
    query: String,
}

impl Automaton {
    fn dfa(&self) -> DFA {
        if self.is_prefix {
            build_prefix_dfa(&self.query)
        } else {
            build_dfa(&self.query)
        }
    }

    fn exact(index: usize, ngram: usize, query: &str) -> Automaton {
        Automaton {
            index,
            ngram,
            query_len: query.len(),
            is_exact: true,
            is_prefix: false,
            query: query.to_string(),
        }
    }

    fn prefix_exact(index: usize, ngram: usize, query: &str) -> Automaton {
        Automaton {
            index,
            ngram,
            query_len: query.len(),
            is_exact: true,
            is_prefix: true,
            query: query.to_string(),
        }
    }

    fn non_exact(index: usize, ngram: usize, query: &str) -> Automaton {
        Automaton {
            index,
            ngram,
            query_len: query.len(),
            is_exact: false,
            is_prefix: false,
            query: query.to_string(),
        }
    }
}
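
/// Lowercases the given string and, if it contains no CJK character,
/// transliterates it to ASCII with deunicode (unknown codepoints map to "").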
pub fn normalize_str(string: &str) -> String {
    let mut string = string.to_lowercase();

    if !string.contains(is_cjk) {
        string = deunicode::deunicode_with_tofu(&string, "");
    }

    string
}
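
// Builds one `Automaton` per derived query word and a `QueryEnhancer` that
// maps matches of synonym and concatenation automatons back onto the
// positions of the original query words.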
fn generate_automatons<S: Store>(query: &str, store: &S) -> Result<(Vec<Automaton>, QueryEnhancer), S::Error> {
    let has_end_whitespace = query.chars().last().map_or(false, char::is_whitespace);
    let query_words: Vec<_> = split_query_string(query).map(str::to_lowercase).collect();
    let synonyms = store.synonyms()?;

    let mut automatons = Vec::new();
    let mut enhancer_builder = QueryEnhancerBuilder::new(&query_words);

    // We must not declare the original words to the query enhancer,
    // but we do need to push them onto the automatons list first.
    let mut original_words = query_words.iter().peekable();
    while let Some(word) = original_words.next() {

        let has_following_word = original_words.peek().is_some();
        let not_prefix_dfa = has_following_word || has_end_whitespace || word.chars().all(is_cjk);

        let automaton = if not_prefix_dfa {
            Automaton::exact(automatons.len(), 1, word)
        } else {
            Automaton::prefix_exact(automatons.len(), 1, word)
        };
        automatons.push(automaton);
    }
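
    // Every window of `n` consecutive query words is a candidate ngram:
    // we derive automatons from its synonyms and, for n > 1, from the
    // concatenation of its words.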
    for n in 1..=NGRAMS {

        let mut ngrams = query_words.windows(n).enumerate().peekable();
        while let Some((query_index, ngram_slice)) = ngrams.next() {

            let query_range = query_index..query_index + n;
            let ngram_nb_words = ngram_slice.len();
            let ngram = ngram_slice.join(" ");

            let has_following_word = ngrams.peek().is_some();
            let not_prefix_dfa = has_following_word || has_end_whitespace || ngram.chars().all(is_cjk);

            // automatons for the synonyms of this ngram
            let normalized = normalize_str(&ngram);
            let lev = if not_prefix_dfa { build_dfa(&normalized) } else { build_prefix_dfa(&normalized) };

            let mut stream = synonyms.search(&lev).into_stream();
            while let Some(base) = stream.next() {

                // only trigger alternatives when the last word has been typed,
                // i.e. "new " does not trigger but "new yo" triggers alternatives to "new york"
                let base = std::str::from_utf8(base).unwrap();
                let base_nb_words = split_query_string(base).count();
                if ngram_nb_words != base_nb_words { continue }

                if let Some(synonyms) = store.alternatives_to(base.as_bytes())? {

                    let mut stream = synonyms.into_stream();
                    while let Some(synonyms) = stream.next() {
                        let synonyms = std::str::from_utf8(synonyms).unwrap();
                        let synonyms_words: Vec<_> = split_query_string(synonyms).collect();
                        let nb_synonym_words = synonyms_words.len();

                        let real_query_index = automatons.len();
                        enhancer_builder.declare(query_range.clone(), real_query_index, &synonyms_words);

                        for synonym in synonyms_words {
                            let automaton = if nb_synonym_words == 1 {
                                Automaton::exact(automatons.len(), n, synonym)
                            } else {
                                Automaton::non_exact(automatons.len(), n, synonym)
                            };
                            automatons.push(automaton);
                        }
                    }
                }
            }

            if n != 1 {
                // automaton for the concatenation of the query words
                let concat = ngram_slice.concat();
                let normalized = normalize_str(&concat);

                let real_query_index = automatons.len();
                enhancer_builder.declare(query_range.clone(), real_query_index, &[&normalized]);

                let automaton = Automaton::exact(automatons.len(), n, &normalized);
                automatons.push(automaton);
            }
        }
    }

    // order the automatons, most important first;
    // the original automatons are kept at the front.
    let original_len = query_words.len();
    automatons[original_len..].sort_unstable_by_key(|a| (Reverse(a.is_exact), Reverse(a.ngram)));

    Ok((automatons, enhancer_builder.build()))
}
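
/// Builds and executes queries against a `Store`: generates the automatons,
/// fetches and rewrites the matches, then ranks documents with the
/// configured criteria.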
pub struct QueryBuilder<'c, S, FI = fn(DocumentId) -> bool> {
    store: S,
    criteria: Criteria<'c>,
    searchable_attrs: Option<ReorderedAttrs>,
    filter: Option<FI>,
    fetch_timeout: Option<Duration>,
}

impl<'c, S> QueryBuilder<'c, S, fn(DocumentId) -> bool> {
    pub fn new(store: S) -> Self {
        QueryBuilder::with_criteria(store, Criteria::default())
    }

    pub fn with_criteria(store: S, criteria: Criteria<'c>) -> Self {
        QueryBuilder { store, criteria, searchable_attrs: None, filter: None, fetch_timeout: None }
    }
}
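
// Typical usage, as a minimal sketch mirroring the tests below:
//
//     let builder = QueryBuilder::new(&store);
//     let documents = builder.query("new york subway", 0..20)?;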
impl<'c, S, FI> QueryBuilder<'c, S, FI>
{
    pub fn with_filter<F>(self, function: F) -> QueryBuilder<'c, S, F>
    where F: Fn(DocumentId) -> bool,
    {
        QueryBuilder {
            store: self.store,
            criteria: self.criteria,
            searchable_attrs: self.searchable_attrs,
            filter: Some(function),
            fetch_timeout: self.fetch_timeout,
        }
    }

    pub fn with_fetch_timeout(self, timeout: Duration) -> QueryBuilder<'c, S, FI> {
        QueryBuilder { fetch_timeout: Some(timeout), ..self }
    }

    pub fn with_distinct<F, K>(self, function: F, size: usize) -> DistinctQueryBuilder<'c, S, FI, F>
    where F: Fn(DocumentId) -> Option<K>,
          K: Hash + Eq,
    {
        DistinctQueryBuilder { inner: self, function, size }
    }

    pub fn add_searchable_attribute(&mut self, attribute: u16) {
        let reorders = self.searchable_attrs.get_or_insert_with(ReorderedAttrs::new);
        reorders.insert_attribute(attribute);
    }
}
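
// When one query word matched through a multi-word synonym (e.g. "NY"
// expanded to "new york"), the matches that follow it must be shifted so
// that word indices stay comparable; this function computes that padding.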
fn multiword_rewrite_matches(
    mut matches: Vec<(DocumentId, TmpMatch)>,
    query_enhancer: &QueryEnhancer,
    timeout: Option<Duration>,
) -> SetBuf<(DocumentId, TmpMatch)>
{
    let mut padded_matches = Vec::with_capacity(matches.len());

    // we sort the matches by word index to make them rewritable
    let start = Instant::now();
    matches.par_sort_unstable_by_key(|(id, match_)| (*id, match_.attribute, match_.word_index));
    info!("rewrite sort by word_index took {:.2?}", start.elapsed());

    let start = Instant::now();
    // for each attribute of each document
    for same_document_attribute in matches.linear_group_by_key(|(id, m)| (*id, m.attribute)) {

        // padding will only be applied
        // to word indices in the same attribute
        let mut padding = 0;
        let mut iter = same_document_attribute.linear_group_by_key(|(_, m)| m.word_index);

        // for each match at the same position
        // in this document attribute
        while let Some(same_word_index) = iter.next() {

            // find the biggest padding
            let mut biggest = 0;
            for (id, match_) in same_word_index {

                let mut replacement = query_enhancer.replacement(match_.query_index);
                let replacement_len = replacement.len();
                let nexts = iter.remainder().linear_group_by_key(|(_, m)| m.word_index);

                if let Some(query_index) = replacement.next() {
                    let word_index = match_.word_index + padding as u16;
                    let match_ = TmpMatch { query_index, word_index, ..match_.clone() };
                    padded_matches.push((*id, match_));
                }

                let mut found = false;

                // look ahead: if there already is a match corresponding
                // to this padding word, abort the padding
                'padding: for (x, next_group) in nexts.enumerate() {

                    for (i, query_index) in replacement.clone().enumerate().skip(x) {
                        let word_index = match_.word_index + padding as u16 + (i + 1) as u16;
                        let padmatch = TmpMatch { query_index, word_index, ..match_.clone() };

                        for (_, nmatch_) in next_group {
                            let mut rep = query_enhancer.replacement(nmatch_.query_index);
                            let query_index = rep.next().unwrap();
                            if query_index == padmatch.query_index {

                                if !found {
                                    // if we find a corresponding padding for the
                                    // first time we must push preceding paddings
                                    for (i, query_index) in replacement.clone().enumerate().take(i) {
                                        let word_index = match_.word_index + padding as u16 + (i + 1) as u16;
                                        let match_ = TmpMatch { query_index, word_index, ..match_.clone() };
                                        padded_matches.push((*id, match_));
                                        biggest = biggest.max(i + 1);
                                    }
                                }

                                padded_matches.push((*id, padmatch));
                                found = true;
                                continue 'padding;
                            }
                        }
                    }

                    // if we do not find a corresponding padding in the
                    // next groups, stop here and pad what was found
                    break
                }

                if !found {
                    // if no padding was found in the following matches
                    // we must insert the entire padding
                    for (i, query_index) in replacement.enumerate() {
                        let word_index = match_.word_index + padding as u16 + (i + 1) as u16;
                        let match_ = TmpMatch { query_index, word_index, ..match_.clone() };
                        padded_matches.push((*id, match_));
                    }

                    biggest = biggest.max(replacement_len - 1);
                }
            }

            padding += biggest;
        }

        // check the timeout *after* having processed at least one element
        if timeout.map_or(false, |timeout| start.elapsed() > timeout) { break }
    }
    info!("main multiword rewrite took {:.2?}", start.elapsed());

    let start = Instant::now();
    for document_matches in padded_matches.linear_group_by_key_mut(|(id, _)| *id) {
        document_matches.sort_unstable();
    }
    info!("final rewrite sort took {:.2?}", start.elapsed());

    SetBuf::new_unchecked(padded_matches)
}
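
// Runs every automaton against the words FST in parallel: each worker
// streams the words its DFA accepts, looks their document indexes up in the
// store, and produces (document, match) and (document, highlight) pairs.
// Roughly 75% of the fetch timeout is spent here; the remaining 25% is
// given to the multiword rewrite below.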
impl<'c, S, FI> QueryBuilder<'c, S, FI>
where S: Store + Sync,
      S::Error: Send,
{
    fn query_all(&self, query: &str) -> Result<Vec<RawDocument>, S::Error> {
        let (automatons, query_enhancer) = generate_automatons(query, &self.store)?;
        let searchables = self.searchable_attrs.as_ref();
        let store = &self.store;
        let fetch_timeout = &self.fetch_timeout;

        let mut matches = Vec::new();
        let mut highlights = Vec::new();

        let timeout = fetch_timeout.map(|d| d * 75 / 100);
        let start = Instant::now();

        let results: Vec<_> = automatons
            .into_iter()
            .par_bridge()
            .map_with((store, searchables), |(store, searchables), automaton| {
                let Automaton { index, is_exact, query_len, .. } = automaton;
                let dfa = automaton.dfa();

                let words = match store.words() {
                    Ok(words) => words,
                    Err(err) => return Some(Err(err)),
                };

                let mut stream = words.search(&dfa).into_stream();
                let mut matches = Vec::new();
                let mut highlights = Vec::new();

                while let Some(input) = stream.next() {
                    let distance = dfa.eval(input).to_u8();
                    let is_exact = is_exact && distance == 0 && input.len() == query_len;

                    let doc_indexes = match store.word_indexes(input) {
                        Ok(Some(doc_indexes)) => doc_indexes,
                        Ok(None) => continue,
                        Err(err) => return Some(Err(err)),
                    };

                    matches.reserve(doc_indexes.len());
                    highlights.reserve(doc_indexes.len());

                    for di in doc_indexes.as_slice() {
                        let attribute = searchables.map_or(Some(di.attribute), |r| r.get(di.attribute));
                        if let Some(attribute) = attribute {
                            let match_ = TmpMatch {
                                query_index: index as u32,
                                distance,
                                attribute,
                                word_index: di.word_index,
                                is_exact,
                            };

                            let highlight = Highlight {
                                attribute: di.attribute,
                                char_index: di.char_index,
                                char_length: di.char_length,
                            };

                            matches.push((di.document_id, match_));
                            highlights.push((di.document_id, highlight));
                        }
                    }

                    // check the timeout *after* having processed at least one element
                    if timeout.map_or(false, |timeout| start.elapsed() > timeout) { break }
                }

                Some(Ok((matches, highlights)))
            })
            .while_some()
            .collect();

        for result in results {
            let (mut rcv_matches, mut rcv_highlights) = result?;
            matches.append(&mut rcv_matches);
            highlights.append(&mut rcv_highlights);
        }

        info!("main query all took {:.2?}", start.elapsed());
        info!("{} total matches to rewrite", matches.len());

        let start = Instant::now();
        let timeout = fetch_timeout.map(|d| d * 25 / 100);
        let matches = multiword_rewrite_matches(matches, &query_enhancer, timeout);
        info!("multiword rewrite took {:.2?}", start.elapsed());

        let start = Instant::now();
        let highlights = {
            highlights.par_sort_unstable_by_key(|(id, _)| *id);
            SetBuf::new_unchecked(highlights)
        };
        info!("sorting highlights took {:.2?}", start.elapsed());

        info!("{} total matches to classify", matches.len());

        let start = Instant::now();
        let raw_documents = raw_documents_from(matches, highlights);
        info!("making raw documents took {:.2?}", start.elapsed());

        info!("{} total documents to classify", raw_documents.len());

        Ok(raw_documents)
    }
}
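
// Ranking works as a lazy bucket sort: documents live in groups that are
// already totally ordered by the criteria applied so far, and each
// criterion only sorts and splits the groups overlapping the requested
// range.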
impl<'c, S, FI> QueryBuilder<'c, S, FI>
where S: Store + Sync,
      S::Error: Send,
      FI: Fn(DocumentId) -> bool,
{
    pub fn query(self, query: &str, range: Range<usize>) -> Result<Vec<Document>, S::Error> {
        // We delegate the filter work to the distinct query builder,
        // specifying a distinct rule that has no effect.
        if self.filter.is_some() {
            let builder = self.with_distinct(|_| None as Option<()>, 1);
            return builder.query(query, range);
        }

        let start = Instant::now();
        let mut documents = self.query_all(query)?;
        info!("query_all took {:.2?}", start.elapsed());

        let mut groups = vec![documents.as_mut_slice()];

        'criteria: for criterion in self.criteria.as_ref() {
            let tmp_groups = mem::replace(&mut groups, Vec::new());
            let mut documents_seen = 0;

            for group in tmp_groups {
                // if this group does not overlap with the requested range,
                // push it without sorting and splitting it
                if documents_seen + group.len() < range.start {
                    documents_seen += group.len();
                    groups.push(group);
                    continue;
                }

                let start = Instant::now();
                group.par_sort_unstable_by(|a, b| criterion.evaluate(a, b));
                info!("criterion {} sort took {:.2?}", criterion.name(), start.elapsed());

                for group in group.binary_group_by_mut(|a, b| criterion.eq(a, b)) {
                    info!("criterion {} produced a group of size {}", criterion.name(), group.len());

                    documents_seen += group.len();
                    groups.push(group);

                    // we have sorted enough documents once the last one is past
                    // the end of the requested range: continue to the next criterion
                    if documents_seen >= range.end { continue 'criteria }
                }
            }
        }

        let offset = cmp::min(documents.len(), range.start);
        let iter = documents.into_iter().skip(offset).take(range.len());
        Ok(iter.map(|d| Document::from_raw(d)).collect())
    }
}
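
// Same as `QueryBuilder`, but documents are deduplicated through a
// user-provided key function: at most `size` documents sharing the same key
// are kept.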
pub struct DistinctQueryBuilder<'c, I, FI, FD> {
    inner: QueryBuilder<'c, I, FI>,
    function: FD,
    size: usize,
}

impl<'c, I, FI, FD> DistinctQueryBuilder<'c, I, FI, FD>
{
    pub fn with_filter<F>(self, function: F) -> DistinctQueryBuilder<'c, I, F, FD>
    where F: Fn(DocumentId) -> bool,
    {
        DistinctQueryBuilder {
            inner: self.inner.with_filter(function),
            function: self.function,
            size: self.size,
        }
    }

    pub fn with_fetch_timeout(self, timeout: Duration) -> DistinctQueryBuilder<'c, I, FI, FD> {
        DistinctQueryBuilder {
            inner: self.inner.with_fetch_timeout(timeout),
            function: self.function,
            size: self.size,
        }
    }

    pub fn add_searchable_attribute(&mut self, attribute: u16) {
        self.inner.add_searchable_attribute(attribute);
    }
}
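
// The distinct version of `query` follows the same bucket sort scheme, but
// counts documents through a `BufferedDistinctMap` so that the range bounds
// apply to deduplicated documents rather than raw ones.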
impl<'c, S, FI, FD, K> DistinctQueryBuilder<'c, S, FI, FD>
where S: Store + Sync,
      S::Error: Send,
      FI: Fn(DocumentId) -> bool,
      FD: Fn(DocumentId) -> Option<K>,
      K: Hash + Eq,
{
    pub fn query(self, query: &str, range: Range<usize>) -> Result<Vec<Document>, S::Error> {
        let start = Instant::now();
        let mut documents = self.inner.query_all(query)?;
        info!("query_all took {:.2?}", start.elapsed());

        let mut groups = vec![documents.as_mut_slice()];
        let mut key_cache = HashMap::new();

        let mut filter_map = HashMap::new();
        // these two variables keep track of the current distinct map and
        // of the raw offset of the start of the group where the
        // range.start bound is located, according to the distinct function
        let mut distinct_map = DistinctMap::new(self.size);
        let mut distinct_raw_offset = 0;

        'criteria: for criterion in self.inner.criteria.as_ref() {
            let tmp_groups = mem::replace(&mut groups, Vec::new());
            let mut buf_distinct = BufferedDistinctMap::new(&mut distinct_map);
            let mut documents_seen = 0;

            for group in tmp_groups {
                // if this group does not overlap with the requested range,
                // push it without sorting and splitting it
                if documents_seen + group.len() < distinct_raw_offset {
                    documents_seen += group.len();
                    groups.push(group);
                    continue;
                }

                let start = Instant::now();
                group.par_sort_unstable_by(|a, b| criterion.evaluate(a, b));
                info!("criterion {} sort took {:.2?}", criterion.name(), start.elapsed());

                for group in group.binary_group_by_mut(|a, b| criterion.eq(a, b)) {
                    // we must compute the real, deduplicated length of this sub-group
                    for document in group.iter() {
                        let filter_accepted = match &self.inner.filter {
                            Some(filter) => {
                                let entry = filter_map.entry(document.id);
                                *entry.or_insert_with(|| (filter)(document.id))
                            },
                            None => true,
                        };

                        if filter_accepted {
                            let entry = key_cache.entry(document.id);
                            let key = entry.or_insert_with(|| (self.function)(document.id).map(Rc::new));

                            match key.clone() {
                                Some(key) => buf_distinct.register(key),
                                None => buf_distinct.register_without_key(),
                            };
                        }

                        // the requested range end is reached: stop computing distinct
                        if buf_distinct.len() >= range.end { break }
                    }

                    info!("criterion {} produced a group of size {}", criterion.name(), group.len());

                    documents_seen += group.len();
                    groups.push(group);

                    // if this sub-group does not overlap with the requested range
                    // we must update the distinct map and its start index
                    if buf_distinct.len() < range.start {
                        buf_distinct.transfert_to_internal();
                        distinct_raw_offset = documents_seen;
                    }

                    // we have sorted enough documents once the last one is past
                    // the end of the requested range: continue to the next criterion
                    if buf_distinct.len() >= range.end { continue 'criteria }
                }
            }
        }

        let mut out_documents = Vec::with_capacity(range.len());
        let mut seen = BufferedDistinctMap::new(&mut distinct_map);

        for document in documents.into_iter().skip(distinct_raw_offset) {
            let filter_accepted = match &self.inner.filter {
                Some(_) => filter_map.remove(&document.id).expect("BUG: filtered not found"),
                None => true,
            };

            if filter_accepted {
                let key = key_cache.remove(&document.id).expect("BUG: cached key not found");
                let distinct_accepted = match key {
                    Some(key) => seen.register(key),
                    None => seen.register_without_key(),
                };

                if distinct_accepted && seen.len() > range.start {
                    out_documents.push(Document::from_raw(document));
                    if out_documents.len() == range.len() { break }
                }
            }
        }

        Ok(out_documents)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use std::collections::{BTreeSet, HashMap};
    use std::iter::FromIterator;

    use sdset::SetBuf;
    use fst::{Set, IntoStreamer};

    use crate::DocIndex;
    use crate::store::Store;
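
    // A minimal in-memory `Store` implementation: the words and synonyms
    // FSTs plus their postings, enough to exercise the query builder
    // without a real index.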
    #[derive(Default)]
    struct InMemorySetStore {
        set: Set,
        synonyms: Set,
        indexes: HashMap<Vec<u8>, SetBuf<DocIndex>>,
        alternatives: HashMap<Vec<u8>, Set>,
    }

    fn set_from_stream<'f, I, S>(stream: I) -> Set
    where
        I: for<'a> fst::IntoStreamer<'a, Into=S, Item=&'a [u8]>,
        S: 'f + for<'a> fst::Streamer<'a, Item=&'a [u8]>,
    {
        let mut builder = fst::SetBuilder::memory();
        builder.extend_stream(stream).unwrap();
        builder.into_inner().and_then(Set::from_bytes).unwrap()
    }

    fn insert_key(set: &Set, key: &[u8]) -> Set {
        let unique_key = {
            let mut builder = fst::SetBuilder::memory();
            builder.insert(key).unwrap();
            builder.into_inner().and_then(Set::from_bytes).unwrap()
        };

        let union_ = set.op().add(unique_key.into_stream()).r#union();

        set_from_stream(union_)
    }

    fn sdset_into_fstset(set: &sdset::Set<&str>) -> Set {
        let mut builder = fst::SetBuilder::memory();
        let set = SetBuf::from_dirty(set.into_iter().map(|s| normalize_str(s)).collect());
        builder.extend_iter(set.into_iter()).unwrap();
        builder.into_inner().and_then(Set::from_bytes).unwrap()
    }

    impl InMemorySetStore {
        pub fn add_synonym(&mut self, word: &str, new: SetBuf<&str>) {
            let word = word.to_lowercase();
            let alternatives = self.alternatives.entry(word.as_bytes().to_vec()).or_default();
            let new = sdset_into_fstset(&new);
            *alternatives = set_from_stream(alternatives.op().add(new.into_stream()).r#union());

            self.synonyms = insert_key(&self.synonyms, word.as_bytes());
        }
    }

    impl<'a> FromIterator<(&'a str, &'a [DocIndex])> for InMemorySetStore {
        fn from_iter<I: IntoIterator<Item=(&'a str, &'a [DocIndex])>>(iter: I) -> Self {
            let mut tree = BTreeSet::new();
            let mut map = HashMap::new();

            for (word, indexes) in iter {
                let word = word.to_lowercase().into_bytes();
                tree.insert(word.clone());
                map.entry(word).or_insert_with(Vec::new).extend_from_slice(indexes);
            }

            InMemorySetStore {
                set: Set::from_iter(tree).unwrap(),
                synonyms: Set::default(),
                indexes: map.into_iter().map(|(k, v)| (k, SetBuf::from_dirty(v))).collect(),
                alternatives: HashMap::new(),
            }
        }
    }

    impl Store for InMemorySetStore {
        type Error = std::io::Error;

        fn words(&self) -> Result<&Set, Self::Error> {
            Ok(&self.set)
        }

        fn word_indexes(&self, word: &[u8]) -> Result<Option<SetBuf<DocIndex>>, Self::Error> {
            Ok(self.indexes.get(word).cloned())
        }

        fn synonyms(&self) -> Result<&Set, Self::Error> {
            Ok(&self.synonyms)
        }

        fn alternatives_to(&self, word: &[u8]) -> Result<Option<Set>, Self::Error> {
            Ok(self.alternatives.get(word).map(|s| Set::from_bytes(s.as_fst().to_vec()).unwrap()))
        }
    }

    const fn doc_index(document_id: u64, word_index: u16) -> DocIndex {
        DocIndex {
            document_id: DocumentId(document_id),
            attribute: 0,
            word_index,
            char_index: 0,
            char_length: 0,
        }
    }

    const fn doc_char_index(document_id: u64, word_index: u16, char_index: u16) -> DocIndex {
        DocIndex {
            document_id: DocumentId(document_id),
            attribute: 0,
            word_index,
            char_index,
            char_length: 0,
        }
    }

    #[test]
    fn simple() {
        let store = InMemorySetStore::from_iter(vec![
            ("iphone", &[doc_char_index(0, 0, 0)][..]),
            ("from", &[doc_char_index(0, 1, 1)][..]),
            ("apple", &[doc_char_index(0, 2, 2)][..]),
        ]);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("iphone from apple", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, .. }));
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);
    }

    #[test]
    fn simple_synonyms() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("hello", &[doc_index(0, 0)][..]),
        ]);

        store.add_synonym("bonjour", SetBuf::from_dirty(vec!["hello"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("hello", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("bonjour", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);
    }

    #[test]
    fn prefix_synonyms() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("hello", &[doc_index(0, 0)][..]),
        ]);

        store.add_synonym("bonjour", SetBuf::from_dirty(vec!["hello"]));
        store.add_synonym("salut", SetBuf::from_dirty(vec!["hello"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("sal", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("bonj", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("sal blabla", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("bonj blabla", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), None);
    }

    #[test]
    fn levenshtein_synonyms() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("hello", &[doc_index(0, 0)][..]),
        ]);

        store.add_synonym("salutation", SetBuf::from_dirty(vec!["hello"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("salutution", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("saluttion", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);
    }

    #[test]
    fn harder_synonyms() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("hello", &[doc_index(0, 0)][..]),
            ("bonjour", &[doc_index(1, 3)]),
            ("salut", &[doc_index(2, 5)]),
        ]);

        store.add_synonym("hello", SetBuf::from_dirty(vec!["bonjour", "salut"]));
        store.add_synonym("bonjour", SetBuf::from_dirty(vec!["hello", "salut"]));
        store.add_synonym("salut", SetBuf::from_dirty(vec!["hello", "bonjour"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("hello", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 3, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 5, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("bonjour", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 3, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 5, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("salut", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 3, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 5, .. }));
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);
    }
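
    // In the comments below, `NY ± new` reads as: the document word "NY"
    // also matches the query word "new" through a synonym expansion, and
    // `new = NY` as: the document word "new" was matched via the synonym
    // "NY"; word indices are compared after the multiword rewrite.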
    #[test]
    /// Unique word has multi-word synonyms
    fn unique_to_multiword_synonyms() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("new", &[doc_char_index(0, 0, 0)][..]),
            ("york", &[doc_char_index(0, 1, 1)][..]),
            ("city", &[doc_char_index(0, 2, 2)][..]),
            ("subway", &[doc_char_index(0, 3, 3)][..]),

            ("NY", &[doc_char_index(1, 0, 0)][..]),
            ("subway", &[doc_char_index(1, 1, 1)][..]),
        ]);

        store.add_synonym("NY", SetBuf::from_dirty(vec!["NYC", "new york", "new york city"]));
        store.add_synonym("NYC", SetBuf::from_dirty(vec!["NY", "new york", "new york city"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("NY subway", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut iter = matches.into_iter();
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NY ± new
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NY ± york
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NY ± city
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // subway
            assert_matches!(iter.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut iter = matches.into_iter();
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NY
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NY
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NY
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NY
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NY
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NY
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // subway
            assert_matches!(iter.next(), None); // position rewritten ^
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("NYC subway", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut iter = matches.into_iter();
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NYC ± new
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NYC ± york
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NYC ± city
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // subway
            assert_matches!(iter.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut iter = matches.into_iter();
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NYC
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NYC
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NYC
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NYC
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NYC
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NYC
            assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // subway
            assert_matches!(iter.next(), None); // position rewritten ^
        });
        assert_matches!(iter.next(), None);
    }

    #[test]
    fn unique_to_multiword_synonyms_words_proximity() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("new", &[doc_char_index(0, 0, 0)][..]),
            ("york", &[doc_char_index(0, 1, 1)][..]),
            ("city", &[doc_char_index(0, 2, 2)][..]),
            ("subway", &[doc_char_index(0, 3, 3)][..]),

            ("york", &[doc_char_index(1, 0, 0)][..]),
            ("new", &[doc_char_index(1, 1, 1)][..]),
            ("subway", &[doc_char_index(1, 2, 2)][..]),

            ("NY", &[doc_char_index(2, 0, 0)][..]),
            ("subway", &[doc_char_index(2, 1, 1)][..]),
        ]);

        store.add_synonym("NY", SetBuf::from_dirty(vec!["york new"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("NY", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. })); // NY ± york
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, .. })); // NY ± new
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. })); // york = NY
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, .. })); // new = NY
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 1, .. })); // york = NY
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 0, .. })); // new = NY
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("new york", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, .. })); // new
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, .. })); // york
            assert_matches!(matches.next(), None); // position rewritten ^
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 1, .. })); // york
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 0, .. })); // new
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);
    }

    #[test]
    fn unique_to_multiword_synonyms_cumulative_word_index() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("NY", &[doc_char_index(0, 0, 0)][..]),
            ("subway", &[doc_char_index(0, 1, 1)][..]),

            ("new", &[doc_char_index(1, 0, 0)][..]),
            ("york", &[doc_char_index(1, 1, 1)][..]),
            ("subway", &[doc_char_index(1, 2, 2)][..]),
        ]);

        store.add_synonym("new york", SetBuf::from_dirty(vec!["NY"]));

        let builder = QueryBuilder::new(&store);
        let results = builder.query("NY subway", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NY
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // subway
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 2, is_exact: true, .. })); // subway
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);

        let builder = QueryBuilder::new(&store);
        let results = builder.query("new york subway", 0..20).unwrap();
        let mut iter = results.into_iter();

        assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new = NY
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york = NY
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // subway
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
            let mut matches = matches.into_iter();
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
            assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // subway
            assert_matches!(matches.next(), None);
        });
        assert_matches!(iter.next(), None);
    }

    #[test]
    /// Unique word has multi-word synonyms
    fn harder_unique_to_multiword_synonyms_one() {
        let mut store = InMemorySetStore::from_iter(vec![
            ("new", &[doc_char_index(0, 0, 0)][..]),
            ("york", &[doc_char_index(0, 1, 1)][..]),
            ("city", &[doc_char_index(0, 2, 2)][..]),
            ("yellow", &[doc_char_index(0, 3, 3)][..]),
            ("subway", &[doc_char_index(0, 4, 4)][..]),
            ("broken", &[doc_char_index(0, 5, 5)][..]),

            ("NY", &[doc_char_index(1, 0, 0)][..]),
            ("blue", &[doc_char_index(1, 1, 1)][..]),
            ("subway", &[doc_char_index(1, 2, 2)][..]),
        ]);

        store.add_synonym("NY", SetBuf::from_dirty(vec!["NYC", "new york", "new york city"]));
        store.add_synonym("NYC", SetBuf::from_dirty(vec!["NY", "new york", "new york city"]));

        let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("NY subway", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-19 14:10:21 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // subway
|
|
|
|
assert_matches!(iter.next(), None); // position rewritten ^
|
2019-06-19 14:10:21 +02:00
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-17 10:28:43 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // subway
|
|
|
|
assert_matches!(iter.next(), None); // position rewritten ^
|
2019-06-17 16:01:31 +02:00
|
|
|
});
|
2019-06-17 10:28:43 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("NYC subway", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-19 14:10:21 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NYC
|
|
|
|
// because one-word to one-word ^^^^
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // subway
|
2019-06-19 14:10:21 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-17 16:01:31 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // subway
|
2019-06-17 16:01:31 +02:00
|
|
|
assert_matches!(iter.next(), None); // position rewritten ^
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
/// Unique word has multi-word synonyms
|
|
|
|
fn even_harder_unique_to_multiword_synonyms() {
|
|
|
|
let mut store = InMemorySetStore::from_iter(vec![
|
|
|
|
("new", &[doc_char_index(0, 0, 0)][..]),
|
|
|
|
("york", &[doc_char_index(0, 1, 1)][..]),
|
|
|
|
("city", &[doc_char_index(0, 2, 2)][..]),
|
|
|
|
("yellow", &[doc_char_index(0, 3, 3)][..]),
|
|
|
|
("underground", &[doc_char_index(0, 4, 4)][..]),
|
|
|
|
("train", &[doc_char_index(0, 5, 5)][..]),
|
|
|
|
("broken", &[doc_char_index(0, 6, 6)][..]),
|
|
|
|
|
|
|
|
("NY", &[doc_char_index(1, 0, 0)][..]),
|
|
|
|
("blue", &[doc_char_index(1, 1, 1)][..]),
|
|
|
|
("subway", &[doc_char_index(1, 2, 2)][..]),
|
|
|
|
]);
|
|
|
|
|
|
|
|
store.add_synonym("NY", SetBuf::from_dirty(vec!["NYC", "new york", "new york city"]));
|
|
|
|
store.add_synonym("NYC", SetBuf::from_dirty(vec!["NY", "new york", "new york city"]));
|
|
|
|
store.add_synonym("subway", SetBuf::from_dirty(vec!["underground train"]));
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("NY subway broken", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-17 10:28:43 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: false, .. })); // underground = subway
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 5, is_exact: false, .. })); // train = subway
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 5, word_index: 6, is_exact: true, .. })); // broken
|
|
|
|
assert_matches!(iter.next(), None); // position rewritten ^
|
2019-06-17 16:01:31 +02:00
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-17 16:01:31 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city = NY
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // underground = subway
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 5, is_exact: true, .. })); // train = subway
|
|
|
|
assert_matches!(iter.next(), None); // position rewritten ^
|
2019-06-17 10:28:43 +02:00
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
2019-06-17 16:01:31 +02:00
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("NYC subway", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-19 14:10:21 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city = NYC
|
|
|
|
// because one-word to one-word ^^^^
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // underground = subway
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 5, is_exact: true, .. })); // train = subway
|
2019-06-19 14:10:21 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-17 16:01:31 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city = NYC
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: false, .. })); // underground = subway
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 5, is_exact: false, .. })); // train = subway
|
|
|
|
assert_matches!(iter.next(), None); // position rewritten ^
|
2019-06-17 16:01:31 +02:00
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
/// Multi-word has multi-word synonyms
|
|
|
|
fn multiword_to_multiword_synonyms() {
|
|
|
|
let mut store = InMemorySetStore::from_iter(vec![
|
2019-06-17 18:21:10 +02:00
|
|
|
("NY", &[doc_char_index(0, 0, 0)][..]),
|
|
|
|
("subway", &[doc_char_index(0, 1, 1)][..]),
|
|
|
|
|
|
|
|
("NYC", &[doc_char_index(1, 0, 0)][..]),
|
|
|
|
("blue", &[doc_char_index(1, 1, 1)][..]),
|
|
|
|
("subway", &[doc_char_index(1, 2, 2)][..]),
|
|
|
|
("broken", &[doc_char_index(1, 3, 3)][..]),
|
2019-06-27 15:23:19 +02:00
|
|
|
|
|
|
|
("new", &[doc_char_index(2, 0, 0)][..]),
|
|
|
|
("york", &[doc_char_index(2, 1, 1)][..]),
|
|
|
|
("underground", &[doc_char_index(2, 2, 2)][..]),
|
|
|
|
("train", &[doc_char_index(2, 3, 3)][..]),
|
|
|
|
("broken", &[doc_char_index(2, 4, 4)][..]),
|
2019-06-17 16:01:31 +02:00
|
|
|
]);
|
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
store.add_synonym("new york", SetBuf::from_dirty(vec![ "NYC", "NY", "new york city" ]));
|
|
|
|
store.add_synonym("new york city", SetBuf::from_dirty(vec![ "NYC", "NY", "new york" ]));
|
|
|
|
store.add_synonym("underground train", SetBuf::from_dirty(vec![ "subway" ]));
|
2019-06-17 16:01:31 +02:00
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
2019-06-17 18:21:10 +02:00
|
|
|
let results = builder.query("new york underground train broken", 0..20).unwrap();
|
2019-06-17 16:01:31 +02:00
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
|
2019-07-01 14:55:47 +02:00
|
|
|
let mut matches = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // underground
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 4, word_index: 4, is_exact: true, .. })); // train
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 5, word_index: 5, is_exact: true, .. })); // broken
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(matches.next(), None);
|
2019-06-27 15:23:19 +02:00
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-17 18:21:10 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NYC = new
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NYC = york
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NYC = city
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // subway = underground
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 5, is_exact: true, .. })); // subway = train
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 5, word_index: 6, is_exact: true, .. })); // broken
|
2019-06-17 18:21:10 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-17 16:01:31 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NY = new
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NY = york
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NY = city
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // subway = underground
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 4, is_exact: true, .. })); // subway = train
|
2019-06-17 18:21:10 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("new york city underground train broken", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
|
2019-07-01 14:55:47 +02:00
|
|
|
let mut matches = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // underground
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 4, word_index: 4, is_exact: true, .. })); // train
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 5, word_index: 5, is_exact: true, .. })); // broken
|
|
|
|
assert_matches!(matches.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
|
|
|
let mut iter = matches.into_iter();
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NYC = new
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NYC = new
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NYC = york
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NYC = york
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NYC = city
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 4, is_exact: true, .. })); // subway = underground
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 5, is_exact: true, .. })); // subway = train
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 5, word_index: 6, is_exact: true, .. })); // broken
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
|
|
|
let mut iter = matches.into_iter();
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NY = new
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // NY = new
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NY = york
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // NY = york
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // NY = city
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // subway = underground
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 4, word_index: 4, is_exact: true, .. })); // subway = train
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
}
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
#[test]
|
|
|
|
fn intercrossed_multiword_synonyms() {
|
|
|
|
let mut store = InMemorySetStore::from_iter(vec![
|
|
|
|
("new", &[doc_index(0, 0)][..]),
|
|
|
|
("york", &[doc_index(0, 1)][..]),
|
|
|
|
("big", &[doc_index(0, 2)][..]),
|
|
|
|
("city", &[doc_index(0, 3)][..]),
|
|
|
|
]);
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
store.add_synonym("new york", SetBuf::from_dirty(vec![ "new york city" ]));
|
|
|
|
store.add_synonym("new york city", SetBuf::from_dirty(vec![ "new york" ]));
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("new york big ", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
|
|
|
let mut matches = matches.into_iter();
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: false, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 4, is_exact: false, .. })); // city
|
2019-07-01 14:55:47 +02:00
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // big
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(matches.next(), None);
|
2019-06-27 15:23:19 +02:00
|
|
|
});
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
|
|
|
|
let mut store = InMemorySetStore::from_iter(vec![
|
|
|
|
("NY", &[doc_index(0, 0)][..]),
|
|
|
|
("city", &[doc_index(0, 1)][..]),
|
|
|
|
("subway", &[doc_index(0, 2)][..]),
|
|
|
|
|
|
|
|
("NY", &[doc_index(1, 0)][..]),
|
|
|
|
("subway", &[doc_index(1, 1)][..]),
|
|
|
|
|
|
|
|
("NY", &[doc_index(2, 0)][..]),
|
|
|
|
("york", &[doc_index(2, 1)][..]),
|
|
|
|
("city", &[doc_index(2, 2)][..]),
|
|
|
|
("subway", &[doc_index(2, 3)][..]),
|
|
|
|
]);
|
|
|
|
|
|
|
|
store.add_synonym("NY", SetBuf::from_dirty(vec!["new york city story"]));
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("NY subway ", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-07-07 19:57:42 +02:00
|
|
|
let mut matches = matches.into_iter();
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // story
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 4, word_index: 4, is_exact: true, .. })); // subway
|
|
|
|
assert_matches!(matches.next(), None);
|
2019-06-17 18:21:10 +02:00
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-07-07 19:57:42 +02:00
|
|
|
let mut matches = matches.into_iter();
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 4, word_index: 3, is_exact: true, .. })); // subway
|
|
|
|
assert_matches!(matches.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
|
|
|
|
let mut matches = matches.into_iter();
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: false, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: false, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 4, word_index: 3, is_exact: true, .. })); // subway
|
|
|
|
assert_matches!(matches.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn cumulative_word_indices() {
|
|
|
|
let mut store = InMemorySetStore::from_iter(vec![
|
|
|
|
("NYC", &[doc_index(0, 0)][..]),
|
|
|
|
("long", &[doc_index(0, 1)][..]),
|
|
|
|
("subway", &[doc_index(0, 2)][..]),
|
|
|
|
("cool", &[doc_index(0, 3)][..]),
|
|
|
|
]);
|
|
|
|
|
|
|
|
store.add_synonym("new york city", SetBuf::from_dirty(vec!["NYC"]));
|
|
|
|
store.add_synonym("subway", SetBuf::from_dirty(vec!["underground train"]));
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("new york city long subway cool ", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
|
|
|
let mut matches = matches.into_iter();
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 0, word_index: 0, is_exact: true, .. })); // new = NYC
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 1, word_index: 1, is_exact: true, .. })); // york = NYC
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 2, word_index: 2, is_exact: true, .. })); // city = NYC
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 3, word_index: 3, is_exact: true, .. })); // long
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 4, word_index: 4, is_exact: true, .. })); // subway = underground
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 5, word_index: 5, is_exact: true, .. })); // subway = train
|
|
|
|
assert_matches!(matches.next(), Some(TmpMatch { query_index: 6, word_index: 6, is_exact: true, .. })); // cool
|
|
|
|
assert_matches!(matches.next(), None);
|
2019-06-17 16:01:31 +02:00
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
2019-06-17 10:28:43 +02:00
|
|
|
}
|
2019-06-20 16:25:14 +02:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn deunicoded_synonyms() {
|
|
|
|
let mut store = InMemorySetStore::from_iter(vec![
|
2019-07-07 19:57:42 +02:00
|
|
|
("telephone", &[doc_index(0, 0)][..]), // meilidb-data indexes the unidecoded
|
|
|
|
("téléphone", &[doc_index(0, 0)][..]), // and the original words with the same DocIndex
|
|
|
|
|
|
|
|
("iphone", &[doc_index(1, 0)][..]),
|
2019-06-20 16:25:14 +02:00
|
|
|
]);
|
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
store.add_synonym("téléphone", SetBuf::from_dirty(vec!["iphone"]));
|
2019-06-20 16:25:14 +02:00
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("telephone", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-20 16:25:14 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-06-20 16:25:14 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-20 16:25:14 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-06-20 16:25:14 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("téléphone", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-20 16:25:14 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-06-20 16:25:14 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-20 16:25:14 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-06-20 16:25:14 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("télephone", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
|
2019-06-20 16:25:14 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, .. }));
|
2019-06-20 16:25:14 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-20 16:25:14 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, distance: 1, word_index: 0, is_exact: false, .. })); // iphone
|
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, distance: 1, word_index: 0, is_exact: false, .. })); // téléphone
|
2019-06-26 16:28:51 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn simple_concatenation() {
|
|
|
|
let store = InMemorySetStore::from_iter(vec![
|
|
|
|
("iphone", &[doc_index(0, 0)][..]),
|
|
|
|
("case", &[doc_index(0, 1)][..]),
|
|
|
|
]);
|
|
|
|
|
|
|
|
let builder = QueryBuilder::new(&store);
|
|
|
|
let results = builder.query("i phone case", 0..20).unwrap();
|
|
|
|
let mut iter = results.into_iter();
|
|
|
|
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
|
2019-06-26 16:28:51 +02:00
|
|
|
let mut iter = matches.into_iter();
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 0, word_index: 0, distance: 0, .. })); // iphone
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 1, distance: 0, .. })); // iphone
|
2019-07-01 14:55:47 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 1, word_index: 0, distance: 1, .. })); // phone
|
2019-07-07 19:57:42 +02:00
|
|
|
assert_matches!(iter.next(), Some(TmpMatch { query_index: 2, word_index: 2, distance: 0, .. })); // case
|
2019-06-20 16:25:14 +02:00
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
});
|
|
|
|
assert_matches!(iter.next(), None);
|
|
|
|
}
|
2019-06-13 15:47:49 +02:00
|
|
|
}
|