Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-12-23 13:10:06 +01:00)

Merge pull request #771 from MarinPostma/placeholder-search

Placeholder search

Commit 2f6c55ef78
@@ -1,3 +1,7 @@
+## v0.13.0 (unreleased)
+
+- placeholder search (#771)
+
 ## v0.12.0

 - Fix long documents not being indexed completely bug (#816)
@@ -368,7 +368,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<
             });
         }

-        let result = builder.query(ref_reader, &query, 0..command.number_results)?;
+        let result = builder.query(ref_reader, Some(&query), 0..command.number_results)?;

         let mut retrieve_duration = Duration::default();
@@ -10,16 +10,16 @@ use std::fmt;

 use compact_arena::{SmallArena, Idx32, mk_arena};
 use log::debug;
-use meilisearch_types::DocIndex;
 use sdset::{Set, SetBuf, exponential_search, SetOperation, Counter, duo::OpBuilder};
 use slice_group_by::{GroupBy, GroupByMut};

-use crate::error::Error;
+use meilisearch_types::DocIndex;

 use crate::criterion::{Criteria, Context, ContextMut};
 use crate::distinct_map::{BufferedDistinctMap, DistinctMap};
 use crate::raw_document::RawDocument;
 use crate::{database::MainT, reordered_attrs::ReorderedAttrs};
-use crate::{Document, DocumentId, MResult, Index};
+use crate::{store, Document, DocumentId, MResult, Index, RankedMap, MainReader, Error};
 use crate::query_tree::{create_query_tree, traverse_query_tree};
 use crate::query_tree::{Operation, QueryResult, QueryKind, QueryId, PostingsKey};
 use crate::query_tree::Context as QTContext;
@@ -588,8 +588,55 @@ impl Deref for PostingsListView<'_> {
     }
 }

+/// sorts document ids according to user defined ranking rules.
+pub fn placeholder_document_sort(
+    document_ids: &mut [DocumentId],
+    index: &store::Index,
+    reader: &MainReader,
+    ranked_map: &RankedMap
+) -> MResult<()> {
+    use crate::settings::RankingRule;
+    use std::cmp::Ordering;
+
+    enum SortOrder {
+        Asc,
+        Desc,
+    }
+
+    if let Some(ranking_rules) = index.main.ranking_rules(reader)? {
+        let schema = index.main.schema(reader)?
+            .ok_or(Error::SchemaMissing)?;
+
+        // Select custom rules from ranking rules, and map them to custom rules
+        // containing a field_id
+        let ranking_rules = ranking_rules.iter().filter_map(|r|
+            match r {
+                RankingRule::Asc(name) => schema.id(name).map(|f| (f, SortOrder::Asc)),
+                RankingRule::Desc(name) => schema.id(name).map(|f| (f, SortOrder::Desc)),
+                _ => None,
+            }).collect::<Vec<_>>();
+
+        document_ids.sort_unstable_by(|a, b| {
+            for (field_id, order) in &ranking_rules {
+                let a_value = ranked_map.get(*a, *field_id);
+                let b_value = ranked_map.get(*b, *field_id);
+                let (a, b) = match order {
+                    SortOrder::Asc => (a_value, b_value),
+                    SortOrder::Desc => (b_value, a_value),
+                };
+                match a.cmp(&b) {
+                    Ordering::Equal => continue,
+                    ordering => return ordering,
+                }
+            }
+            Ordering::Equal
+        });
+    }
+
+    Ok(())
+}
+
 /// For each entry in facet_docids, calculates the number of documents in the intersection with candidate_docids.
-fn facet_count(
+pub fn facet_count(
     facet_docids: HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>,
     candidate_docids: &Set<DocumentId>,
 ) -> HashMap<String, HashMap<String, usize>> {
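For intuition, the comparator added above can be read in isolation: it walks the Asc/Desc rules in order and returns at the first field that breaks the tie. A minimal, self-contained sketch of the same logic (not part of the patch; the documents, values, and rules below are invented for illustration):

    use std::cmp::Ordering;

    enum SortOrder { Asc, Desc }

    fn main() {
        // hypothetical ranked values per document id: [age, price]
        let values: [[u64; 2]; 3] = [[32, 100], [27, 500], [32, 40]];
        // stand-ins for RankingRule::Asc("age") and RankingRule::Desc("price")
        let rules = [(0usize, SortOrder::Asc), (1usize, SortOrder::Desc)];

        let mut ids: Vec<usize> = vec![0, 1, 2];
        ids.sort_unstable_by(|&a, &b| {
            for (field, order) in &rules {
                let (x, y) = match order {
                    SortOrder::Asc => (values[a][*field], values[b][*field]),
                    SortOrder::Desc => (values[b][*field], values[a][*field]),
                };
                match x.cmp(&y) {
                    Ordering::Equal => continue,
                    ordering => return ordering,
                }
            }
            Ordering::Equal
        });

        // age sorts first; the tie between docs 0 and 2 falls to the higher price
        assert_eq!(ids, vec![1, 0, 2]);
    }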
@@ -814,7 +814,7 @@ mod tests {

     // even try to search for a document
     let reader = db.main_read_txn().unwrap();
-    let SortResult {documents, .. } = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
+    let SortResult {documents, .. } = index.query_builder().query(&reader, Some("21 "), 0..20).unwrap();
     assert_matches!(documents.len(), 1);

     reader.abort().unwrap();
@@ -1212,7 +1212,7 @@ mod tests {

     let builder = index.query_builder_with_criteria(criteria);

-    let SortResult {documents, .. } = builder.query(&reader, "Kevin", 0..20).unwrap();
+    let SortResult {documents, .. } = builder.query(&reader, Some("Kevin"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(
@@ -1,18 +1,20 @@
 use std::borrow::Cow;
 use std::collections::HashMap;
-use std::ops::{Range, Deref};
+use std::ops::{Deref, Range};
 use std::time::Duration;

 use either::Either;
-use sdset::SetOperation;
+use sdset::{SetOperation, SetBuf, Set};

 use meilisearch_schema::FieldId;

+use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct, SortResult, placeholder_document_sort, facet_count};
 use crate::database::MainT;
-use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct, SortResult};
-use crate::{criterion::Criteria, DocumentId};
-use crate::{reordered_attrs::ReorderedAttrs, store, MResult};
 use crate::facets::FacetFilter;
+use crate::distinct_map::{DistinctMap, BufferedDistinctMap};
+use crate::Document;
+use crate::{criterion::Criteria, DocumentId};
+use crate::{reordered_attrs::ReorderedAttrs, store, MResult, MainReader};

 pub struct QueryBuilder<'c, 'f, 'd, 'i> {
     criteria: Criteria<'c>,
@@ -27,10 +29,7 @@ pub struct QueryBuilder<'c, 'f, 'd, 'i> {

 impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
     pub fn new(index: &'i store::Index) -> Self {
-        QueryBuilder::with_criteria(
-            index,
-            Criteria::default(),
-        )
+        QueryBuilder::with_criteria(index, Criteria::default())
     }

     /// sets facet attributes to filter on
@@ -43,10 +42,7 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
         self.facets = facets;
     }

-    pub fn with_criteria(
-        index: &'i store::Index,
-        criteria: Criteria<'c>,
-    ) -> Self {
+    pub fn with_criteria(index: &'i store::Index, criteria: Criteria<'c>) -> Self {
         QueryBuilder {
             criteria,
             searchable_attrs: None,
@@ -82,14 +78,11 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
         reorders.insert_attribute(attribute);
     }

-    pub fn query(
-        self,
-        reader: &heed::RoTxn<MainT>,
-        query: &str,
-        range: Range<usize>,
-    ) -> MResult<SortResult> {
-        let facets_docids = match self.facet_filter {
-            Some(facets) => {
+    /// returns the document ids associated with a facet filter by computing the union and
+    /// intersection of the document sets
+    fn facets_docids(&self, reader: &MainReader) -> MResult<Option<SetBuf<DocumentId>>> {
+        let facet_docids = match self.facet_filter {
+            Some(ref facets) => {
                 let mut ands = Vec::with_capacity(facets.len());
                 let mut ors = Vec::new();
                 for f in facets.deref() {
@@ -97,48 +90,50 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
                         Either::Left(keys) => {
                             ors.reserve(keys.len());
                             for key in keys {
-                                let docids = self.index.facets.facet_document_ids(reader, &key)?.unwrap_or_default();
+                                let docids = self
+                                    .index
+                                    .facets
+                                    .facet_document_ids(reader, &key)?
+                                    .unwrap_or_default();
                                 ors.push(docids);
                             }
                             let sets: Vec<_> = ors.iter().map(Cow::deref).collect();
-                            let or_result = sdset::multi::OpBuilder::from_vec(sets).union().into_set_buf();
+                            let or_result = sdset::multi::OpBuilder::from_vec(sets)
+                                .union()
+                                .into_set_buf();
                             ands.push(Cow::Owned(or_result));
                             ors.clear();
                         }
-                        Either::Right(key) =>{
+                        Either::Right(key) => {
                             match self.index.facets.facet_document_ids(reader, &key)? {
                                 Some(docids) => ands.push(docids),
                                 // no candidates for search, early return.
-                                None => return Ok(SortResult::default()),
+                                None => return Ok(Some(SetBuf::default())),
                             }
                         }
                     };
                 }
                 let ands: Vec<_> = ands.iter().map(Cow::deref).collect();
-                Some(sdset::multi::OpBuilder::from_vec(ands).intersection().into_set_buf())
-            }
-            None => None
-        };
-
-        // for each field to retrieve the count for, create a HashMap associating the attribute
-        // value to a set of matching documents. The HashMaps are then collected in another
-        // HashMap, associating each HashMap to its field.
-        let facet_count_docids = match self.facets {
-            Some(field_ids) => {
-                let mut facet_count_map = HashMap::new();
-                for (field_id, field_name) in field_ids {
-                    let mut key_map = HashMap::new();
-                    for pair in self.index.facets.field_document_ids(reader, field_id)? {
-                        let (facet_key, document_ids) = pair?;
-                        let value = facet_key.value();
-                        key_map.insert(value.to_string(), document_ids);
-                    }
-                    facet_count_map.insert(field_name, key_map);
-                }
-                Some(facet_count_map)
+                Some(
+                    sdset::multi::OpBuilder::from_vec(ands)
+                        .intersection()
+                        .into_set_buf(),
+                )
             }
             None => None,
         };

+        Ok(facet_docids)
+    }
+
+    fn standard_query(self, reader: &MainReader, query: &str, range: Range<usize>) -> MResult<SortResult> {
+        let facets_docids = match self.facets_docids(reader)? {
+            Some(ids) if ids.is_empty() => return Ok(SortResult::default()),
+            other => other
+        };
+        // for each field to retrieve the count for, create a HashMap associating the attribute
+        // value to a set of matching documents. The HashMaps are then collected in another
+        // HashMap, associating each HashMap to its field.
+        let facet_count_docids = self.facet_count_docids(reader)?;
+
         match self.distinct {
             Some((distinct, distinct_size)) => bucket_sort_with_distinct(
@@ -167,6 +162,117 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
             ),
         }
     }

+    fn placeholder_query(self, reader: &heed::RoTxn<MainT>, range: Range<usize>) -> MResult<SortResult> {
+        match self.facets_docids(reader)? {
+            Some(docids) => {
+                // We sort the docids from facets according to the criteria set by the user
+                let mut sorted_docids = docids.clone().into_vec();
+                let mut sort_result = match self.index.main.ranked_map(reader)? {
+                    Some(ranked_map) => {
+                        placeholder_document_sort(&mut sorted_docids, self.index, reader, &ranked_map)?;
+                        self.sort_result_from_docids(&sorted_docids, range)
+                    },
+                    // if we can't perform a sort, we return documents unordered
+                    None => self.sort_result_from_docids(&docids, range),
+                };
+
+                if let Some(f) = self.facet_count_docids(reader)? {
+                    sort_result.exhaustive_facets_count = Some(true);
+                    sort_result.facets = Some(facet_count(f, &docids));
+                }
+
+                Ok(sort_result)
+            },
+            None => {
+                match self.index.main.sorted_document_ids_cache(reader)? {
+                    // build result from cached document ids
+                    Some(docids) => {
+                        let mut sort_result = self.sort_result_from_docids(&docids, range);
+
+                        if let Some(f) = self.facet_count_docids(reader)? {
+                            sort_result.exhaustive_facets_count = Some(true);
+                            // document ids are not sorted in natural order, we need to construct a new set
+                            let document_set = SetBuf::from_dirty(Vec::from(docids));
+                            sort_result.facets = Some(facet_count(f, &document_set));
+                        }
+
+                        Ok(sort_result)
+                    },
+                    // no document id cached, return empty result
+                    None => Ok(SortResult::default()),
+                }
+            }
+        }
+    }
+
+    fn facet_count_docids<'a>(&self, reader: &'a MainReader) -> MResult<Option<HashMap<String, HashMap<String, Cow<'a, Set<DocumentId>>>>>> {
+        match self.facets {
+            Some(ref field_ids) => {
+                let mut facet_count_map = HashMap::new();
+                for (field_id, field_name) in field_ids {
+                    let mut key_map = HashMap::new();
+                    for pair in self.index.facets.field_document_ids(reader, *field_id)? {
+                        let (facet_key, document_ids) = pair?;
+                        let value = facet_key.value();
+                        key_map.insert(value.to_string(), document_ids);
+                    }
+                    facet_count_map.insert(field_name.clone(), key_map);
+                }
+                Ok(Some(facet_count_map))
+            }
+            None => Ok(None),
+        }
+    }
+
+    fn sort_result_from_docids(&self, docids: &[DocumentId], range: Range<usize>) -> SortResult {
+        let mut sort_result = SortResult::default();
+        let mut result = match self.filter {
+            Some(ref filter) => docids
+                .iter()
+                .filter(|item| (filter)(**item))
+                .skip(range.start)
+                .take(range.end - range.start)
+                .map(|&id| Document::from_highlights(id, &[]))
+                .collect::<Vec<_>>(),
+            None => docids
+                .iter()
+                .skip(range.start)
+                .take(range.end - range.start)
+                .map(|&id| Document::from_highlights(id, &[]))
+                .collect::<Vec<_>>(),
+        };
+
+        // distinct is set, remove duplicates with the distinct function
+        if let Some((distinct, distinct_size)) = &self.distinct {
+            let mut distinct_map = DistinctMap::new(*distinct_size);
+            let mut distinct_map = BufferedDistinctMap::new(&mut distinct_map);
+            result.retain(|doc| {
+                let id = doc.id;
+                let key = (distinct)(id);
+                match key {
+                    Some(key) => distinct_map.register(key),
+                    None => distinct_map.register_without_key(),
+                }
+            });
+        }
+
+        sort_result.documents = result;
+        sort_result.nb_hits = docids.len();
+        sort_result
+    }
+
+    pub fn query(
+        self,
+        reader: &heed::RoTxn<MainT>,
+        query: Option<&str>,
+        range: Range<usize>,
+    ) -> MResult<SortResult> {
+        match query {
+            Some(query) => self.standard_query(reader, query, range),
+            None => self.placeholder_query(reader, range),
+        }
+    }
 }

 #[cfg(test)]
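After this change there is a single public entry point that dispatches on the optional query. A sketch of the two call styles (the index and reader setup are assumed to exist and the `?` implies an error-propagating context; none of this is taken verbatim from the patch):

    // a text query behaves as before, now wrapped in Some(..)
    let result = index.query_builder().query(&reader, Some("kevin"), 0..20)?;

    // passing None performs a placeholder search: documents come back sorted
    // by the Asc/Desc ranking rules, or straight from the sorted-ids cache
    // when no facet filter is set
    let result = index.query_builder().query(&reader, None, 0..20)?;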
@@ -181,12 +287,12 @@ mod tests {
     use sdset::SetBuf;
     use tempfile::TempDir;

-    use crate::DocIndex;
-    use crate::Document;
     use crate::automaton::normalize_str;
     use crate::bucket_sort::SimpleMatch;
-    use crate::database::{Database,DatabaseOptions};
+    use crate::database::{Database, DatabaseOptions};
     use crate::store::Index;
+    use crate::DocIndex;
+    use crate::Document;
     use meilisearch_schema::Schema;

     fn set_from_stream<'f, I, S>(stream: I) -> fst::Set<Vec<u8>>
@@ -366,7 +472,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "iphone from apple", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("iphone from apple"), 0..20).unwrap();
    let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -389,7 +495,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "hello", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("hello"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -400,7 +506,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "bonjour", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("bonjour"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -504,7 +610,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "hello", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("hello"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -525,7 +631,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "bonjour", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("bonjour"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -546,7 +652,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "salut", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("salut"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -592,7 +698,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NY subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -614,7 +720,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult { documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NYC subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -656,7 +762,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NY", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NY"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
@@ -680,7 +786,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "new york", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("new york"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -714,7 +820,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NY subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -731,7 +837,8 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "new york subway", 0..20).unwrap();
+    let SortResult { documents, .. } =
+        builder.query(&reader, Some("new york subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -779,7 +886,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NY subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -801,7 +908,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NYC subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -854,7 +961,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NY subway broken", 0..20).unwrap();
+    let SortResult {documents, .. } = builder.query(&reader, Some("NY subway broken"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -870,7 +977,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NYC subway"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -926,8 +1033,8 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder
-        .query(&reader, "new york underground train broken", 0..20)
+    let SortResult { documents, .. } = builder
+        .query(&reader, Some("new york underground train broken"), 0..20)
         .unwrap();
     let mut iter = documents.into_iter();

@@ -956,8 +1063,8 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder
-        .query(&reader, "new york city underground train broken", 0..20)
+    let SortResult { documents, .. } = builder
+        .query(&reader, Some("new york city underground train broken"), 0..20)
         .unwrap();
     let mut iter = documents.into_iter();

@@ -1000,7 +1107,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "new york big ", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("new york big "), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1034,7 +1141,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "NY subway ", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("NY subway "), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1084,8 +1191,8 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder
-        .query(&reader, "new york city long subway cool ", 0..20)
+    let SortResult { documents, .. } = builder
+        .query(&reader, Some("new york city long subway cool "), 0..20)
         .unwrap();
     let mut iter = documents.into_iter();

@@ -1117,7 +1224,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "telephone", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("telephone"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1134,7 +1241,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "téléphone", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("téléphone"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1151,7 +1258,7 @@ mod tests {
     assert_matches!(iter.next(), None);

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "télephone", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("télephone"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -1178,7 +1285,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "i phone case", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("i phone case"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1207,7 +1314,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("searchengine"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1247,7 +1354,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("searchengine"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1279,7 +1386,7 @@ mod tests {
     let reader = db.main_read_txn().unwrap();

     let builder = store.query_builder();
-    let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("searchengine"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -2,7 +2,7 @@ use std::borrow::Cow;
 use std::collections::HashMap;

 use chrono::{DateTime, Utc};
-use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
+use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str, CowSlice};
 use meilisearch_schema::{FieldId, Schema};
 use meilisearch_types::DocumentId;
 use sdset::Set;
@@ -25,6 +25,7 @@ const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents";
 const RANKED_MAP_KEY: &str = "ranked-map";
 const RANKING_RULES_KEY: &str = "ranking-rules";
 const SCHEMA_KEY: &str = "schema";
+const SORTED_DOCUMENT_IDS_CACHE_KEY: &str = "sorted-document-ids-cache";
 const STOP_WORDS_KEY: &str = "stop-words";
 const SYNONYMS_KEY: &str = "synonyms";
 const UPDATED_AT_KEY: &str = "updated-at";
@@ -165,6 +166,14 @@ impl Main {
         Ok(self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes())?)
     }

+    pub fn put_sorted_document_ids_cache(self, writer: &mut heed::RwTxn<MainT>, documents_ids: &[DocumentId]) -> MResult<()> {
+        Ok(self.main.put::<_, Str, CowSlice<DocumentId>>(writer, SORTED_DOCUMENT_IDS_CACHE_KEY, documents_ids)?)
+    }
+
+    pub fn sorted_document_ids_cache(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<Cow<[DocumentId]>>> {
+        Ok(self.main.get::<_, Str, CowSlice<DocumentId>>(reader, SORTED_DOCUMENT_IDS_CACHE_KEY)?)
+    }
+
     pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> MResult<()> {
         Ok(self.main.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)?)
     }
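The point of persisting this cache: when a placeholder search has no facet filter, a paginated request can be answered by slicing the pre-sorted id list instead of sorting on every query. A self-contained sketch of that read path (a plain Vec stands in for the LMDB value; the ids are invented):

    fn main() {
        // stand-in for the persisted cache: ids already sorted by ranking rules
        let cache: Vec<u64> = vec![17, 3, 42, 8, 25];

        // a placeholder search with offset=1 and limit=3 just slices the cache
        let (offset, limit) = (1usize, 3usize);
        let page: Vec<u64> = cache.iter().skip(offset).take(limit).copied().collect();
        assert_eq!(page, vec![3, 42, 8]);
    }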
@@ -217,7 +217,7 @@ pub fn apply_addition<'a, 'b>(
     let mut indexer = RawIndexer::new(stop_words);

     // For each document in this update
-    for (document_id, document) in documents_additions {
+    for (document_id, document) in &documents_additions {
         // For each key-value pair in the document.
         for (attribute, value) in document {
             let field_id = schema.insert_and_index(&attribute)?;
@@ -229,7 +229,7 @@ pub fn apply_addition<'a, 'b>(
                 &mut indexer,
                 &schema,
                 field_id,
-                document_id,
+                *document_id,
                 &value,
             )?;
         }
@@ -257,6 +257,10 @@ pub fn apply_addition<'a, 'b>(
         index.facets.add(writer, facet_map)?;
     }

+    // update is finished; update sorted document id cache with new state
+    let mut document_ids = index.main.internal_docids(writer)?.to_vec();
+    super::cache_document_ids_sorted(writer, &ranked_map, index, &mut document_ids)?;
+
     Ok(())
 }
@@ -313,8 +317,8 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
         index.facets.add(writer, facet_map)?;
     }
     // ^-- https://github.com/meilisearch/MeiliSearch/pull/631#issuecomment-626624470 --v
-    for document_id in documents_ids_to_reindex {
-        for result in index.documents_fields.document_fields(writer, document_id)? {
+    for document_id in &documents_ids_to_reindex {
+        for result in index.documents_fields.document_fields(writer, *document_id)? {
             let (field_id, bytes) = result?;
             let value: Value = serde_json::from_slice(bytes)?;
             ram_store.insert((document_id, field_id), value);
@@ -330,7 +334,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
                 &mut indexer,
                 &schema,
                 field_id,
-                document_id,
+                *document_id,
                 &value,
             )?;
         }
@@ -354,6 +358,10 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
         index.facets.add(writer, facet_map)?;
     }

+    // update is finished; update sorted document id cache with new state
+    let mut document_ids = index.main.internal_docids(writer)?.to_vec();
+    super::cache_document_ids_sorted(writer, &ranked_map, index, &mut document_ids)?;
+
     Ok(())
 }

@@ -8,7 +8,7 @@ use crate::database::{UpdateEvent, UpdateEventsEmitter};
 use crate::facets;
 use crate::store;
 use crate::update::{next_update_id, compute_short_prefixes, Update};
-use crate::{DocumentId, Error, MResult, RankedMap};
+use crate::{DocumentId, Error, MResult, RankedMap, MainWriter, Index};

 pub struct DocumentsDeletion {
     updates_store: store::Updates,
@@ -153,8 +153,8 @@ pub fn apply_documents_deletion(
     }

     let deleted_documents_len = deleted_documents.len() as u64;
-    for id in deleted_documents {
-        index.docs_words.del_doc_words(writer, id)?;
+    for id in &deleted_documents {
+        index.docs_words.del_doc_words(writer, *id)?;
     }

     let removed_words = fst::Set::from_iter(removed_words).unwrap();
@@ -180,5 +180,28 @@ pub fn apply_documents_deletion(

     compute_short_prefixes(writer, &words, index)?;

+    // update is finished; update sorted document id cache with new state
+    document_cache_remove_deleted(writer, index, &ranked_map, &deleted_documents)?;
+
+    Ok(())
+}
+
+/// rebuilds the document id cache, either by removing deleted documents from the existing cache
+/// or by generating a new one from the documents in the store
+fn document_cache_remove_deleted(writer: &mut MainWriter, index: &Index, ranked_map: &RankedMap, documents_to_delete: &HashSet<DocumentId>) -> MResult<()> {
+    let new_cache = match index.main.sorted_document_ids_cache(writer)? {
+        // only keep documents that are not in the list of deleted documents. Order is preserved,
+        // no need to resort
+        Some(old_cache) => {
+            old_cache.iter().filter(|docid| !documents_to_delete.contains(docid)).cloned().collect::<Vec<_>>()
+        }
+        // couldn't find cached documents, try building a new cache from documents in store
+        None => {
+            let mut document_ids = index.main.internal_docids(writer)?.to_vec();
+            super::cache_document_ids_sorted(writer, ranked_map, index, &mut document_ids)?;
+            document_ids
+        }
+    };
+    index.main.put_sorted_document_ids_cache(writer, &new_cache)?;
     Ok(())
 }
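The deletion path above avoids a re-sort on purpose: filtering the cached list preserves its existing order. A self-contained sketch of that invariant (the ids and the deletion set are invented):

    use std::collections::HashSet;

    fn main() {
        // stand-ins: the cached, rule-sorted ids and the set of deleted documents
        let old_cache: Vec<u64> = vec![17, 3, 42, 8, 25];
        let deleted: HashSet<u64> = [3u64, 8].into_iter().collect();

        // same shape as document_cache_remove_deleted: keep only survivors,
        // in their existing order
        let new_cache: Vec<u64> = old_cache
            .into_iter()
            .filter(|id| !deleted.contains(id))
            .collect();

        assert_eq!(new_cache, vec![17, 42, 25]);
    }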
@@ -25,8 +25,9 @@ use serde::{Deserialize, Serialize};
 use serde_json::Value;

 use meilisearch_error::ErrorCode;
+use meilisearch_types::DocumentId;

-use crate::{store, MResult};
+use crate::{store, MResult, RankedMap};
 use crate::database::{MainT, UpdateT};
 use crate::settings::SettingsUpdate;
@@ -371,3 +372,13 @@ where A: AsRef<[u8]>,

     Ok(())
 }
+
+fn cache_document_ids_sorted(
+    writer: &mut heed::RwTxn<MainT>,
+    ranked_map: &RankedMap,
+    index: &store::Index,
+    document_ids: &mut [DocumentId],
+) -> MResult<()> {
+    crate::bucket_sort::placeholder_document_sort(document_ids, index, writer, ranked_map)?;
+    index.main.put_sorted_document_ids_cache(writer, &document_ids)
+}
@@ -70,9 +70,9 @@ features = [
 optional = true

 [dev-dependencies]
+serde_url_params = "0.2.0"
 tempdir = "0.3.7"
 tokio = { version = "0.2.18", features = ["macros", "time"] }
-serde_url_params = "0.2.0"

 [dev-dependencies.assert-json-diff]
 git = "https://github.com/qdequele/assert-json-diff"
@@ -20,11 +20,11 @@ use slice_group_by::GroupBy;
 use crate::error::{Error, ResponseError};

 pub trait IndexSearchExt {
-    fn new_search(&self, query: String) -> SearchBuilder;
+    fn new_search(&self, query: Option<String>) -> SearchBuilder;
 }

 impl IndexSearchExt for Index {
-    fn new_search(&self, query: String) -> SearchBuilder {
+    fn new_search(&self, query: Option<String>) -> SearchBuilder {
         SearchBuilder {
             index: self,
             query,
@@ -43,7 +43,7 @@ impl IndexSearchExt for Index {

 pub struct SearchBuilder<'a> {
     index: &'a Index,
-    query: String,
+    query: Option<String>,
     offset: usize,
     limit: usize,
     attributes_to_crop: Option<HashMap<String, usize>>,
@@ -156,7 +156,7 @@ impl<'a> SearchBuilder<'a> {
         query_builder.set_facets(self.facets);

         let start = Instant::now();
-        let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+        let result = query_builder.query(reader, self.query.as_deref(), self.offset..(self.offset + self.limit));
         let search_result = result.map_err(Error::search_documents)?;
         let time_ms = start.elapsed().as_millis() as usize;

@@ -245,7 +245,7 @@ impl<'a> SearchBuilder<'a> {
             nb_hits: search_result.nb_hits,
             exhaustive_nb_hits: search_result.exhaustive_nb_hit,
             processing_time_ms: time_ms,
-            query: self.query.to_string(),
+            query: self.query.unwrap_or_default(),
             facets_distribution: search_result.facets,
             exhaustive_facets_count: search_result.exhaustive_facets_count,
         };
@@ -24,7 +24,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
 #[derive(Serialize, Deserialize)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQuery {
-    q: String,
+    q: Option<String>,
     offset: Option<usize>,
     limit: Option<usize>,
     attributes_to_retrieve: Option<String>,
@@ -50,7 +50,7 @@ async fn search_with_url_query(
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQueryPost {
-    q: String,
+    q: Option<String>,
     offset: Option<usize>,
     limit: Option<usize>,
     attributes_to_retrieve: Option<Vec<String>>,
@@ -177,7 +177,6 @@ impl SearchQuery {
             None => (),
         }
     }
-
     search_builder.attributes_to_crop(final_attributes);
 }

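At the HTTP level, `q` is now optional on both routes, so a search request without any query string is valid. A sketch of what the new tests exercise (the index uid `test` is a placeholder):

    GET /indexes/test/search?limit=3

    POST /indexes/test/search
    {"limit": 3}

Both forms perform a placeholder search and return the first three documents in ranking-rule order.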
@@ -1,16 +1,32 @@
 #![allow(dead_code)]

+use actix_web::{http::StatusCode, test};
 use serde_json::{json, Value};
 use std::time::Duration;

-use actix_web::{http::StatusCode, test};
-use meilisearch_core::DatabaseOptions;
-use meilisearch_http::data::Data;
-use meilisearch_http::option::Opt;
-use meilisearch_http::helpers::NormalizePath;
 use tempdir::TempDir;
 use tokio::time::delay_for;

+use meilisearch_core::DatabaseOptions;
+use meilisearch_http::data::Data;
+use meilisearch_http::helpers::NormalizePath;
+use meilisearch_http::option::Opt;
+
+/// Performs a search test on both post and get routes
+#[macro_export]
+macro_rules! test_post_get_search {
+    ($server:expr, $query:expr, |$response:ident, $status_code:ident| $block:expr) => {
+        let post_query: meilisearch_http::routes::search::SearchQueryPost = serde_json::from_str(&$query.clone().to_string()).unwrap();
+        let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
+        let get_query = ::serde_url_params::to_string(&get_query).unwrap();
+        let ($response, $status_code) = $server.search_get(&get_query).await;
+        let _ = ::std::panic::catch_unwind(|| $block)
+            .map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
+        let ($response, $status_code) = $server.search_post($query).await;
+        let _ = ::std::panic::catch_unwind(|| $block)
+            .map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
+    };
+}
+
 pub struct Server {
     uid: String,
     data: Data,
meilisearch-http/tests/placeholder_search.rs (new file, 497 lines)
@@ -0,0 +1,497 @@
|
use std::convert::Into;
|
||||||
|
|
||||||
|
use serde_json::json;
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::sync::Mutex;
|
||||||
|
use std::cell::RefCell;
|
||||||
|
|
||||||
|
#[macro_use] mod common;
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn placeholder_search_with_limit() {
|
||||||
|
let mut server = common::Server::test_server().await;
|
||||||
|
|
||||||
|
let query = json! ({
|
||||||
|
"limit": 3
|
||||||
|
});
|
||||||
|
|
||||||
|
test_post_get_search!(server, query, |response, status_code| {
|
||||||
|
assert_eq!(status_code, 200);
|
||||||
|
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn placeholder_search_with_offset() {
|
||||||
|
let mut server = common::Server::test_server().await;
|
||||||
|
|
||||||
|
let query = json!({
|
||||||
|
"limit": 6,
|
||||||
|
});
|
||||||
|
|
||||||
|
// hack to take a value out of macro (must implement UnwindSafe)
|
||||||
|
let expected = Mutex::new(RefCell::new(Vec::new()));
|
||||||
|
|
||||||
|
test_post_get_search!(server, query, |response, status_code| {
|
||||||
|
assert_eq!(status_code, 200);
|
||||||
|
// take results at offset 3 as reference
|
||||||
|
let lock = expected.lock().unwrap();
|
||||||
|
lock.replace(response["hits"].as_array().unwrap()[3..6].iter().cloned().collect());
|
||||||
|
});
|
||||||
|
|
||||||
|
let expected = expected.into_inner().unwrap().into_inner();
|
||||||
|
|
||||||
|
let query = json!({
|
||||||
|
"limit": 3,
|
||||||
|
"offset": 3,
|
||||||
|
});
|
||||||
|
test_post_get_search!(server, query, |response, status_code| {
|
||||||
|
assert_eq!(status_code, 200);
|
||||||
|
let response = response["hits"].as_array().unwrap();
|
||||||
|
assert_eq!(&expected, response);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|

#[actix_rt::test]
async fn placeholder_search_with_attribute_to_highlight_wildcard() {
    // there should be no highlights in a placeholder search
    let mut server = common::Server::test_server().await;

    let query = json!({
        "limit": 1,
        "attributesToHighlight": ["*"]
    });

    test_post_get_search!(server, query, |response, status_code| {
        assert_eq!(status_code, 200);
        let result = response["hits"]
            .as_array()
            .unwrap()[0]
            .as_object()
            .unwrap();
        for value in result.values() {
            assert!(value.to_string().find("<em>").is_none());
        }
    });
}

#[actix_rt::test]
async fn placeholder_search_with_matches() {
    // `_matchesInfo` is always empty in a placeholder search
    let mut server = common::Server::test_server().await;

    let query = json!({
        "matches": true
    });

    test_post_get_search!(server, query, |response, status_code| {
        assert_eq!(status_code, 200);
        let result = response["hits"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_object().unwrap()["_matchesInfo"].clone())
            .all(|m| m.as_object().unwrap().is_empty());
        assert!(result);
    });
}

#[actix_rt::test]
async fn placeholder_search_with_crop() {
    // a placeholder search always crops from the beginning of the field
    let mut server = common::Server::test_server().await;

    let query = json!({
        "attributesToCrop": ["about"],
        "cropLength": 20
    });

    test_post_get_search!(server, query, |response, status_code| {
        assert_eq!(status_code, 200);

        let hits = response["hits"].as_array().unwrap();

        for hit in hits {
            let hit = hit.as_object().unwrap();
            let formatted = hit["_formatted"].as_object().unwrap();

            let about = hit["about"].as_str().unwrap();
            let about_formatted = formatted["about"].as_str().unwrap();
            // the formatted `about` field should be roughly 20 characters long
            assert!(about_formatted.len() < 20 + 10);
            // the formatted part should be a prefix of the original field
            assert_eq!(about.find(&about_formatted).unwrap(), 0);
        }
    });
}

#[actix_rt::test]
async fn placeholder_search_with_attributes_to_retrieve() {
    let mut server = common::Server::test_server().await;

    let query = json!({
        "limit": 1,
        "attributesToRetrieve": ["gender", "about"],
    });

    test_post_get_search!(server, query, |response, _status_code| {
        let hit = response["hits"]
            .as_array()
            .unwrap()[0]
            .as_object()
            .unwrap();
        // only the two requested attributes may be present; indexing panics
        // if either of them is missing
        assert_eq!(hit.values().count(), 2);
        let _ = hit["gender"];
        let _ = hit["about"];
    });
}

#[actix_rt::test]
async fn placeholder_search_with_filter() {
    let mut server = common::Server::test_server().await;

    let query = json!({
        "filters": "color='green'"
    });

    test_post_get_search!(server, query, |response, _status_code| {
        let hits = response["hits"].as_array().unwrap();
        assert!(hits.iter().all(|v| v["color"].as_str().unwrap() == "green"));
    });

    let query = json!({
        "filters": "tags=bug"
    });

    test_post_get_search!(server, query, |response, _status_code| {
        let hits = response["hits"].as_array().unwrap();
        let value = Value::String(String::from("bug"));
        assert!(hits.iter().all(|v| v["tags"].as_array().unwrap().contains(&value)));
    });

    let query = json!({
        "filters": "color='green' AND (tags='bug' OR tags='wontfix')"
    });
    test_post_get_search!(server, query, |response, _status_code| {
        let hits = response["hits"].as_array().unwrap();
        let bug = Value::String(String::from("bug"));
        let wontfix = Value::String(String::from("wontfix"));
        // the parentheses mirror the filter: green AND (bug OR wontfix)
        assert!(hits.iter().all(|v|
            v["color"].as_str().unwrap() == "green" &&
            (v["tags"].as_array().unwrap().contains(&bug) ||
             v["tags"].as_array().unwrap().contains(&wontfix))));
    });
}
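
// Note: as exercised above, `filters` takes a boolean expression over
// attribute comparisons (`attr=value`, with optional single quotes around the
// value) combined with `AND`, `OR`, and parentheses; on an array attribute
// such as `tags`, equality means "the array contains the value".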

#[actix_rt::test]
async fn placeholder_test_faceted_search_valid() {
    let mut server = common::Server::test_server().await;

    // simple tests on an attribute with a string value
    let body = json!({
        "attributesForFaceting": ["color"]
    });

    server.update_all_settings(body).await;

    let query = json!({
        "facetFilters": ["color:green"]
    });

    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("color").unwrap() == "green"));
    });

    let query = json!({
        "facetFilters": [["color:blue"]]
    });

    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("color").unwrap() == "blue"));
    });

    // facet values are matched case-insensitively
    let query = json!({
        "facetFilters": ["color:Blue"]
    });

    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("color").unwrap() == "blue"));
    });

    // test on an array attribute: ["tags:bug"]
    let body = json!({
        "attributesForFaceting": ["color", "tags"]
    });

    server.update_all_settings(body).await;

    let query = json!({
        "facetFilters": ["tags:bug"]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
    });

    // test and: ["color:blue", "tags:bug"]
    let query = json!({
        "facetFilters": ["color:blue", "tags:bug"]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("color").unwrap() == "blue"
                && value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
    });

    // test or: [["color:blue", "color:green"]]
    let query = json!({
        "facetFilters": [["color:blue", "color:green"]]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("color").unwrap() == "blue"
                || value.get("color").unwrap() == "green"));
    });

    // test and-or: ["tags:bug", ["color:blue", "color:green"]]
    let query = json!({
        "facetFilters": ["tags:bug", ["color:blue", "color:green"]]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
        assert!(response
            .get("hits")
            .unwrap()
            .as_array()
            .unwrap()
            .iter()
            .all(|value| value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))
                && (value.get("color").unwrap() == "blue"
                    || value.get("color").unwrap() == "green")));
    });
}
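
// As the cases above show, entries of the top-level `facetFilters` array are
// combined with AND while entries of a nested array are combined with OR, so
// ["tags:bug", ["color:blue", "color:green"]] reads as
// tags:bug AND (color:blue OR color:green).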

#[actix_rt::test]
async fn placeholder_test_faceted_search_invalid() {
    let mut server = common::Server::test_server().await;

    // no faceted attributes set
    let query = json!({
        "facetFilters": ["color:blue"]
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));

    let body = json!({
        "attributesForFaceting": ["color", "tags"]
    });
    server.update_all_settings(body).await;

    // empty arrays are an error
    // []
    let query = json!({
        "facetFilters": []
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
    // [[]]
    let query = json!({
        "facetFilters": [[]]
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
    // ["color:green", []]
    let query = json!({
        "facetFilters": ["color:green", []]
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));

    // too much nesting
    // [[[]]]
    let query = json!({
        "facetFilters": [[[]]]
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
    // [["color:green", ["color:blue"]]]
    let query = json!({
        "facetFilters": [["color:green", ["color:blue"]]]
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
    // a bare string: "color:green"
    let query = json!({
        "facetFilters": "color:green"
    });
    test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
}
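
// Each malformed `facetFilters` shape above (an empty array, nesting deeper
// than one level, or a bare string instead of an array) must be rejected:
// the assertions only check that the route does not answer 202 (Accepted).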

#[actix_rt::test]
async fn placeholder_test_facet_count() {
    let mut server = common::Server::test_server().await;

    // test without facet distribution
    let query = json!({});
    test_post_get_search!(server, query, |response, _status_code| {
        assert!(response.get("exhaustiveFacetsCount").is_none());
        assert!(response.get("facetsDistribution").is_none());
    });

    // test with no faceted attributes set, requesting a distribution on color
    let query = json!({
        "facetsDistribution": ["color"]
    });
    test_post_get_search!(server, query.clone(), |_response, status_code| {
        assert_eq!(status_code, 400);
    });

    let body = json!({
        "attributesForFaceting": ["color", "tags"]
    });
    server.update_all_settings(body).await;
    // same query as before, but now the facets are set:
    test_post_get_search!(server, query, |response, _status_code| {
        println!("{}", response);
        assert!(response.get("exhaustiveFacetsCount").is_some());
        assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 1);
    });
    // requesting a distribution on both color and tags
    let query = json!({
        "facetsDistribution": ["color", "tags"]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        let facets = response.get("facetsDistribution").unwrap().as_object().unwrap();
        assert_eq!(facets.values().count(), 2);
        assert_ne!(facets.get("color").unwrap().as_object().unwrap().values().count(), 0);
        assert_ne!(facets.get("tags").unwrap().as_object().unwrap().values().count(), 0);
    });
    // wildcard
    let query = json!({
        "facetsDistribution": ["*"]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 2);
    });
    // wildcard mixed with other attributes:
    let query = json!({
        "facetsDistribution": ["color", "*"]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 2);
    });

    // empty facet list
    let query = json!({
        "facetsDistribution": []
    });
    test_post_get_search!(server, query, |response, _status_code| {
        assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 0);
    });

    // an attribute that is not set as a facet:
    let query = json!({
        "facetsDistribution": ["gender"]
    });
    test_post_get_search!(server, query, |_response, status_code| {
        assert_eq!(status_code, 400);
    });
}
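
// In short: `facetsDistribution` must name attributes declared in
// `attributesForFaceting` (otherwise the route answers 400), and the "*"
// wildcard expands to all of them, which is why both ["*"] and
// ["color", "*"] above yield exactly two distributions.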

#[actix_rt::test]
#[should_panic]
async fn placeholder_test_bad_facet_distribution() {
    let mut server = common::Server::test_server().await;
    // a string instead of an array:
    let query = json!({
        "facetsDistribution": "color"
    });
    test_post_get_search!(server, query, |_response, _status_code| {});

    // an invalid value in the array:
    let query = json!({
        "facetsDistribution": ["color", true]
    });
    test_post_get_search!(server, query, |_response, _status_code| {});
}

#[actix_rt::test]
async fn placeholder_test_sort() {
    let mut server = common::Server::test_server().await;

    let body = json!({
        "rankingRules": ["asc(age)"],
        "attributesForFaceting": ["color"]
    });
    server.update_all_settings(body).await;
    let query = json!({});
    test_post_get_search!(server, query, |response, _status_code| {
        let hits = response["hits"].as_array().unwrap();
        // the hits must come back in non-decreasing age order
        hits.iter().map(|v| v["age"].as_u64().unwrap()).fold(0, |prev, cur| {
            assert!(cur >= prev);
            cur
        });
    });

    let query = json!({
        "facetFilters": ["color:green"]
    });
    test_post_get_search!(server, query, |response, _status_code| {
        let hits = response["hits"].as_array().unwrap();
        // the order must also hold on a facet-filtered placeholder search
        hits.iter().map(|v| v["age"].as_u64().unwrap()).fold(0, |prev, cur| {
            assert!(cur >= prev);
            cur
        });
    });
}
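
// A possible refactor (hypothetical, not part of this diff): the sortedness
// check used twice above could be factored into a small helper.
//
//     fn assert_sorted_by_age(hits: &[serde_json::Value]) {
//         let ages: Vec<u64> = hits.iter().map(|v| v["age"].as_u64().unwrap()).collect();
//         assert!(ages.windows(2).all(|w| w[0] <= w[1]));
//     }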

@ -1,25 +1,10 @@
use std::convert::Into;

use meilisearch_http::routes::search::{SearchQuery, SearchQueryPost};

use assert_json_diff::assert_json_eq;
use serde_json::json;
use serde_json::Value;

mod common;
#[macro_use] mod common;

macro_rules! test_post_get_search {
    ($server:expr, $query:expr, |$response:ident, $status_code:ident| $block:expr) => {
        let post_query: SearchQueryPost = serde_json::from_str(&$query.clone().to_string()).unwrap();
        let get_query: SearchQuery = post_query.into();
        let get_query = ::serde_url_params::to_string(&get_query).unwrap();
        let ($response, $status_code) = $server.search_get(&get_query).await;
        let _ = ::std::panic::catch_unwind(|| $block)
            .map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
        let ($response, $status_code) = $server.search_post($query).await;
        let _ = ::std::panic::catch_unwind(|| $block)
            .map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
    };
}

#[actix_rt::test]
async fn search_with_limit() {