Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-26 06:44:27 +01:00)

Merge branch 'master' into html-sanitize

Commit 451061f4b8
@@ -3,6 +3,7 @@
[![Build Status](https://github.com/meilisearch/MeiliSearch/workflows/Cargo%20test/badge.svg)](https://github.com/meilisearch/MeiliSearch/actions)
[![dependency status](https://deps.rs/repo/github/meilisearch/MeiliSearch/status.svg)](https://deps.rs/repo/github/meilisearch/MeiliSearch)
[![License](https://img.shields.io/badge/license-MIT-informational)](https://github.com/meilisearch/MeiliSearch/blob/master/LICENSE)
[![Slack](https://img.shields.io/badge/slack-MeiliSearch-blue.svg?logo=slack)](https://slack.meilisearch.com)

⚡ Ultra relevant and instant full-text search API 🔍

@@ -371,7 +371,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<
});
}

-let documents = builder.query(ref_reader, &query, 0..command.number_results)?;
+let (documents, _nb_hits) = builder.query(ref_reader, &query, 0..command.number_results)?;

let mut retrieve_duration = Duration::default();

@@ -37,7 +37,7 @@ pub fn bucket_sort<'c, FI>(
synonyms_store: store::Synonyms,
prefix_documents_cache_store: store::PrefixDocumentsCache,
prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
-) -> MResult<Vec<Document>>
+) -> MResult<(Vec<Document>, usize)>
where
FI: Fn(DocumentId) -> bool,
{
@@ -66,7 +66,7 @@ where

let words_set = match unsafe { main_store.static_words_fst(reader)? } {
Some(words) => words,
-None => return Ok(Vec::new()),
+None => return Ok((Vec::new(), 0)),
};

let stop_words = main_store.stop_words_fst(reader)?.unwrap_or_default();
@@ -172,7 +172,7 @@ where

debug!("bucket sort took {:.02?}", before_bucket_sort.elapsed());

-Ok(documents)
+Ok((documents, docids.len()))
}

pub fn bucket_sort_with_distinct<'c, FI, FD>(
@@ -190,14 +190,14 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
synonyms_store: store::Synonyms,
_prefix_documents_cache_store: store::PrefixDocumentsCache,
prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
-) -> MResult<Vec<Document>>
+) -> MResult<(Vec<Document>, usize)>
where
FI: Fn(DocumentId) -> bool,
FD: Fn(DocumentId) -> Option<u64>,
{
let words_set = match unsafe { main_store.static_words_fst(reader)? } {
Some(words) => words,
-None => return Ok(Vec::new()),
+None => return Ok((Vec::new(), 0)),
};

let stop_words = main_store.stop_words_fst(reader)?.unwrap_or_default();
@@ -363,7 +363,7 @@ where
}
}

-Ok(documents)
+Ok((documents, docids.len()))
}

fn cleanup_bare_matches<'tag, 'txn>(
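Both sorting entry points now return the page of documents for the requested range together with docids.len(), the count of all matched document ids, instead of the documents alone. The standalone sketch below illustrates the shape of that (documents, nb_hits) contract with a toy Document type and a plain slice of ids; it is not the crate's actual code, only an illustration of why the second element lets callers report a total hit count without materializing every document.

    // Illustrative only: a toy model of the (documents, nb_hits) return shape.
    #[derive(Debug)]
    struct Document {
        id: u64,
    }

    // Returns the documents that fall inside `range`, plus the total number
    // of candidate ids, mirroring the new MResult<(Vec<Document>, usize)>.
    fn bucket_sort_like(all_ids: &[u64], range: std::ops::Range<usize>) -> (Vec<Document>, usize) {
        let nb_hits = all_ids.len();
        let documents: Vec<Document> = all_ids
            .iter()
            .skip(range.start)
            .take(range.len())
            .map(|&id| Document { id })
            .collect();
        (documents, nb_hits)
    }

    fn main() {
        let ids = [10, 11, 12, 13, 14];
        let (page, nb_hits) = bucket_sort_like(&ids, 0..2);
        println!("returned {} of {} hits: {:?}", page.len(), nb_hits, page);
    }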
@@ -661,7 +661,7 @@ mod tests {

// even try to search for a document
let reader = db.main_read_txn().unwrap();
-let results = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
+let (results, _nb_hits) = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
assert_matches!(results.len(), 1);

reader.abort();
@@ -1059,7 +1059,7 @@ mod tests {

let builder = index.query_builder_with_criteria(criteria);

-let results = builder.query(&reader, "Kevin", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "Kevin", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(
@@ -92,7 +92,7 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
-) -> MResult<Vec<Document>> {
+) -> MResult<(Vec<Document>, usize)> {
match self.distinct {
Some((distinct, distinct_size)) => bucket_sort_with_distinct(
reader,
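Callers of QueryBuilder::query now destructure a (documents, nb_hits) pair; the updated tests below simply bind the count to _nb_hits when it is not needed. One hypothetical use of the extra value, sketched here with plain numbers rather than the crate's API, is deriving pagination metadata for a response:

    // Illustrative pagination math using the hit count returned alongside a page.
    fn total_pages(nb_hits: usize, limit: usize) -> usize {
        if limit == 0 {
            0
        } else {
            (nb_hits + limit - 1) / limit // ceiling division
        }
    }

    fn main() {
        let (page_len, nb_hits, limit) = (20, 1042, 20);
        println!(
            "showing {} results out of {} hits, {} pages",
            page_len,
            nb_hits,
            total_pages(nb_hits, limit)
        );
    }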
@@ -331,7 +331,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "iphone from apple", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "iphone from apple", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -354,7 +354,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "hello", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "hello", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -365,7 +365,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "bonjour", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "bonjour", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -469,7 +469,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "hello", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "hello", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -490,7 +490,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "bonjour", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "bonjour", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -511,7 +511,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "salut", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "salut", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -557,7 +557,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "NY subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -579,7 +579,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "NYC subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -621,7 +621,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "NY", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NY", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
@@ -645,7 +645,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "new york", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "new york", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -679,7 +679,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "NY subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -696,7 +696,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "new york subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "new york subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -744,7 +744,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "NY subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -766,7 +766,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "NYC subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -819,7 +819,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "NY subway broken", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NY subway broken", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -835,7 +835,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "NYC subway", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -891,7 +891,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder
+let (results, _nb_hits) = builder
.query(&reader, "new york underground train broken", 0..20)
.unwrap();
let mut iter = results.into_iter();
@@ -921,7 +921,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder
+let (results, _nb_hits) = builder
.query(&reader, "new york city underground train broken", 0..20)
.unwrap();
let mut iter = results.into_iter();
@@ -965,7 +965,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "new york big ", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "new york big ", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -999,7 +999,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "NY subway ", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "NY subway ", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1049,7 +1049,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder
+let (results, _nb_hits) = builder
.query(&reader, "new york city long subway cool ", 0..20)
.unwrap();
let mut iter = results.into_iter();
@@ -1082,7 +1082,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "telephone", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "telephone", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1099,7 +1099,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "téléphone", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "téléphone", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1116,7 +1116,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
-let results = builder.query(&reader, "télephone", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "télephone", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -1143,7 +1143,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "i phone case", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "i phone case", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1172,7 +1172,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "searchengine", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1212,7 +1212,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "searchengine", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1244,7 +1244,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
-let results = builder.query(&reader, "searchengine", 0..20).unwrap();
+let (results, _nb_hits) = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = results.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -223,12 +223,12 @@ impl<'a> SearchBuilder<'a> {
}

let start = Instant::now();
-let docs =
-query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+let (docs, nb_hits) = result.map_err(|e| Error::SearchDocuments(e.to_string()))?;
let time_ms = start.elapsed().as_millis() as usize;

let mut hits = Vec::with_capacity(self.limit);
-for doc in docs.map_err(|e| Error::SearchDocuments(e.to_string()))? {
+for doc in docs {
// retrieve the content of document in kv store
let mut fields: Option<HashSet<&str>> = None;
if let Some(attributes_to_retrieve) = &self.attributes_to_retrieve {
@@ -282,6 +282,8 @@ impl<'a> SearchBuilder<'a> {
hits,
offset: self.offset,
limit: self.limit,
+nb_hits,
+exhaustive_nb_hits: false,
processing_time_ms: time_ms,
query: self.query.to_string(),
};
@@ -358,12 +360,14 @@ pub struct SearchHit {
pub matches_info: Option<MatchesInfos>,
}

-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchResult {
pub hits: Vec<SearchHit>,
pub offset: usize,
pub limit: usize,
+pub nb_hits: usize,
+pub exhaustive_nb_hits: bool,
pub processing_time_ms: usize,
pub query: String,
}
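With #[serde(rename_all = "camelCase")], the two new fields serialize as nbHits and exhaustiveNbHits in the HTTP response body. The standalone sketch below (assuming serde with the derive feature and serde_json as dependencies, and with SearchHit simplified to a raw JSON value) only demonstrates the resulting payload shape; it is not the server's own response code.

    // Cargo deps assumed: serde = { version = "1", features = ["derive"] }, serde_json = "1".
    use serde::Serialize;

    // Simplified copy of the response struct, to show the camelCase keys.
    #[derive(Debug, Clone, Serialize)]
    #[serde(rename_all = "camelCase")]
    struct SearchResult {
        hits: Vec<serde_json::Value>,
        offset: usize,
        limit: usize,
        nb_hits: usize,
        exhaustive_nb_hits: bool,
        processing_time_ms: usize,
        query: String,
    }

    fn main() {
        let result = SearchResult {
            hits: vec![],
            offset: 0,
            limit: 20,
            nb_hits: 1042,
            exhaustive_nb_hits: false,
            processing_time_ms: 3,
            query: "NY subway".into(),
        };
        // Prints the keys as "nbHits", "exhaustiveNbHits", "processingTimeMs", ...
        println!("{}", serde_json::to_string_pretty(&result).unwrap());
    }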