Mirror of https://github.com/meilisearch/MeiliSearch

update milli

parent f4f42ec441
commit 944a5bb36e

Cargo.lock (generated)
@@ -1725,7 +1725,7 @@ dependencies = [
 [[package]]
 name = "milli"
 version = "0.1.0"
-source = "git+https://github.com/meilisearch/milli.git?rev=794fce7#794fce7bff3e3461a7f3954fd97f58f8232e5a8e"
+source = "git+https://github.com/meilisearch/milli.git?rev=f190d5f#f190d5f496cc39517b6a81300c6dee9b6dba7a38"
 dependencies = [
  "anyhow",
  "bstr",

@@ -1756,6 +1756,7 @@ dependencies = [
  "roaring",
  "serde",
  "serde_json",
+ "slice-group-by",
  "smallstr",
  "smallvec",
  "tempfile",

@@ -38,7 +38,7 @@ main_error = "0.1.0"
 meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-tokenizer = { git = "https://github.com/meilisearch/Tokenizer.git", branch = "main" }
 memmap = "0.7.0"
-milli = { git = "https://github.com/meilisearch/milli.git", rev = "794fce7" }
+milli = { git = "https://github.com/meilisearch/milli.git", rev = "f190d5f" }
 mime = "0.3.16"
 once_cell = "1.5.2"
 rand = "0.7.3"
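
Note: the `rev` pin in Cargo.toml and the `source` line in Cargo.lock have to move together, and refreshing the lock for the new rev is also what pulls in milli's new `slice-group-by` transitive dependency seen above.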

@@ -109,7 +109,7 @@ impl Data {
         let criteria = index
             .criteria(&txn)?
             .into_iter()
-            .map(|v| format!("{:?}", v))
+            .map(|v| format!("{}", v))
             .collect();

         Ok(Settings {
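
Switching the criteria serialization from `{:?}` to `{}` means the settings payload now carries each criterion's Display form instead of its Debug form. A minimal sketch of the difference, with a hypothetical two-variant enum standing in for milli's criterion type (its real Display output is an assumption here):

    use std::fmt;

    // Hypothetical stand-in for milli's criterion type.
    #[derive(Debug)]
    enum Criterion {
        Typo,
        Proximity,
    }

    // Assumed Display impl: lowercase, user-facing names.
    impl fmt::Display for Criterion {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                Criterion::Typo => write!(f, "typo"),
                Criterion::Proximity => write!(f, "proximity"),
            }
        }
    }

    fn main() {
        // Debug gives the Rust variant name; Display gives the API name.
        assert_eq!(format!("{:?}", Criterion::Proximity), "Proximity");
        assert_eq!(format!("{}", Criterion::Proximity), "proximity");
    }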

@@ -6,7 +6,7 @@ use anyhow::{bail, Context};
 use either::Either;
 use heed::RoTxn;
 use meilisearch_tokenizer::{Analyzer, AnalyzerConfig};
-use milli::{obkv_to_json, FacetCondition, Index, facet::FacetValue};
+use milli::{FacetCondition, Index, MatchingWords, facet::FacetValue, obkv_to_json};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};

@@ -60,7 +60,7 @@ impl SearchQuery {

         let milli::SearchResult {
             documents_ids,
-            found_words,
+            matching_words,
             candidates,
             ..
         } = search.execute()?;

@@ -92,7 +92,7 @@ impl SearchQuery {
         for (_id, obkv) in index.documents(&rtxn, documents_ids)? {
             let mut object = obkv_to_json(&displayed_fields_ids, &fields_ids_map, obkv)?;
             if let Some(ref attributes_to_highlight) = self.attributes_to_highlight {
-                highlighter.highlight_record(&mut object, &found_words, attributes_to_highlight);
+                highlighter.highlight_record(&mut object, &matching_words, attributes_to_highlight);
             }
             documents.push(object);
         }

@@ -145,7 +145,7 @@ impl<'a, A: AsRef<[u8]>> Highlighter<'a, A> {
         Self { analyzer }
     }

-    fn highlight_value(&self, value: Value, words_to_highlight: &HashSet<String>) -> Value {
+    fn highlight_value(&self, value: Value, words_to_highlight: &MatchingWords) -> Value {
         match value {
             Value::Null => Value::Null,
             Value::Bool(boolean) => Value::Bool(boolean),

@@ -155,7 +155,7 @@ impl<'a, A: AsRef<[u8]>> Highlighter<'a, A> {
                 let analyzed = self.analyzer.analyze(&old_string);
                 for (word, token) in analyzed.reconstruct() {
                     if token.is_word() {
-                        let to_highlight = words_to_highlight.contains(token.text());
+                        let to_highlight = words_to_highlight.matches(token.text());
                         if to_highlight {
                             string.push_str("<mark>")
                         }
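
The substantive change in this hunk: a `HashSet<String>` can only confirm words that appear verbatim among the query's found words, whereas `MatchingWords::matches` (per this diff) decides whether a document token should be highlighted, which lets derived forms such as prefix or typo matches light up too. A sketch of why the exact-set test falls short; the `MatchingWords` behaviour itself is only described in a comment, since its rules live in milli:

    use std::collections::HashSet;

    fn main() {
        // Old behaviour: exact membership test against the query's words.
        let found_words = HashSet::from(["world".to_string()]);
        assert!(found_words.contains("world"));
        // A token that matched via prefix or typo is missed by contains():
        assert!(!found_words.contains("worlds"));
        // MatchingWords::matches is expected to accept such derived forms
        // as well; the exact matching rules are milli's, not shown here.
    }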

@@ -187,7 +187,7 @@ impl<'a, A: AsRef<[u8]>> Highlighter<'a, A> {
     fn highlight_record(
         &self,
         object: &mut Map<String, Value>,
-        words_to_highlight: &HashSet<String>,
+        words_to_highlight: &MatchingWords,
         attributes_to_highlight: &HashSet<String>,
     ) {
         // TODO do we need to create a string for element that are not and needs to be highlight?
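
For context, the wrap-in-`<mark>` pattern that `highlight_value` applies, reduced to a self-contained sketch: a plain closure stands in for `MatchingWords` and whitespace splitting stands in for the meilisearch-tokenizer analyzer (both are simplifying assumptions):

    // Simplified reconstruct-and-wrap loop: emit each token, surrounding
    // the matching ones with <mark>...</mark>.
    fn highlight(text: &str, is_match: impl Fn(&str) -> bool) -> String {
        let mut out = String::with_capacity(text.len());
        for (i, word) in text.split(' ').enumerate() {
            if i > 0 {
                out.push(' ');
            }
            if is_match(word) {
                out.push_str("<mark>");
                out.push_str(word);
                out.push_str("</mark>");
            } else {
                out.push_str(word);
            }
        }
        out
    }

    fn main() {
        assert_eq!(
            highlight("hello world", |w| w == "world"),
            "hello <mark>world</mark>",
        );
    }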

@@ -10,7 +10,6 @@ async fn get_settings_unexisting_index() {

 // test broken, should be fixed with milli#101
 #[actix_rt::test]
-#[ignore]
 async fn get_settings() {
     let server = Server::new().await;
     let index = server.index("test");

@@ -22,7 +21,7 @@ async fn get_settings() {
     assert_eq!(settings["displayedAttributes"], json!(["*"]));
     assert_eq!(settings["searchableAttributes"], json!(["*"]));
     assert_eq!(settings["facetedAttributes"], json!({}));
-    assert_eq!(settings["rankingRules"], json!(["typo", "words", "proximmity", "attributes", "wordsPosition", "exactness"]));
+    assert_eq!(settings["rankingRules"], json!(["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"]));
 }

 #[actix_rt::test]
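
The corrected expectations ("proximity", "attribute") are what the settings route now returns for the ranking criteria after the `{:?}` to `{}` change above; together with dropping `#[ignore]`, this re-enables `get_settings` in the default test run.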