Mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-02-05 01:53:28 +01:00
refactor document tokenization
This commit is contained in:
parent ed882f26b1
commit 4b926b881d
@@ -3,9 +3,10 @@ use std::collections::HashMap;
 use charabia::{SeparatorKind, Token, TokenKind, Tokenizer, TokenizerBuilder};
 use serde_json::Value;
 
+use crate::attribute_patterns::PatternMatch;
 use crate::update::new::document::Document;
 use crate::update::new::extract::perm_json_p::{
-    seek_leaf_values_in_array, seek_leaf_values_in_object, select_field, Depth, Selection,
+    seek_leaf_values_in_array, seek_leaf_values_in_object, Depth,
 };
 use crate::{
     FieldId, GlobalFieldsIdsMap, InternalError, LocalizedAttributesRule, Result, UserError,
@@ -17,8 +18,6 @@ const MAX_DISTANCE: u32 = 8;
 
 pub struct DocumentTokenizer<'a> {
     pub tokenizer: &'a Tokenizer<'a>,
-    pub attribute_to_extract: Option<&'a [&'a str]>,
-    pub attribute_to_skip: &'a [&'a str],
     pub localized_attributes_rules: &'a [LocalizedAttributesRule],
     pub max_positions_per_attributes: u32,
 }
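Review note: the `attribute_to_extract` / `attribute_to_skip` fields disappear here because the select/skip decision now travels with the field's metadata (`meta.is_searchable()` in the next hunk) instead of being re-derived from two explicit lists. A minimal sketch of that design change, using stand-in types rather than the actual milli ones:

```rust
// Stand-in Metadata type; milli's real field metadata is not shown in this diff.
#[derive(Clone, Copy)]
struct Metadata {
    searchable: bool,
}

impl Metadata {
    fn is_searchable(self) -> bool {
        self.searchable
    }
}

// Old shape: decision recomputed from allow/skip lists at tokenization time.
fn old_select(field: &str, extract: Option<&[&str]>, skip: &[&str]) -> bool {
    !skip.contains(&field) && extract.map_or(true, |list| list.contains(&field))
}

// New shape: decision precomputed once and attached to the field entry.
fn new_select(meta: Metadata) -> bool {
    meta.is_searchable()
}

fn main() {
    assert!(old_select("title", None, &["internal_id"]));
    assert!(new_select(Metadata { searchable: true }));
    println!("both paths select the field");
}
```

Precomputing the answer into per-field metadata means the tokenizer no longer needs to carry filtering configuration at all, which is why both struct fields (and the `select_field` / `Selection` imports above) can go.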
@@ -31,21 +30,53 @@ impl<'a> DocumentTokenizer<'a> {
         token_fn: &mut impl FnMut(&str, FieldId, u16, &str) -> Result<()>,
     ) -> Result<()> {
         let mut field_position = HashMap::new();
 
-        for entry in document.iter_top_level_fields() {
-            let (field_name, value) = entry?;
-
-            let mut tokenize_field = |field_name: &str, _depth, value: &Value| {
-                let Some(field_id) = field_id_map.id_or_insert(field_name) else {
-                    return Err(UserError::AttributeLimitReached.into());
-                };
-
-                if select_field(field_name, self.attribute_to_extract, self.attribute_to_skip)
-                    != Selection::Select
-                {
-                    return Ok(());
-                }
-
+        let mut tokenize_field = |field_name: &str, _depth, value: &Value| {
+            let Some((field_id, meta)) = field_id_map.id_with_metadata_or_insert(field_name) else {
+                return Err(UserError::AttributeLimitReached.into());
+            };
+
+            if meta.is_searchable() {
+                self.tokenize_field(field_id, field_name, value, token_fn, &mut field_position)?;
+            }
+
+            // todo: should be a match on the field_name using `match_field_legacy` function,
+            // but for legacy reasons we iterate over all the fields to fill the field_id_map.
+            Ok(PatternMatch::Match)
+        };
+
+        for entry in document.iter_top_level_fields() {
+            let (field_name, value) = entry?;
+            // parse json.
+            match serde_json::to_value(value).map_err(InternalError::SerdeJson)? {
+                Value::Object(object) => seek_leaf_values_in_object(
+                    &object,
+                    field_name,
+                    Depth::OnBaseKey,
+                    &mut tokenize_field,
+                )?,
+                Value::Array(array) => seek_leaf_values_in_array(
+                    &array,
+                    field_name,
+                    Depth::OnBaseKey,
+                    &mut tokenize_field,
+                )?,
+                value => {
+                    tokenize_field(field_name, Depth::OnBaseKey, &value)?;
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    fn tokenize_field(
+        &self,
+        field_id: FieldId,
+        field_name: &str,
+        value: &Value,
+        token_fn: &mut impl FnMut(&str, u16, u16, &str) -> std::result::Result<(), crate::Error>,
+        field_position: &mut HashMap<u16, u32>,
+    ) -> Result<()> {
         let position = field_position
             .entry(field_id)
             .and_modify(|counter| *counter += MAX_DISTANCE)
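Besides hoisting the closure out of the loop and moving the per-field work into a private `tokenize_field` method, this hunk changes the traversal callback's contract: it now returns `Result<PatternMatch>` instead of `Result<()>`, so `seek_leaf_values_in_object` / `seek_leaf_values_in_array` can in principle decide whether to keep descending. Those walkers' internals are not part of this commit; the sketch below is a simplified, self-contained model of that contract, not milli's implementation:

```rust
use serde_json::{json, Value};

// Stand-in for crate::attribute_patterns::PatternMatch; the real enum may
// carry more variants.
#[derive(Debug, PartialEq)]
enum PatternMatch {
    Match,
    NoMatch,
}

// Walk a JSON value and call `f` on every leaf, building dotted key paths.
// Returning PatternMatch (instead of ()) is what lets a walker like this
// decide whether a subtree is worth descending into.
fn seek_leaves(
    value: &Value,
    key: &str,
    f: &mut impl FnMut(&str, &Value) -> Result<PatternMatch, String>,
) -> Result<(), String> {
    match value {
        Value::Object(object) => {
            for (k, v) in object {
                seek_leaves(v, &format!("{key}.{k}"), f)?;
            }
        }
        Value::Array(array) => {
            for v in array {
                seek_leaves(v, key, f)?;
            }
        }
        leaf => {
            // A walker could prune on NoMatch; this sketch ignores the value,
            // like the commit's closure, which always returns Match so every
            // field still reaches the field_id_map.
            let _ = f(key, leaf)?;
        }
    }
    Ok(())
}

fn main() -> Result<(), String> {
    let doc = json!({ "title": "Diving", "meta": { "tags": ["sea", "reef"] } });
    let mut tokenize_field = |name: &str, value: &Value| {
        println!("would tokenize {name}: {value}");
        Ok(PatternMatch::Match)
    };
    seek_leaves(&doc, "doc", &mut tokenize_field)
}
```

Note also that the walkers lose two arguments (`None`, `&[]` in the removed call sites further down): with filtering handled by metadata, the traversal no longer needs the allow/skip lists threaded through it.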
@@ -68,7 +99,7 @@ impl<'a> DocumentTokenizer<'a> {
         let locales = self
             .localized_attributes_rules
             .iter()
-            .find(|rule| rule.match_str(field_name))
+            .find(|rule| rule.match_str(field_name) == PatternMatch::Match)
             .map(|rule| rule.locales());
         self.tokenizer.tokenize_with_allow_list(text.as_str(), locales)
     }
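`LocalizedAttributesRule::match_str` now returns a `PatternMatch` rather than a bool, hence the explicit `== PatternMatch::Match` at the call site. A toy stand-in (the real rule type and its pattern syntax are not shown in this diff):

```rust
// Stand-in PatternMatch; the real enum may carry more variants.
#[derive(Debug, PartialEq)]
enum PatternMatch {
    Match,
    NoMatch,
}

// Hypothetical rule type standing in for LocalizedAttributesRule.
struct Rule {
    pattern: String,
}

impl Rule {
    // Toy matcher: exact field name, or `prefix.*` matching any nested key.
    fn match_str(&self, field: &str) -> PatternMatch {
        let matched = match self.pattern.strip_suffix(".*") {
            Some(prefix) => field
                .strip_prefix(prefix)
                .is_some_and(|rest| rest.starts_with('.')),
            None => self.pattern == field,
        };
        if matched {
            PatternMatch::Match
        } else {
            PatternMatch::NoMatch
        }
    }
}

fn main() {
    let rules = [Rule { pattern: "desc.*".into() }];
    // Mirrors the changed call site: compare against PatternMatch::Match
    // instead of relying on a bool.
    let rule = rules.iter().find(|r| r.match_str("desc.en") == PatternMatch::Match);
    assert!(rule.is_some());
}
```

Returning an enum rather than a bool presumably leaves room to distinguish an exact match from a parent-key match when walking nested fields; the `match_field_legacy` todo in the hunk above points the same way.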
@@ -90,31 +121,6 @@ impl<'a> DocumentTokenizer<'a> {
                 }
             }
 
-                Ok(())
-            };
-
-            // parse json.
-            match serde_json::to_value(value).map_err(InternalError::SerdeJson)? {
-                Value::Object(object) => seek_leaf_values_in_object(
-                    &object,
-                    None,
-                    &[],
-                    field_name,
-                    Depth::OnBaseKey,
-                    &mut tokenize_field,
-                )?,
-                Value::Array(array) => seek_leaf_values_in_array(
-                    &array,
-                    None,
-                    &[],
-                    field_name,
-                    Depth::OnBaseKey,
-                    &mut tokenize_field,
-                )?,
-                value => tokenize_field(field_name, Depth::OnBaseKey, &value)?,
-            }
-        }
-
         Ok(())
     }
 }
@@ -215,8 +221,6 @@ mod test {
         let mut tb = TokenizerBuilder::default();
         let document_tokenizer = DocumentTokenizer {
            tokenizer: &tb.build(),
-            attribute_to_extract: None,
-            attribute_to_skip: &["not-me", "me-nether.nope"],
            localized_attributes_rules: &[],
            max_positions_per_attributes: 1000,
        };