Fix PR comments
parent 43c13faeda
commit 6b2d671be7
@@ -28,8 +28,8 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
     indexer: GrenadParameters,
     searchable_fields: &Option<HashSet<FieldId>>,
     stop_words: Option<&fst::Set<&[u8]>>,
-    allowed_separators: Option<&Vec<&str>>,
-    dictionary: Option<&Vec<&str>>,
+    allowed_separators: Option<&[&str]>,
+    dictionary: Option<&[&str]>,
     max_positions_per_attributes: Option<u32>,
 ) -> Result<(RoaringBitmap, grenad::Reader<File>, ScriptLanguageDocidsMap)> {
     puffin::profile_function!();
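The first hunk loosens the two list parameters from Option<&Vec<&str>> to Option<&[&str]>. As a minimal sketch (not part of this commit; the function name extract_settings is hypothetical), the slice form is the more general signature: a borrow of a Vec<&str> and a borrow of a fixed-size array both coerce to &[&str] at the call site.

// Hypothetical function with the same slice-based parameters as the hunk above.
fn extract_settings(allowed_separators: Option<&[&str]>, dictionary: Option<&[&str]>) {
    if let Some(separators) = allowed_separators {
        println!("{} separators", separators.len());
    }
    if let Some(dictionary) = dictionary {
        println!("{} dictionary entries", dictionary.len());
    }
}

fn main() {
    let owned: Vec<&str> = vec![" ", ", "];
    // A borrowed Vec and a borrowed fixed-size array both coerce to &[&str].
    extract_settings(Some(owned.as_slice()), Some(&["saucisson", "jambon"]));
    extract_settings(None, None);
}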
@@ -55,10 +55,10 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
         tokenizer_builder.stop_words(stop_words);
     }
     if let Some(dictionary) = dictionary {
-        tokenizer_builder.words_dict(dictionary.as_slice());
+        tokenizer_builder.words_dict(dictionary);
     }
     if let Some(separators) = allowed_separators {
-        tokenizer_builder.separators(separators.as_slice());
+        tokenizer_builder.separators(separators);
     }
     let tokenizer = tokenizer_builder.build();

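The second hunk follows from the signature change: the value unpacked from Option<&[&str]> is already a slice, so the .as_slice() adapters disappear. A minimal sketch, using a hypothetical Builder rather than charabia's actual TokenizerBuilder:

// Hypothetical builder whose setters take slices, mirroring the calls above.
struct Builder<'a> {
    words: &'a [&'a str],
    separators: &'a [&'a str],
}

impl<'a> Builder<'a> {
    fn words_dict(&mut self, words: &'a [&'a str]) -> &mut Self {
        self.words = words;
        self
    }
    fn separators(&mut self, separators: &'a [&'a str]) -> &mut Self {
        self.separators = separators;
        self
    }
}

fn main() {
    let dictionary: Option<&[&str]> = Some(&["meilisearch", "grenad"]);
    let allowed_separators: Option<&[&str]> = Some(&[" ", ", "]);

    let mut builder = Builder { words: &[], separators: &[] };
    if let Some(dictionary) = dictionary {
        builder.words_dict(dictionary); // already a &[&str], no .as_slice() needed
    }
    if let Some(separators) = allowed_separators {
        builder.separators(separators);
    }
    println!("{} words, {} separators", builder.words.len(), builder.separators.len());
}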
@@ -49,8 +49,8 @@ pub(crate) fn data_from_obkv_documents(
     geo_fields_ids: Option<(FieldId, FieldId)>,
     vectors_field_id: Option<FieldId>,
     stop_words: Option<fst::Set<&[u8]>>,
-    allowed_separators: Option<Vec<&str>>,
-    dictionary: Option<Vec<&str>>,
+    allowed_separators: Option<&[&str]>,
+    dictionary: Option<&[&str]>,
     max_positions_per_attributes: Option<u32>,
     exact_attributes: HashSet<FieldId>,
 ) -> Result<()> {
@@ -293,8 +293,8 @@ fn send_and_extract_flattened_documents_data(
     geo_fields_ids: Option<(FieldId, FieldId)>,
     vectors_field_id: Option<FieldId>,
     stop_words: &Option<fst::Set<&[u8]>>,
-    allowed_separators: &Option<Vec<&str>>,
-    dictionary: &Option<Vec<&str>>,
+    allowed_separators: &Option<&[&str]>,
+    dictionary: &Option<&[&str]>,
     max_positions_per_attributes: Option<u32>,
 ) -> Result<(
     grenad::Reader<CursorClonableMmap>,
@@ -350,8 +350,8 @@ fn send_and_extract_flattened_documents_data(
                 indexer,
                 searchable_fields,
                 stop_words.as_ref(),
-                allowed_separators.as_ref(),
-                dictionary.as_ref(),
+                *allowed_separators,
+                *dictionary,
                 max_positions_per_attributes,
             )?;

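In this hunk the surrounding parameters are typed &Option<&[&str]> (previous hunk), so the call site can dereference instead of calling .as_ref(). A minimal sketch, not part of the commit (inner and outer are hypothetical names), of why the plain * works: &[&str] is Copy, and so is the Option wrapping it, so dereferencing copies the option out of the reference.

// Hypothetical callee taking the option by value, as extract_docid_word_positions now does.
fn inner(separators: Option<&[&str]>) -> usize {
    separators.map_or(0, |s| s.len())
}

// Hypothetical caller holding only a reference to the option.
fn outer(allowed_separators: &Option<&[&str]>) -> usize {
    // `*allowed_separators` copies the Option<&[&str]> out of the reference.
    inner(*allowed_separators)
}

fn main() {
    let separators: Option<&[&str]> = Some(&[" ", ", ", "; "]);
    println!("{}", outer(&separators)); // prints 3
}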
@@ -359,8 +359,8 @@ where
             geo_fields_ids,
             vectors_field_id,
             stop_words,
-            separators,
-            dictionary,
+            separators.as_ref().map(Vec::as_slice),
+            dictionary.as_ref().map(Vec::as_slice),
             max_positions_per_attributes,
             exact_attributes,
         )
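The last hunk is the outermost call site, which still owns its lists as Option<Vec<&str>> and now borrows them down to Option<&[&str]>. A minimal sketch, not part of the commit (takes_slices is a hypothetical name), showing the .as_ref().map(Vec::as_slice) conversion from the hunk and the equivalent Option::as_deref() shorthand:

// Hypothetical callee with the new slice-based parameters.
fn takes_slices(separators: Option<&[&str]>, dictionary: Option<&[&str]>) -> usize {
    separators.map_or(0, |s| s.len()) + dictionary.map_or(0, |d| d.len())
}

fn main() {
    let separators: Option<Vec<&str>> = Some(vec![" ", ", "]);
    let dictionary: Option<Vec<&str>> = None;

    // Borrow the owned vectors as slices without consuming them.
    let n = takes_slices(
        separators.as_ref().map(Vec::as_slice),
        dictionary.as_ref().map(Vec::as_slice),
    );
    // The same conversion, written with as_deref().
    let m = takes_slices(separators.as_deref(), dictionary.as_deref());
    assert_eq!(n, m);
    println!("{}", n);
}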