Change order of arguments to tokenizer_builder
parent 9fedd8101a
commit be395c7944
1 changed file with 2 additions and 2 deletions

@@ -56,7 +56,7 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
     let mut value_buffer = Vec::new();
 
     // initialize tokenizer.
-    let mut builder = tokenizer_builder(stop_words, dictionary, allowed_separators, None);
+    let mut builder = tokenizer_builder(stop_words, allowed_separators, dictionary, None);
     let tokenizer = builder.build();
 
     // iterate over documents.
@@ -247,8 +247,8 @@ fn lang_safe_tokens_from_document<'a>(
     // build a new temporary tokenizer including the allow list.
     let mut builder = tokenizer_builder(
         stop_words,
-        dictionary,
         allowed_separators,
+        dictionary,
         Some(&script_language),
     );
     let tokenizer = builder.build();
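
Both hunks swap the `dictionary` and `allowed_separators` arguments at the two call sites, presumably so they match the parameter order of the `tokenizer_builder` helper. Since both arguments most likely have the same type, the compiler cannot flag a swap, and the order only stays correct by keeping call sites in sync with the signature by hand. The standalone Rust sketch below illustrates that point; the parameter names and types are assumptions for illustration, not the actual milli/charabia code.

// Hypothetical, simplified stand-in for milli's tokenizer_builder helper.
// Because `allowed_separators` and `dictionary` share a type, swapping them
// at a call site compiles without any warning.
fn tokenizer_builder<'a>(
    stop_words: Option<&'a [&'a str]>,
    allowed_separators: Option<&'a [&'a str]>,
    dictionary: Option<&'a [&'a str]>,
    script_language: Option<&'a str>,
) -> String {
    format!(
        "stop_words={:?} separators={:?} dictionary={:?} language={:?}",
        stop_words, allowed_separators, dictionary, script_language
    )
}

fn main() {
    let stop_words = ["the", "a"];
    let separators = ["-", "_"];
    let dictionary = ["meilisearch"];

    // After this commit, call sites pass `allowed_separators` before `dictionary`.
    // Passing them the other way round would still compile, which is exactly
    // why the argument order has to be kept consistent with the signature.
    let description = tokenizer_builder(
        Some(&stop_words),
        Some(&separators),
        Some(&dictionary),
        None,
    );
    println!("{description}");
}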