Mirror of https://github.com/meilisearch/meilisearch.git, synced 2024-11-30 09:04:59 +08:00
Change order of arguments to tokenizer_builder

parent  9fedd8101a
commit  be395c7944
@@ -56,7 +56,7 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
     let mut value_buffer = Vec::new();
 
     // initialize tokenizer.
-    let mut builder = tokenizer_builder(stop_words, dictionary, allowed_separators, None);
+    let mut builder = tokenizer_builder(stop_words, allowed_separators, dictionary, None);
     let tokenizer = builder.build();
 
     // iterate over documents.
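
The new order at the call site matches the order in which the helper itself presumably declares its parameters: stop words, then allowed separators, then the user dictionary, then an optional script/language allow list. The sketch below is a simplified, self-contained stand-in for that helper (the parameter types are assumptions, not the real meilisearch signature), kept only to make the slot order explicit:

use std::collections::HashMap;

// Simplified stand-in for the helper, not the real meilisearch code: the
// only thing it demonstrates is which slot each option occupies after the
// reorder above.
fn tokenizer_builder(
    stop_words: Option<&[&str]>,
    allowed_separators: Option<&[&str]>,
    dictionary: Option<&[&str]>,
    script_language: Option<&HashMap<String, Vec<String>>>,
) -> String {
    // A real builder would configure the tokenizer; this one just records
    // which option landed where.
    format!(
        "stop_words={:?} separators={:?} dictionary={:?} scripts={:?}",
        stop_words, allowed_separators, dictionary, script_language
    )
}

fn main() {
    let separators: &[&str] = &["-", "_"];
    let dictionary: &[&str] = &["c++", "e-mail"];
    // After this commit both call sites pass the separators before the
    // dictionary, mirroring the parameter order above.
    println!("{}", tokenizer_builder(None, Some(separators), Some(dictionary), None));
}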
@@ -247,8 +247,8 @@ fn lang_safe_tokens_from_document<'a>(
         // build a new temporary tokenizer including the allow list.
         let mut builder = tokenizer_builder(
             stop_words,
-            dictionary,
             allowed_separators,
+            dictionary,
             Some(&script_language),
         );
         let tokenizer = builder.build();
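
Why the order matters: allowed_separators and dictionary almost certainly share the same type (an optional list of strings), so passing them in swapped positions compiles without any warning and only shows up as wrong tokenization at runtime. A generic illustration of that pitfall, unrelated to the meilisearch code itself:

// Generic illustration, not meilisearch code: two parameters of the same
// type can be swapped at the call site and the compiler cannot object.
fn configure(allowed_separators: Option<&[&str]>, dictionary: Option<&[&str]>) -> (usize, usize) {
    (
        allowed_separators.map_or(0, |s| s.len()),
        dictionary.map_or(0, |d| d.len()),
    )
}

fn main() {
    let separators: &[&str] = &["-", "_"];
    let dictionary: &[&str] = &["c++", "e-mail", "j.r.r."];

    // Intended order: separators first, dictionary second.
    assert_eq!(configure(Some(separators), Some(dictionary)), (2, 3));

    // The swapped order also compiles, but silently misconfigures the caller:
    // dictionary entries would be treated as separators and vice versa.
    assert_eq!(configure(Some(dictionary), Some(separators)), (3, 2));
}

Keeping every call site in the same argument order as the definition, as this commit does for both tokenizer_builder call sites, is the lightest guard against this; wrapping same-typed parameters in distinct newtypes is the heavier alternative.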