From be395c7944204d90e9fd663f8bc5d01f1855be50 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 30 Oct 2023 16:26:29 +0100
Subject: [PATCH] Change order of arguments to tokenizer_builder

---
 .../index_documents/extract/extract_docid_word_positions.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/milli/src/update/index_documents/extract/extract_docid_word_positions.rs b/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
index e5d95cbdb..96156adb4 100644
--- a/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
+++ b/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
@@ -56,7 +56,7 @@ pub fn extract_docid_word_positions(
     let mut value_buffer = Vec::new();
 
     // initialize tokenizer.
-    let mut builder = tokenizer_builder(stop_words, dictionary, allowed_separators, None);
+    let mut builder = tokenizer_builder(stop_words, allowed_separators, dictionary, None);
     let tokenizer = builder.build();
 
     // iterate over documents.
@@ -247,8 +247,8 @@ fn lang_safe_tokens_from_document<'a>(
         // build a new temporary tokenizer including the allow list.
         let mut builder = tokenizer_builder(
             stop_words,
-            dictionary,
             allowed_separators,
+            dictionary,
             Some(&script_language),
         );
         let tokenizer = builder.build();