diff --git a/crates/meilisearch/tests/search/mod.rs b/crates/meilisearch/tests/search/mod.rs
index 1dc406fb3..f3c11e451 100644
--- a/crates/meilisearch/tests/search/mod.rs
+++ b/crates/meilisearch/tests/search/mod.rs
@@ -69,8 +69,9 @@ async fn search_with_stop_word() {
let server = Server::new().await;
let index = server.index("test");
- let (_, code) =
- index.update_settings(json!({"stopWords": ["the", "a", "an", "to", "in", "of"]})).await;
+ let (_, code) = index
+ .update_settings(json!({"stopWords": ["the", "The", "a", "an", "to", "in", "of"]}))
+ .await;
meili_snap::snapshot!(code, @"202 Accepted");

let documents = DOCUMENTS.clone();
@@ -81,16 +82,7 @@ async fn search_with_stop_word() {
index
.search(json!({"q": "to the", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
- snapshot!(json_string!(response["hits"]), @r###"
- [
- {
- "title": "How to Train Your Dragon: The Hidden World",
- "_formatted": {
- "title": "How to Train Your Dragon: The Hidden World"
- }
- }
- ]
- "###);
+ snapshot!(json_string!(response["hits"]), @"[]");
})
.await;
}
diff --git a/crates/milli/src/update/index_documents/extract/extract_docid_word_positions.rs b/crates/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
index ba11ceeb3..16ea92fa4 100644
--- a/crates/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
+++ b/crates/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
@@ -57,9 +57,9 @@ pub fn extract_docid_word_positions(
.map(|s| s.iter().map(String::as_str).collect());
let old_dictionary: Option<Vec<_>> =
settings_diff.old.dictionary.as_ref().map(|s| s.iter().map(String::as_str).collect());
- let del_builder =
+ let mut del_builder =
tokenizer_builder(old_stop_words, old_separators.as_deref(), old_dictionary.as_deref());
- let del_tokenizer = del_builder.into_tokenizer();
+ let del_tokenizer = del_builder.build();

let new_stop_words = settings_diff.new.stop_words.as_ref();
let new_separators: Option<Vec<_>> = settings_diff
@@ -69,9 +69,9 @@ pub fn extract_docid_word_positions(
.map(|s| s.iter().map(String::as_str).collect());
let new_dictionary: Option<Vec<_>> =
settings_diff.new.dictionary.as_ref().map(|s| s.iter().map(String::as_str).collect());
- let add_builder =
+ let mut add_builder =
tokenizer_builder(new_stop_words, new_separators.as_deref(), new_dictionary.as_deref());
- let add_tokenizer = add_builder.into_tokenizer();
+ let add_tokenizer = add_builder.build();

// iterate over documents.
let mut cursor = obkv_documents.into_cursor()?;
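
For context on the call-site change above: the builders returned by the crate's tokenizer_builder helper are now bound as mut because build() borrows the builder mutably, whereas the replaced into_tokenizer() consumed it. The following is a minimal standalone sketch of that builder pattern, assuming the charabia and fst crates as dependencies; it is an illustration only, not the repository's actual tokenizer_builder helper.

    // Minimal sketch (assumed dependencies: charabia, fst). Illustrates the
    // mutable-builder + build() pattern used by the new call sites above.
    use charabia::TokenizerBuilder;

    fn main() {
        // Stop words live in an fst set; from_iter expects sorted, unique input.
        let stop_words = fst::Set::from_iter(["a", "the", "to"]).unwrap();

        // build() takes &mut self, hence the mutable binding, mirroring the
        // `let mut del_builder` / `let mut add_builder` changes in the diff.
        let mut builder = TokenizerBuilder::new();
        builder.stop_words(&stop_words);
        let tokenizer = builder.build();

        for token in tokenizer.tokenize("How to Train Your Dragon") {
            println!("{:?} -> {:?}", token.lemma(), token.kind);
        }
    }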