Fix #4984

parent cd796b0f4b
commit e0c3f3d560
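In short, as the diff below shows: the word-position extraction in `extract_docid_word_positions` now finalizes its delete/add tokenizers with `build()` instead of `into_tokenizer()` (so both builder bindings become `mut`), and the `search_with_stop_word` test adds `"The"` to the stop-word list and now snapshots an empty hit list for the stop-word-only query `"to the"`.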
@@ -69,8 +69,9 @@ async fn search_with_stop_word() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (_, code) =
-        index.update_settings(json!({"stopWords": ["the", "a", "an", "to", "in", "of"]})).await;
+    let (_, code) = index
+        .update_settings(json!({"stopWords": ["the", "The", "a", "an", "to", "in", "of"]}))
+        .await;
     meili_snap::snapshot!(code, @"202 Accepted");
 
     let documents = DOCUMENTS.clone();
@@ -81,16 +82,7 @@ async fn search_with_stop_word() {
     index
         .search(json!({"q": "to the", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| {
             assert_eq!(code, 200, "{}", response);
-            snapshot!(json_string!(response["hits"]), @r###"
-            [
-              {
-                "title": "How to Train Your Dragon: The Hidden World",
-                "_formatted": {
-                  "title": "How to Train Your Dragon: <em>The</em> Hidden World"
-                }
-              }
-            ]
-            "###);
+            snapshot!(json_string!(response["hits"]), @"[]");
         })
         .await;
 
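The updated test expects an empty hit list for the query "to the" once "The" joins the stop-word list. As a rough illustration (a self-contained sketch, not meilisearch's or charabia's actual implementation), filtering the query terms against that stop-word set leaves nothing to search for:

    // Minimal sketch of the behaviour the new `@"[]"` snapshot asserts:
    // when every query term is a stop word, no searchable terms remain.
    use std::collections::HashSet;

    fn main() {
        let stop_words: HashSet<&str> =
            ["the", "The", "a", "an", "to", "in", "of"].into_iter().collect();

        // Split the query on whitespace and drop every stop word.
        let remaining: Vec<&str> = "to the"
            .split_whitespace()
            .filter(|term| !stop_words.contains(term))
            .collect();

        // Nothing remains, which matches the empty hit list in the test.
        assert!(remaining.is_empty());

        // By contrast, a query with meaningful terms keeps them.
        let remaining: Vec<&str> = "train the dragon"
            .split_whitespace()
            .filter(|term| !stop_words.contains(term))
            .collect();
        assert_eq!(remaining, vec!["train", "dragon"]);
    }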
@@ -57,9 +57,9 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
         .map(|s| s.iter().map(String::as_str).collect());
     let old_dictionary: Option<Vec<_>> =
         settings_diff.old.dictionary.as_ref().map(|s| s.iter().map(String::as_str).collect());
-    let del_builder =
+    let mut del_builder =
         tokenizer_builder(old_stop_words, old_separators.as_deref(), old_dictionary.as_deref());
-    let del_tokenizer = del_builder.into_tokenizer();
+    let del_tokenizer = del_builder.build();
 
     let new_stop_words = settings_diff.new.stop_words.as_ref();
     let new_separators: Option<Vec<_>> = settings_diff
@@ -69,9 +69,9 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
         .map(|s| s.iter().map(String::as_str).collect());
     let new_dictionary: Option<Vec<_>> =
         settings_diff.new.dictionary.as_ref().map(|s| s.iter().map(String::as_str).collect());
-    let add_builder =
+    let mut add_builder =
         tokenizer_builder(new_stop_words, new_separators.as_deref(), new_dictionary.as_deref());
-    let add_tokenizer = add_builder.into_tokenizer();
+    let add_tokenizer = add_builder.build();
 
     // iterate over documents.
     let mut cursor = obkv_documents.into_cursor()?;
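The milli hunks above replace `into_tokenizer()` with `build()` and make the builder bindings mutable. The hypothetical builder below (not charabia's real types; it only assumes, as the added `mut` bindings suggest, that `build()` borrows the builder mutably while `into_tokenizer()` consumed it) shows why the bindings gain `mut`:

    // Hypothetical builder types illustrating the `mut` change: a finalizer
    // taking `&mut self` requires a mutable binding, whereas a consuming
    // `into_*` finalizer does not.
    struct DemoTokenizer;

    struct DemoBuilder {
        finalized: bool,
    }

    impl DemoBuilder {
        fn new() -> Self {
            DemoBuilder { finalized: false }
        }

        // Borrows the builder mutably, like the `build()` call used after this change.
        fn build(&mut self) -> DemoTokenizer {
            self.finalized = true;
            DemoTokenizer
        }

        // Consumes the builder, like the `into_tokenizer()` call used before.
        fn into_tokenizer(self) -> DemoTokenizer {
            DemoTokenizer
        }
    }

    fn main() {
        // `build(&mut self)` forces `let mut`, mirroring `let mut del_builder`
        // and `let mut add_builder` in the diff above.
        let mut builder = DemoBuilder::new();
        let _tokenizer = builder.build();

        // The consuming variant worked with an immutable binding.
        let builder = DemoBuilder::new();
        let _tokenizer = builder.into_tokenizer();
    }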