mirror of https://github.com/meilisearch/meilisearch.git

commit 0597a97c84
parent 2dfbb6813a

    Update tests
meilisearch/tests/settings/get_settings.rs
@@ -16,6 +16,9 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
         json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
     );
     map.insert("stop_words", json!([]));
+    map.insert("non_separator_tokens", json!([]));
+    map.insert("separator_tokens", json!([]));
+    map.insert("dictionary", json!([]));
     map.insert("synonyms", json!({}));
     map.insert(
         "faceting",
@@ -62,6 +65,9 @@ async fn get_settings() {
         json!(["words", "typo", "proximity", "attribute", "sort", "exactness"])
     );
     assert_eq!(settings["stopWords"], json!([]));
+    assert_eq!(settings["non_separator_tokens"], json!([]));
+    assert_eq!(settings["separator_tokens"], json!([]));
+    assert_eq!(settings["dictionary"], json!([]));
     assert_eq!(
         settings["faceting"],
         json!({
meilisearch/tests/settings/mod.rs
@@ -1,3 +1,4 @@
 mod distinct;
 mod errors;
 mod get_settings;
+mod tokenizer_customization;
meilisearch/tests/settings/tokenizer_customization.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
+use serde_json::json;
+
+use crate::common::Server;
+
+#[actix_rt::test]
+async fn set_and_reset() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (_response, _code) = index
+        .update_settings(json!({
+            "non_separator_tokens": ["#", "&"],
+            "separator_tokens": ["&sep", "<br/>"],
+            "dictionary": ["J.R.R.", "J. R. R."],
+        }))
+        .await;
+    index.wait_task(0).await;
+
+    let (response, _) = index.settings().await;
+    assert_eq!(response["non_separator_tokens"], json!(["#", "&"]));
+    assert_eq!(response["separator_tokens"], json!(["&sep", "<br/>"]));
+    assert_eq!(response["dictionary"], json!(["J.R.R.", "J. R. R."]));
+
+    index
+        .update_settings(json!({
+            "non_separator_tokens": null,
+            "separator_tokens": null,
+            "dictionary": null,
+        }))
+        .await;
+
+    index.wait_task(1).await;
+
+    let (response, _) = index.settings().await;
+    assert_eq!(response["non_separator_tokens"], json!(null));
+    assert_eq!(response["separator_tokens"], json!(null));
+    assert_eq!(response["dictionary"], json!(null));
+}
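A possible companion test (not part of this commit) could exercise one tokenizer setting in isolation, reusing the same helpers the new file relies on (Server, update_settings, wait_task, settings). The test name and dictionary payload below are illustrative assumptions; the expectation that unset tokenizer settings come back as empty arrays follows the defaults asserted in get_settings.rs above.

use serde_json::json;

use crate::common::Server;

// Hypothetical sketch, not repository code: set only `dictionary` and check
// that the other tokenizer settings keep their (empty-array) defaults.
#[actix_rt::test]
async fn set_dictionary_only() {
    let server = Server::new().await;
    let index = server.index("test");

    // Update a single setting; the other tokenizer settings are left untouched.
    let (_response, _code) = index
        .update_settings(json!({
            "dictionary": ["S.O.S", "S.O"],
        }))
        .await;
    index.wait_task(0).await;

    let (response, _) = index.settings().await;
    assert_eq!(response["dictionary"], json!(["S.O.S", "S.O"]));
    // Assumption: defaults match the empty arrays asserted in get_settings.rs.
    assert_eq!(response["non_separator_tokens"], json!([]));
    assert_eq!(response["separator_tokens"], json!([]));
}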