mirror of https://github.com/meilisearch/meilisearch.git
Be able to set and reset settings

commit d8d12d5979 (parent 0597a97c84)
@@ -261,6 +261,9 @@ pub(crate) mod test {
             sortable_attributes: Setting::Set(btreeset! { S("age") }),
             ranking_rules: Setting::NotSet,
             stop_words: Setting::NotSet,
+            non_separator_tokens: Setting::NotSet,
+            separator_tokens: Setting::NotSet,
+            dictionary: Setting::NotSet,
             synonyms: Setting::NotSet,
             distinct_attribute: Setting::NotSet,
             typo_tolerance: Setting::NotSet,
@@ -340,6 +340,9 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
                }
            },
            stop_words: settings.stop_words.into(),
+           non_separator_tokens: v6::Setting::NotSet,
+           separator_tokens: v6::Setting::NotSet,
+           dictionary: v6::Setting::NotSet,
            synonyms: settings.synonyms.into(),
            distinct_attribute: settings.distinct_attribute.into(),
            typo_tolerance: match settings.typo_tolerance {
@@ -259,6 +259,9 @@ InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsNonSeparatorTokens , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsSeparatorTokens , InvalidRequest , BAD_REQUEST ;
+InvalidSettingsDictionary , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
 InvalidState , Internal , INTERNAL_SERVER_ERROR ;
@@ -171,6 +171,15 @@ pub struct Settings<T> {
     #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
     pub stop_words: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
+    pub non_separator_tokens: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
+    pub separator_tokens: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
+    pub dictionary: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
     pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
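For reference, the three new settings surface in the HTTP API under camelCase names, as the set_and_reset test later in this commit shows. A hedged illustration of a settings payload carrying them, built with serde_json (the values are borrowed from that test; this snippet is not part of the commit):

    use serde_json::json;

    fn main() {
        // Example payload only; values come from the set_and_reset test below.
        let settings = json!({
            "nonSeparatorTokens": ["#", "&"],
            "separatorTokens": ["&sep", "<br/>"],
            "dictionary": ["J.R.R.", "J. R. R."],
        });
        println!("{settings}");
    }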
@@ -201,6 +210,9 @@ impl Settings<Checked> {
            ranking_rules: Setting::Reset,
            stop_words: Setting::Reset,
            synonyms: Setting::Reset,
+           non_separator_tokens: Setting::Reset,
+           separator_tokens: Setting::Reset,
+           dictionary: Setting::Reset,
            distinct_attribute: Setting::Reset,
            typo_tolerance: Setting::Reset,
            faceting: Setting::Reset,
@@ -217,6 +229,9 @@ impl Settings<Checked> {
            sortable_attributes,
            ranking_rules,
            stop_words,
+           non_separator_tokens,
+           separator_tokens,
+           dictionary,
            synonyms,
            distinct_attribute,
            typo_tolerance,
@@ -232,6 +247,9 @@ impl Settings<Checked> {
            sortable_attributes,
            ranking_rules,
            stop_words,
+           non_separator_tokens,
+           separator_tokens,
+           dictionary,
            synonyms,
            distinct_attribute,
            typo_tolerance,
@@ -274,6 +292,9 @@ impl Settings<Unchecked> {
            ranking_rules: self.ranking_rules,
            stop_words: self.stop_words,
            synonyms: self.synonyms,
+           non_separator_tokens: self.non_separator_tokens,
+           separator_tokens: self.separator_tokens,
+           dictionary: self.dictionary,
            distinct_attribute: self.distinct_attribute,
            typo_tolerance: self.typo_tolerance,
            faceting: self.faceting,
@@ -335,6 +356,28 @@ pub fn apply_settings_to_builder(
        Setting::NotSet => (),
    }

+    match settings.non_separator_tokens {
+        Setting::Set(ref non_separator_tokens) => {
+            builder.set_non_separator_tokens(non_separator_tokens.clone())
+        }
+        Setting::Reset => builder.reset_non_separator_tokens(),
+        Setting::NotSet => (),
+    }
+
+    match settings.separator_tokens {
+        Setting::Set(ref separator_tokens) => {
+            builder.set_separator_tokens(separator_tokens.clone())
+        }
+        Setting::Reset => builder.reset_separator_tokens(),
+        Setting::NotSet => (),
+    }
+
+    match settings.dictionary {
+        Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
+        Setting::Reset => builder.reset_dictionary(),
+        Setting::NotSet => (),
+    }
+
    match settings.synonyms {
        Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
        Setting::Reset => builder.reset_synonyms(),
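The new arms follow the same three-state dispatch as the existing settings: Set forwards the value to the milli builder, Reset clears it back to the default, NotSet leaves the index untouched. A minimal self-contained sketch of that pattern, using a simplified stand-in for milli's Setting type and builder (all names here are illustrative, not the real API):

    use std::collections::BTreeSet;

    // Simplified stand-in for milli's three-state Setting<T>.
    enum Setting<T> {
        Set(T),
        Reset,
        NotSet,
    }

    #[derive(Default, Debug)]
    struct Builder {
        dictionary: Option<BTreeSet<String>>,
    }

    impl Builder {
        fn set_dictionary(&mut self, set: BTreeSet<String>) {
            self.dictionary = Some(set);
        }
        fn reset_dictionary(&mut self) {
            self.dictionary = None;
        }
    }

    fn apply(setting: Setting<BTreeSet<String>>, builder: &mut Builder) {
        match setting {
            // Set: forward the new value to the builder.
            Setting::Set(dictionary) => builder.set_dictionary(dictionary),
            // Reset: fall back to the default behaviour.
            Setting::Reset => builder.reset_dictionary(),
            // NotSet: the caller did not mention this setting at all.
            Setting::NotSet => (),
        }
    }

    fn main() {
        let mut builder = Builder::default();
        apply(Setting::Set(["J.R.R.".to_string()].into()), &mut builder);
        apply(Setting::NotSet, &mut builder); // leaves the previous value in place
        println!("{:?}", builder.dictionary);
    }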
@@ -459,6 +502,11 @@ pub fn settings(
        })
        .transpose()?
        .unwrap_or_default();
+
+    let non_separator_tokens = index.non_separator_tokens(rtxn)?.unwrap_or_default();
+    let separator_tokens = index.separator_tokens(rtxn)?.unwrap_or_default();
+    let dictionary = index.dictionary(rtxn)?.unwrap_or_default();
+
    let distinct_field = index.distinct_field(rtxn)?.map(String::from);

    // in milli each word in the synonyms map were split on their separator. Since we lost
@@ -520,6 +568,9 @@ pub fn settings(
        sortable_attributes: Setting::Set(sortable_attributes),
        ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
        stop_words: Setting::Set(stop_words),
+       non_separator_tokens: Setting::Set(non_separator_tokens),
+       separator_tokens: Setting::Set(separator_tokens),
+       dictionary: Setting::Set(dictionary),
        distinct_attribute: match distinct_field {
            Some(field) => Setting::Set(field),
            None => Setting::Reset,
@@ -642,6 +693,9 @@ pub(crate) mod test {
            sortable_attributes: Setting::NotSet,
            ranking_rules: Setting::NotSet,
            stop_words: Setting::NotSet,
+           non_separator_tokens: Setting::NotSet,
+           separator_tokens: Setting::NotSet,
+           dictionary: Setting::NotSet,
            synonyms: Setting::NotSet,
            distinct_attribute: Setting::NotSet,
            typo_tolerance: Setting::NotSet,
@@ -663,6 +717,9 @@ pub(crate) mod test {
            sortable_attributes: Setting::NotSet,
            ranking_rules: Setting::NotSet,
            stop_words: Setting::NotSet,
+           non_separator_tokens: Setting::NotSet,
+           separator_tokens: Setting::NotSet,
+           dictionary: Setting::NotSet,
            synonyms: Setting::NotSet,
            distinct_attribute: Setting::NotSet,
            typo_tolerance: Setting::NotSet,
@@ -309,6 +309,81 @@ make_setting_route!(
    }
);

+make_setting_route!(
+    "/non-separator-tokens",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsNonSeparatorTokens,
+    >,
+    non_separator_tokens,
+    "nonSeparatorTokens",
+    analytics,
+    |non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "nonSeparatorTokens Updated".to_string(),
+            json!({
+                "non_separator_tokens": {
+                    "total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/separator-tokens",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsSeparatorTokens,
+    >,
+    separator_tokens,
+    "separatorTokens",
+    analytics,
+    |separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "separatorTokens Updated".to_string(),
+            json!({
+                "separator_tokens": {
+                    "total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
+make_setting_route!(
+    "/dictionary",
+    put,
+    std::collections::BTreeSet<String>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsDictionary,
+    >,
+    dictionary,
+    "dictionary",
+    analytics,
+    |dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
+        use serde_json::json;
+
+        analytics.publish(
+            "dictionary Updated".to_string(),
+            json!({
+                "dictionary": {
+                    "total": dictionary.as_ref().map(|dictionary| dictionary.len()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);
+
 make_setting_route!(
     "/synonyms",
     put,
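Each make_setting_route! call above expands into the fetch, update, and reset handlers for its sub-route, plus the analytics hook given in the closure. Assuming these routes sit under the usual /indexes/{index_uid}/settings prefix, updating the dictionary could look roughly like this (sketch only; the reqwest crate, URL, key, and index name are stand-ins and not part of the commit):

    use serde_json::json;

    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        let client = reqwest::Client::new();
        // PUT the new user dictionary; a DELETE on the same route would reset it.
        let response = client
            .put("http://localhost:7700/indexes/books/settings/dictionary")
            .bearer_auth("MASTER_KEY")
            .json(&json!(["J.R.R.", "J. R. R."]))
            .send()
            .await?;
        println!("{}", response.status());
        Ok(())
    }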
(One file diff was suppressed because it is too large.)
@@ -54,7 +54,7 @@ async fn get_settings() {
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     let settings = response.as_object().unwrap();
-    assert_eq!(settings.keys().len(), 11);
+    assert_eq!(settings.keys().len(), 14);
     assert_eq!(settings["displayedAttributes"], json!(["*"]));
     assert_eq!(settings["searchableAttributes"], json!(["*"]));
     assert_eq!(settings["filterableAttributes"], json!([]));
@@ -1,3 +1,4 @@
+use meili_snap::{json_string, snapshot};
 use serde_json::json;
 
 use crate::common::Server;
@@ -9,22 +10,37 @@ async fn set_and_reset() {
 
     let (_response, _code) = index
         .update_settings(json!({
-            "non_separator_tokens": ["#", "&"],
-            "separator_tokens": ["&sep", "<br/>"],
+            "nonSeparatorTokens": ["#", "&"],
+            "separatorTokens": ["&sep", "<br/>"],
             "dictionary": ["J.R.R.", "J. R. R."],
         }))
         .await;
     index.wait_task(0).await;
 
     let (response, _) = index.settings().await;
-    assert_eq!(response["non_separator_tokens"], json!(["#", "&"]));
-    assert_eq!(response["separator_tokens"], json!(["&sep", "<br/>"]));
-    assert_eq!(response["dictionary"], json!(["J.R.R.", "J. R. R."]));
+    snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
+    [
+      "#",
+      "&"
+    ]
+    "###);
+    snapshot!(json_string!(response["separatorTokens"]), @r###"
+    [
+      "&sep",
+      "<br/>"
+    ]
+    "###);
+    snapshot!(json_string!(response["dictionary"]), @r###"
+    [
+      "J. R. R.",
+      "J.R.R."
+    ]
+    "###);
 
     index
         .update_settings(json!({
-            "non_separator_tokens": null,
-            "separator_tokens": null,
+            "nonSeparatorTokens": null,
+            "separatorTokens": null,
             "dictionary": null,
         }))
         .await;
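Note the snapshot ordering: the values come back from a BTreeSet<String>, so they are sorted lexicographically rather than kept in insertion order, which is why "J. R. R." precedes "J.R.R." and "#" precedes "&". A quick standalone check of that ordering:

    use std::collections::BTreeSet;

    fn main() {
        let dictionary: BTreeSet<String> =
            ["J.R.R.", "J. R. R."].iter().map(|s| s.to_string()).collect();
        // Prints {"J. R. R.", "J.R.R."}: the space after "J." sorts before "R".
        println!("{dictionary:?}");
    }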
@@ -32,7 +48,7 @@ async fn set_and_reset() {
     index.wait_task(1).await;
 
     let (response, _) = index.settings().await;
-    assert_eq!(response["non_separator_tokens"], json!(null));
-    assert_eq!(response["separator_tokens"], json!(null));
-    assert_eq!(response["dictionary"], json!(null));
+    snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
+    snapshot!(json_string!(response["separatorTokens"]), @"[]");
+    snapshot!(json_string!(response["dictionary"]), @"[]");
 }
@@ -1,5 +1,5 @@
 use std::borrow::Cow;
-use std::collections::{HashMap, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
 use std::fs::File;
 use std::mem::size_of;
 use std::path::Path;
@@ -60,6 +60,9 @@ pub mod main_key {
     pub const USER_DEFINED_SEARCHABLE_FIELDS_KEY: &str = "user-defined-searchable-fields";
     pub const SOFT_EXTERNAL_DOCUMENTS_IDS_KEY: &str = "soft-external-documents-ids";
     pub const STOP_WORDS_KEY: &str = "stop-words";
+    pub const NON_SEPARATOR_TOKENS_KEY: &str = "non-separator-tokens";
+    pub const SEPARATOR_TOKENS_KEY: &str = "separator-tokens";
+    pub const DICTIONARY_KEY: &str = "dictionary";
     pub const STRING_FACETED_DOCUMENTS_IDS_PREFIX: &str = "string-faceted-documents-ids";
     pub const SYNONYMS_KEY: &str = "synonyms";
     pub const WORDS_FST_KEY: &str = "words-fst";
@@ -1048,6 +1051,90 @@ impl Index {
        }
    }

+    /* non separator tokens */
+
+    pub(crate) fn put_non_separator_tokens(
+        &self,
+        wtxn: &mut RwTxn,
+        set: &BTreeSet<String>,
+    ) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY, set)
+    }
+
+    pub(crate) fn delete_non_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY)
+    }
+
+    pub fn non_separator_tokens<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<BTreeSet<String>>> {
+        Ok(self.main.get::<_, Str, SerdeBincode<BTreeSet<String>>>(
+            rtxn,
+            main_key::NON_SEPARATOR_TOKENS_KEY,
+        )?)
+    }
+
+    /* separator tokens */
+
+    pub(crate) fn put_separator_tokens(
+        &self,
+        wtxn: &mut RwTxn,
+        set: &BTreeSet<String>,
+    ) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::SEPARATOR_TOKENS_KEY, set)
+    }
+
+    pub(crate) fn delete_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::SEPARATOR_TOKENS_KEY)
+    }
+
+    pub fn separator_tokens<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<BTreeSet<String>>> {
+        Ok(self
+            .main
+            .get::<_, Str, SerdeBincode<BTreeSet<String>>>(rtxn, main_key::SEPARATOR_TOKENS_KEY)?)
+    }
+
+    /* separators easing method */
+
+    pub(crate) fn allowed_separators<'t>(
+        &self,
+        rtxn: &'t RoTxn,
+    ) -> Result<Option<BTreeSet<String>>> {
+        let default_separators =
+            charabia::separators::DEFAULT_SEPARATORS.iter().map(|s| s.to_string());
+        let mut separators: Option<BTreeSet<_>> = None;
+        if let Some(mut separator_tokens) = self.separator_tokens(rtxn)? {
+            separator_tokens.extend(default_separators.clone());
+            separators = Some(separator_tokens);
+        }
+
+        if let Some(non_separator_tokens) = self.non_separator_tokens(rtxn)? {
+            separators = separators
+                .or_else(|| Some(default_separators.collect()))
+                .map(|separators| &separators - &non_separator_tokens);
+        }
+
+        Ok(separators)
+    }
+
+    /* dictionary */
+
+    pub(crate) fn put_dictionary(
+        &self,
+        wtxn: &mut RwTxn,
+        set: &BTreeSet<String>,
+    ) -> heed::Result<()> {
+        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::DICTIONARY_KEY, set)
+    }
+
+    pub(crate) fn delete_dictionary(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::DICTIONARY_KEY)
+    }
+
+    pub fn dictionary<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<BTreeSet<String>>> {
+        Ok(self
+            .main
+            .get::<_, Str, SerdeBincode<BTreeSet<String>>>(rtxn, main_key::DICTIONARY_KEY)?)
+    }
+
    /* synonyms */

    pub(crate) fn put_synonyms(
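allowed_separators resolves the effective separator list: charabia's defaults plus any user-defined separator tokens, minus any non-separator tokens; the "&separators - &non_separator_tokens" expression is std's BTreeSet set difference. A self-contained sketch of that resolution with plain sets (the default list below is an invented placeholder, not charabia's real one):

    use std::collections::BTreeSet;

    fn allowed(
        defaults: &BTreeSet<String>,
        separator_tokens: &BTreeSet<String>,
        non_separator_tokens: &BTreeSet<String>,
    ) -> BTreeSet<String> {
        // Start from the defaults plus the user-provided separators...
        let separators: BTreeSet<String> = defaults.union(separator_tokens).cloned().collect();
        // ...then drop anything explicitly declared a non-separator.
        &separators - non_separator_tokens
    }

    fn main() {
        let to_set = |items: &[&str]| items.iter().map(|s| s.to_string()).collect::<BTreeSet<_>>();
        let defaults = to_set(&[" ", ",", "."]); // placeholder defaults for the example
        let separator_tokens = to_set(&["&sep", "<br/>"]);
        let non_separator_tokens = to_set(&["#", "&", "."]);
        println!("{:?}", allowed(&defaults, &separator_tokens, &non_separator_tokens));
    }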
@@ -112,6 +112,9 @@ pub struct Settings<'a, 't, 'u, 'i> {
     sortable_fields: Setting<HashSet<String>>,
     criteria: Setting<Vec<Criterion>>,
     stop_words: Setting<BTreeSet<String>>,
+    non_separator_tokens: Setting<BTreeSet<String>>,
+    separator_tokens: Setting<BTreeSet<String>>,
+    dictionary: Setting<BTreeSet<String>>,
     distinct_field: Setting<String>,
     synonyms: Setting<HashMap<String, Vec<String>>>,
     primary_key: Setting<String>,
@@ -141,6 +144,9 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
            sortable_fields: Setting::NotSet,
            criteria: Setting::NotSet,
            stop_words: Setting::NotSet,
+           non_separator_tokens: Setting::NotSet,
+           separator_tokens: Setting::NotSet,
+           dictionary: Setting::NotSet,
            distinct_field: Setting::NotSet,
            synonyms: Setting::NotSet,
            primary_key: Setting::NotSet,
@@ -205,6 +211,39 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
            if stop_words.is_empty() { Setting::Reset } else { Setting::Set(stop_words) }
    }

+    pub fn reset_non_separator_tokens(&mut self) {
+        self.non_separator_tokens = Setting::Reset;
+    }
+
+    pub fn set_non_separator_tokens(&mut self, non_separator_tokens: BTreeSet<String>) {
+        self.non_separator_tokens = if non_separator_tokens.is_empty() {
+            Setting::Reset
+        } else {
+            Setting::Set(non_separator_tokens)
+        }
+    }
+
+    pub fn reset_separator_tokens(&mut self) {
+        self.separator_tokens = Setting::Reset;
+    }
+
+    pub fn set_separator_tokens(&mut self, separator_tokens: BTreeSet<String>) {
+        self.separator_tokens = if separator_tokens.is_empty() {
+            Setting::Reset
+        } else {
+            Setting::Set(separator_tokens)
+        }
+    }
+
+    pub fn reset_dictionary(&mut self) {
+        self.dictionary = Setting::Reset;
+    }
+
+    pub fn set_dictionary(&mut self, dictionary: BTreeSet<String>) {
+        self.dictionary =
+            if dictionary.is_empty() { Setting::Reset } else { Setting::Set(dictionary) }
+    }
+
    pub fn reset_distinct_field(&mut self) {
        self.distinct_field = Setting::Reset;
    }
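As with the stop-words setter just above, the new setters map an empty collection to Setting::Reset, so sending an empty array behaves like a reset instead of storing an empty list. A tiny sketch of that convention (again with a simplified local Setting, not milli's real type):

    use std::collections::BTreeSet;

    // Simplified stand-in for milli's Setting<T>.
    #[derive(Debug)]
    enum Setting<T> {
        Set(T),
        Reset,
    }

    fn set_dictionary(dictionary: BTreeSet<String>) -> Setting<BTreeSet<String>> {
        // An empty input is interpreted as "reset to the default".
        if dictionary.is_empty() { Setting::Reset } else { Setting::Set(dictionary) }
    }

    fn main() {
        println!("{:?}", set_dictionary(BTreeSet::new())); // Reset
        println!("{:?}", set_dictionary(["J.R.R.".to_string()].into())); // Set({"J.R.R."})
    }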
@@ -451,6 +490,60 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
        }
    }

+    fn update_non_separator_tokens(&mut self) -> Result<bool> {
+        match self.non_separator_tokens {
+            Setting::Set(ref non_separator_tokens) => {
+                let current = self.index.non_separator_tokens(self.wtxn)?;
+
+                // Does the new list differ from the previous one?
+                if current.map_or(true, |current| &current != non_separator_tokens) {
+                    self.index.put_non_separator_tokens(self.wtxn, &non_separator_tokens)?;
+                    Ok(true)
+                } else {
+                    Ok(false)
+                }
+            }
+            Setting::Reset => Ok(self.index.delete_non_separator_tokens(self.wtxn)?),
+            Setting::NotSet => Ok(false),
+        }
+    }
+
+    fn update_separator_tokens(&mut self) -> Result<bool> {
+        match self.separator_tokens {
+            Setting::Set(ref separator_tokens) => {
+                let current = self.index.separator_tokens(self.wtxn)?;
+
+                // Does the new list differ from the previous one?
+                if current.map_or(true, |current| &current != separator_tokens) {
+                    self.index.put_separator_tokens(self.wtxn, &separator_tokens)?;
+                    Ok(true)
+                } else {
+                    Ok(false)
+                }
+            }
+            Setting::Reset => Ok(self.index.delete_separator_tokens(self.wtxn)?),
+            Setting::NotSet => Ok(false),
+        }
+    }
+
+    fn update_dictionary(&mut self) -> Result<bool> {
+        match self.dictionary {
+            Setting::Set(ref dictionary) => {
+                let current = self.index.dictionary(self.wtxn)?;
+
+                // Does the new list differ from the previous one?
+                if current.map_or(true, |current| &current != dictionary) {
+                    self.index.put_dictionary(self.wtxn, &dictionary)?;
+                    Ok(true)
+                } else {
+                    Ok(false)
+                }
+            }
+            Setting::Reset => Ok(self.index.delete_dictionary(self.wtxn)?),
+            Setting::NotSet => Ok(false),
+        }
+    }
+
    fn update_synonyms(&mut self) -> Result<bool> {
        match self.synonyms {
            Setting::Set(ref synonyms) => {
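Each update_* helper writes only when the incoming list differs from what is already stored, and reports back whether anything changed so the caller can decide whether to reindex; current.map_or(true, ..) treats "nothing stored yet" as a difference. A standalone illustration of that check:

    use std::collections::BTreeSet;

    // True when `new` differs from the stored value; a missing stored value
    // always counts as a difference (mirrors current.map_or(true, ..)).
    fn needs_update(current: Option<&BTreeSet<String>>, new: &BTreeSet<String>) -> bool {
        current.map_or(true, |current| current != new)
    }

    fn main() {
        let new: BTreeSet<String> = ["J.R.R.".to_string()].into();
        assert!(needs_update(None, &new));        // nothing stored yet: write
        assert!(!needs_update(Some(&new), &new)); // identical: skip the write
    }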
@@ -756,11 +849,17 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
        let faceted_updated = old_faceted_fields != new_faceted_fields;

        let stop_words_updated = self.update_stop_words()?;
+       let non_separator_tokens_updated = self.update_non_separator_tokens()?;
+       let separator_tokens_updated = self.update_separator_tokens()?;
+       let dictionary_updated = self.update_dictionary()?;
        let synonyms_updated = self.update_synonyms()?;
        let searchable_updated = self.update_searchable()?;
        let exact_attributes_updated = self.update_exact_attributes()?;

        if stop_words_updated
+           || non_separator_tokens_updated
+           || separator_tokens_updated
+           || dictionary_updated
            || faceted_updated
            || synonyms_updated
            || searchable_updated
@@ -1539,6 +1638,9 @@ mod tests {
            sortable_fields,
            criteria,
            stop_words,
+           non_separator_tokens,
+           separator_tokens,
+           dictionary,
            distinct_field,
            synonyms,
            primary_key,
@@ -1557,6 +1659,9 @@ mod tests {
        assert!(matches!(sortable_fields, Setting::NotSet));
        assert!(matches!(criteria, Setting::NotSet));
        assert!(matches!(stop_words, Setting::NotSet));
+       assert!(matches!(non_separator_tokens, Setting::NotSet));
+       assert!(matches!(separator_tokens, Setting::NotSet));
+       assert!(matches!(dictionary, Setting::NotSet));
        assert!(matches!(distinct_field, Setting::NotSet));
        assert!(matches!(synonyms, Setting::NotSet));
        assert!(matches!(primary_key, Setting::NotSet));