Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-01-18 08:48:32 +08:00)
Merge pull request #482 from meilisearch/review-settings-endpoint
Review settings endpoint
Commit 06ace88901
@@ -1059,12 +1059,12 @@ mod tests {
         let data = r#"
             {
                 "rankingRules": [
-                    "_typo",
-                    "_words",
-                    "_proximity",
-                    "_attribute",
-                    "_words_position",
-                    "_exactness",
+                    "typo",
+                    "words",
+                    "proximity",
+                    "attribute",
+                    "wordsPosition",
+                    "exactness",
                     "dsc(release_date)"
                 ],
                 "searchableAttributes": ["name", "release_date"],

@@ -5,6 +5,10 @@ use std::iter::IntoIterator;
 use serde::{Deserialize, Deserializer, Serialize};
 use once_cell::sync::Lazy;
 
+use self::RankingRule::*;
+
+pub const DEFAULT_RANKING_RULES: [RankingRule; 6] = [Typo, Words, Proximity, Attribute, WordsPosition, Exactness];
+
 static RANKING_RULE_REGEX: Lazy<regex::Regex> = Lazy::new(|| {
     let regex = regex::Regex::new(r"(asc|dsc)\(([a-zA-Z0-9-_]*)\)").unwrap();
     regex

@@ -16,7 +20,7 @@ pub struct Settings {
     #[serde(default, deserialize_with = "deserialize_some")]
     pub ranking_rules: Option<Option<Vec<String>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
-    pub ranking_distinct: Option<Option<String>>,
+    pub distinct_attribute: Option<Option<String>>,
     #[serde(default, deserialize_with = "deserialize_some")]
     pub searchable_attributes: Option<Option<Vec<String>>>,
     #[serde(default, deserialize_with = "deserialize_some")]

@@ -26,7 +30,7 @@ pub struct Settings {
     #[serde(default, deserialize_with = "deserialize_some")]
     pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
-    pub index_new_fields: Option<Option<bool>>,
+    pub accept_new_fields: Option<Option<bool>>,
 }
 
 // Any value that is present is considered Some value, including null.

@@ -49,13 +53,13 @@ impl Settings {
 
         Ok(SettingsUpdate {
             ranking_rules,
-            ranking_distinct: settings.ranking_distinct.into(),
+            distinct_attribute: settings.distinct_attribute.into(),
             identifier: UpdateState::Nothing,
             searchable_attributes: settings.searchable_attributes.into(),
             displayed_attributes: settings.displayed_attributes.into(),
             stop_words: settings.stop_words.into(),
             synonyms: settings.synonyms.into(),
-            index_new_fields: settings.index_new_fields.into(),
+            accept_new_fields: settings.accept_new_fields.into(),
         })
     }
 }
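Note on the Settings struct above: each field is a doubly wrapped Option<Option<T>>. The outer Option distinguishes a key that is absent from the JSON payload from one that is present, and the inner Option distinguishes a real value from an explicit null (which clears the setting). The deserialize_some helper named in the serde attributes is not part of this diff; a minimal sketch of the usual serde idiom it refers to, assuming nothing beyond serde itself, looks like this:

use serde::{Deserialize, Deserializer};

// Any value that is present (including `null`) becomes `Some(...)`;
// combined with `#[serde(default)]`, an absent key stays `None`.
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    Deserialize::deserialize(deserializer).map(Some)
}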
@@ -98,17 +102,17 @@ pub enum RankingRule {
     Dsc(String),
 }
 
-impl ToString for RankingRule {
-    fn to_string(&self) -> String {
+impl std::fmt::Display for RankingRule {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         match self {
-            RankingRule::Typo => "_typo".to_string(),
-            RankingRule::Words => "_words".to_string(),
-            RankingRule::Proximity => "_proximity".to_string(),
-            RankingRule::Attribute => "_attribute".to_string(),
-            RankingRule::WordsPosition => "_words_position".to_string(),
-            RankingRule::Exactness => "_exactness".to_string(),
-            RankingRule::Asc(field) => format!("asc({})", field),
-            RankingRule::Dsc(field) => format!("dsc({})", field),
+            RankingRule::Typo => f.write_str("typo"),
+            RankingRule::Words => f.write_str("words"),
+            RankingRule::Proximity => f.write_str("proximity"),
+            RankingRule::Attribute => f.write_str("attribute"),
+            RankingRule::WordsPosition => f.write_str("wordsPosition"),
+            RankingRule::Exactness => f.write_str("exactness"),
+            RankingRule::Asc(field) => write!(f, "asc({})", field),
+            RankingRule::Dsc(field) => write!(f, "dsc({})", field),
         }
     }
 }

@@ -118,12 +122,12 @@ impl FromStr for RankingRule {
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         let rule = match s {
-            "_typo" => RankingRule::Typo,
-            "_words" => RankingRule::Words,
-            "_proximity" => RankingRule::Proximity,
-            "_attribute" => RankingRule::Attribute,
-            "_words_position" => RankingRule::WordsPosition,
-            "_exactness" => RankingRule::Exactness,
+            "typo" => RankingRule::Typo,
+            "words" => RankingRule::Words,
+            "proximity" => RankingRule::Proximity,
+            "attribute" => RankingRule::Attribute,
+            "wordsPosition" => RankingRule::WordsPosition,
+            "exactness" => RankingRule::Exactness,
             _ => {
                 let captures = RANKING_RULE_REGEX.captures(s).ok_or(RankingRuleConversionError)?;
                 match (captures.get(1).map(|m| m.as_str()), captures.get(2)) {
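Switching RankingRule from a manual ToString impl to std::fmt::Display (previous hunk) keeps .to_string() available through the standard library's blanket impl<T: Display> ToString for T, and it pairs with the FromStr impl above so rule names round-trip. A small sketch, assuming the RankingRule and RankingRuleConversionError types from this file are in scope and that the regex captures map to Asc/Dsc as the surrounding code suggests:

use std::str::FromStr;

fn round_trip() -> Result<(), RankingRuleConversionError> {
    // Custom rules go through RANKING_RULE_REGEX.
    let rule = RankingRule::from_str("asc(release_date)")?;
    assert_eq!(rule.to_string(), "asc(release_date)");

    // Built-in rules now use camel-cased names without the leading underscore.
    let words = "wordsPosition".parse::<RankingRule>()?;
    assert_eq!(words.to_string(), "wordsPosition");
    Ok(())
}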
@@ -155,26 +159,26 @@ impl RankingRule {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct SettingsUpdate {
     pub ranking_rules: UpdateState<Vec<RankingRule>>,
-    pub ranking_distinct: UpdateState<String>,
+    pub distinct_attribute: UpdateState<String>,
     pub identifier: UpdateState<String>,
     pub searchable_attributes: UpdateState<Vec<String>>,
     pub displayed_attributes: UpdateState<HashSet<String>>,
     pub stop_words: UpdateState<BTreeSet<String>>,
     pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
-    pub index_new_fields: UpdateState<bool>,
+    pub accept_new_fields: UpdateState<bool>,
 }
 
 impl Default for SettingsUpdate {
     fn default() -> Self {
         Self {
             ranking_rules: UpdateState::Nothing,
-            ranking_distinct: UpdateState::Nothing,
+            distinct_attribute: UpdateState::Nothing,
             identifier: UpdateState::Nothing,
             searchable_attributes: UpdateState::Nothing,
             displayed_attributes: UpdateState::Nothing,
             stop_words: UpdateState::Nothing,
             synonyms: UpdateState::Nothing,
-            index_new_fields: UpdateState::Nothing,
+            accept_new_fields: UpdateState::Nothing,
         }
     }
 }
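The SettingsUpdate struct above stores every field as UpdateState<T>, defaulting to Nothing. The UpdateState type itself is not shown in this diff; judging from the Update/Clear/Nothing arms used throughout the PR and the `.into()` calls in Settings::into_update, it plausibly has the following shape (the From impl is an assumption that mirrors those conversions):

use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateState<T> {
    Update(T), // key present with a value: overwrite the setting
    Clear,     // key present but null: reset the setting
    Nothing,   // key absent from the payload: leave the setting untouched
}

impl<T> From<Option<Option<T>>> for UpdateState<T> {
    fn from(opt: Option<Option<T>>) -> UpdateState<T> {
        match opt {
            Some(Some(value)) => UpdateState::Update(value),
            Some(None) => UpdateState::Clear,
            None => UpdateState::Nothing,
        }
    }
}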
@@ -12,7 +12,7 @@ use crate::settings::RankingRule;
 
 const CREATED_AT_KEY: &str = "created-at";
 const RANKING_RULES_KEY: &str = "ranking-rules";
-const RANKING_DISTINCT_KEY: &str = "ranking-distinct";
+const DISTINCT_ATTRIBUTE_KEY: &str = "distinct-attribute";
 const STOP_WORDS_KEY: &str = "stop-words";
 const SYNONYMS_KEY: &str = "synonyms";
 const CUSTOMS_KEY: &str = "customs";

@@ -200,19 +200,19 @@ impl Main {
         self.main.delete::<_, Str>(writer, RANKING_RULES_KEY)
     }
 
-    pub fn ranking_distinct(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> {
-        if let Some(value) = self.main.get::<_, Str, Str>(reader, RANKING_DISTINCT_KEY)? {
+    pub fn distinct_attribute(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> {
+        if let Some(value) = self.main.get::<_, Str, Str>(reader, DISTINCT_ATTRIBUTE_KEY)? {
             return Ok(Some(value.to_owned()))
         }
         return Ok(None)
     }
 
-    pub fn put_ranking_distinct(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> ZResult<()> {
-        self.main.put::<_, Str, Str>(writer, RANKING_DISTINCT_KEY, value)
+    pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> ZResult<()> {
+        self.main.put::<_, Str, Str>(writer, DISTINCT_ATTRIBUTE_KEY, value)
     }
 
-    pub fn delete_ranking_distinct(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
-        self.main.delete::<_, Str>(writer, RANKING_DISTINCT_KEY)
+    pub fn delete_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
+        self.main.delete::<_, Str>(writer, DISTINCT_ATTRIBUTE_KEY)
     }
 
     pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> ZResult<()> {

@@ -58,22 +58,22 @@ pub fn apply_settings_update(
         UpdateState::Nothing => (),
     }
 
-    match settings.ranking_distinct {
+    match settings.distinct_attribute {
         UpdateState::Update(v) => {
-            index.main.put_ranking_distinct(writer, &v)?;
+            index.main.put_distinct_attribute(writer, &v)?;
         },
         UpdateState::Clear => {
-            index.main.delete_ranking_distinct(writer)?;
+            index.main.delete_distinct_attribute(writer)?;
         },
         UpdateState::Nothing => (),
     }
 
-    match settings.index_new_fields {
+    match settings.accept_new_fields {
         UpdateState::Update(v) => {
-            schema.set_index_new_fields(v);
+            schema.set_accept_new_fields(v);
         },
         UpdateState::Clear => {
-            schema.set_index_new_fields(true);
+            schema.set_accept_new_fields(true);
         },
         UpdateState::Nothing => (),
     }

@@ -84,8 +84,7 @@ pub fn apply_settings_update(
             must_reindex = true;
         },
         UpdateState::Clear => {
-            let clear: Vec<&str> = Vec::new();
-            schema.update_indexed(clear)?;
+            schema.set_all_fields_as_indexed();
             must_reindex = true;
         },
         UpdateState::Nothing => (),

@@ -93,8 +92,7 @@ pub fn apply_settings_update(
     match settings.displayed_attributes.clone() {
         UpdateState::Update(v) => schema.update_displayed(v)?,
         UpdateState::Clear => {
-            let clear: Vec<&str> = Vec::new();
-            schema.update_displayed(clear)?;
+            schema.set_all_fields_as_displayed();
         },
         UpdateState::Nothing => (),
     }

@@ -6,8 +6,8 @@ use chrono::{DateTime, Utc};
 use heed::types::{SerdeBincode, Str};
 use log::error;
 use meilisearch_core::{Database, Error as MError, MResult, MainT, UpdateT};
-use sysinfo::Pid;
 use sha2::Digest;
+use sysinfo::Pid;
 
 use crate::option::Opt;
 use crate::routes::index::index_update_callback;

@@ -117,9 +117,7 @@ impl DataInner {
         // convert attributes to their names
         let frequency: HashMap<_, _> = fields_frequency
             .into_iter()
-            .filter_map(|(a, c)| {
-                schema.name(a).map(|name| (name.to_string(), c))
-            })
+            .filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c)))
             .collect();
 
         index

@@ -6,7 +6,7 @@ use tide::Request;
 pub enum ACL {
     Admin,
     Private,
-    Public
+    Public,
 }
 
 pub trait RequestExt {

@@ -23,31 +23,33 @@ impl RequestExt for Request<Data> {
         match acl {
             ACL::Admin => {
                 if user_api_key == self.state().api_keys.master.as_deref() {
-                    return Ok(())
+                    return Ok(());
                 }
-            },
+            }
             ACL::Private => {
                 if user_api_key == self.state().api_keys.master.as_deref() {
-                    return Ok(())
+                    return Ok(());
                 }
                 if user_api_key == self.state().api_keys.private.as_deref() {
-                    return Ok(())
+                    return Ok(());
                 }
-            },
+            }
             ACL::Public => {
                 if user_api_key == self.state().api_keys.master.as_deref() {
-                    return Ok(())
+                    return Ok(());
                 }
                 if user_api_key == self.state().api_keys.private.as_deref() {
-                    return Ok(())
+                    return Ok(());
                 }
                 if user_api_key == self.state().api_keys.public.as_deref() {
-                    return Ok(())
+                    return Ok(());
                 }
             }
         }
 
-        Err(ResponseError::InvalidToken(user_api_key.unwrap_or("Need a token").to_owned()))
+        Err(ResponseError::InvalidToken(
+            user_api_key.unwrap_or("Need a token").to_owned(),
+        ))
     }
 
     fn url_param(&self, name: &str) -> SResult<String> {
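The is_allowed change above only tightens formatting; the access tiers themselves are unchanged: Admin accepts only the master key, Private accepts master or private, Public accepts master, private, or public. A compact way to read that tiering (a sketch only, assuming the ApiKeys state struct with optional master/private/public fields used above):

fn accepted_keys<'a>(acl: &ACL, keys: &'a ApiKeys) -> Vec<Option<&'a str>> {
    match acl {
        ACL::Admin => vec![keys.master.as_deref()],
        ACL::Private => vec![keys.master.as_deref(), keys.private.as_deref()],
        ACL::Public => vec![
            keys.master.as_deref(),
            keys.private.as_deref(),
            keys.public.as_deref(),
        ],
    }
}

// A request is allowed when its user_api_key equals any of the accepted keys;
// otherwise is_allowed returns the InvalidToken error shown above.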
@@ -18,19 +18,21 @@ mod analytics;
 static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
 
 pub fn main() -> Result<(), MainError> {
 
     let opt = Opt::from_args();
 
     match opt.env.as_ref() {
         "production" => {
             if opt.master_key.is_none() {
-                return Err("In production mode, the environment variable MEILI_MASTER_KEY is mandatory".into());
+                return Err(
+                    "In production mode, the environment variable MEILI_MASTER_KEY is mandatory"
+                        .into(),
+                );
             }
             env_logger::init();
-        },
+        }
         "development" => {
             env_logger::from_env(env_logger::Env::default().default_filter_or("info")).init();
-        },
+        }
         _ => unreachable!(),
     }
 

@@ -58,7 +60,6 @@ pub fn main() -> Result<(), MainError> {
     Ok(())
 }
 
-
 pub fn print_launch_resume(opt: &Opt, data: &Data) {
     let ascii_name = r#"
 888b d888 d8b 888 d8b .d8888b. 888

@@ -77,8 +78,14 @@ pub fn print_launch_resume(opt: &Opt, data: &Data) {
     info!("Start server on: {:?}", opt.http_addr);
     info!("Environment: {:?}", opt.env);
     info!("Commit SHA: {:?}", env!("VERGEN_SHA").to_string());
-    info!("Build date: {:?}", env!("VERGEN_BUILD_TIMESTAMP").to_string());
-    info!("Package version: {:?}", env!("CARGO_PKG_VERSION").to_string());
+    info!(
+        "Build date: {:?}",
+        env!("VERGEN_BUILD_TIMESTAMP").to_string()
+    );
+    info!(
+        "Package version: {:?}",
+        env!("CARGO_PKG_VERSION").to_string()
+    );
 
     if let Some(master_key) = &data.api_keys.master {
         info!("Master Key: {:?}", master_key);

@@ -145,7 +145,7 @@ async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) ->
             None => return Err(ResponseError::bad_request("Could not infer a schema")),
         },
     };
-    let settings_update = SettingsUpdate{
+    let settings_update = SettingsUpdate {
         identifier: UpdateState::Update(id),
         ..SettingsUpdate::default()
     };

@@ -42,7 +42,7 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
 
         let identifier = match index.main.schema(&reader) {
             Ok(Some(schema)) => Some(schema.identifier().to_owned()),
-            _ => None
+            _ => None,
         };
 
         let index_response = IndexResponse {

@@ -89,7 +89,7 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
 
     let identifier = match index.main.schema(&reader) {
         Ok(Some(schema)) => Some(schema.identifier().to_owned()),
-        _ => None
+        _ => None,
     };
 
     let response_body = IndexResponse {

@@ -97,7 +97,7 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
         uid,
         created_at,
         updated_at,
-        identifier
+        identifier,
     };
 
     Ok(tide::Response::new(200).body_json(&response_body)?)

@@ -220,9 +220,13 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
 
     if let Some(identifier) = body.identifier {
         if let Ok(Some(_)) = index.main.schema(&writer) {
-            return Err(ResponseError::bad_request("The index identifier cannot be updated"));
+            return Err(ResponseError::bad_request(
+                "The index identifier cannot be updated",
+            ));
         }
-        index.main.put_schema(&mut writer, &Schema::with_identifier(&identifier))?;
+        index
+            .main
+            .put_schema(&mut writer, &Schema::with_identifier(&identifier))?;
     }
 
     index.main.put_updated_at(&mut writer)?;

@@ -235,7 +239,7 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
 
     let identifier = match index.main.schema(&reader) {
         Ok(Some(schema)) => Some(schema.identifier().to_owned()),
-        _ => None
+        _ => None,
     };
 
     let response_body = UpdateIndexResponse {

@@ -243,7 +247,7 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
         uid: index_uid,
         created_at,
         updated_at,
-        identifier
+        identifier,
     };
 
     Ok(tide::Response::new(200).body_json(&response_body)?)

@@ -1,18 +1,17 @@
-use tide::{Request, Response};
-use serde_json::json;
 use crate::error::SResult;
 use crate::helpers::tide::RequestExt;
 use crate::helpers::tide::ACL::*;
 use crate::Data;
+use serde_json::json;
+use tide::{Request, Response};
 
 pub async fn list(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Admin)?;
 
     let keys = &ctx.state().api_keys;
 
-    Ok(tide::Response::new(200)
-        .body_json(&json!({
-            "private": keys.private,
-            "public": keys.public,
-        }))?)
+    Ok(tide::Response::new(200).body_json(&json!({
+        "private": keys.private,
+        "public": keys.public,
+    }))?)
 }

@@ -23,19 +23,15 @@ async fn into_response<T: IntoResponse, U: IntoResponse>(
 }
 
 pub fn load_routes(app: &mut tide::Server<Data>) {
-    app.at("/").get(|_| {
-        async move {
-            tide::Response::new(200)
-                .body_string(include_str!("../../public/interface.html").to_string())
-                .set_mime(mime::TEXT_HTML_UTF_8)
-        }
+    app.at("/").get(|_| async {
+        tide::Response::new(200)
+            .body_string(include_str!("../../public/interface.html").to_string())
+            .set_mime(mime::TEXT_HTML_UTF_8)
     });
-    app.at("/bulma.min.css").get(|_| {
-        async {
-            tide::Response::new(200)
-                .body_string(include_str!("../../public/bulma.min.css").to_string())
-                .set_mime(mime::TEXT_CSS_UTF_8)
-        }
+    app.at("/bulma.min.css").get(|_| async {
+        tide::Response::new(200)
+            .body_string(include_str!("../../public/bulma.min.css").to_string())
+            .set_mime(mime::TEXT_CSS_UTF_8)
     });
 
     app.at("/indexes")

@@ -82,7 +78,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         .post(|ctx| into_response(setting::update_rules(ctx)))
         .delete(|ctx| into_response(setting::delete_rules(ctx)));
 
-    app.at("/indexes/:index/settings/ranking-distinct")
+    app.at("/indexes/:index/settings/distinct-attribute")
         .get(|ctx| into_response(setting::get_distinct(ctx)))
         .post(|ctx| into_response(setting::update_distinct(ctx)))
         .delete(|ctx| into_response(setting::delete_distinct(ctx)));

@@ -101,8 +97,8 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         .delete(|ctx| into_response(setting::delete_displayed(ctx)));
 
     app.at("/indexes/:index/settings/index-new-field")
-        .get(|ctx| into_response(setting::get_index_new_fields(ctx)))
-        .post(|ctx| into_response(setting::update_index_new_fields(ctx)));
+        .get(|ctx| into_response(setting::get_accept_new_fields(ctx)))
+        .post(|ctx| into_response(setting::update_accept_new_fields(ctx)));
 
     app.at("/indexes/:index/settings/synonyms")
         .get(|ctx| into_response(synonym::get(ctx)))

@@ -117,8 +113,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
     app.at("/indexes/:index/stats")
         .get(|ctx| into_response(stats::index_stats(ctx)));
 
-    app.at("/keys/")
-        .get(|ctx| into_response(key::list(ctx)));
+    app.at("/keys/").get(|ctx| into_response(key::list(ctx)));
 
     app.at("/health")
         .get(|ctx| into_response(health::get_health(ctx)))

@@ -7,10 +7,10 @@ use rayon::iter::{IntoParallelIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
 use tide::{Request, Response};
 
-use crate::helpers::tide::ACL::*;
 use crate::error::{ResponseError, SResult};
 use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
 use crate::helpers::tide::RequestExt;
+use crate::helpers::tide::ACL::*;
 use crate::Data;
 
 #[derive(Deserialize)]

@@ -1,4 +1,4 @@
-use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState};
+use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
 use serde::Deserialize;
 use std::collections::{BTreeMap, BTreeSet, HashSet};
 use tide::{Request, Response};

@@ -46,16 +46,21 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
         None
     };
 
-    let ranking_rules = match index.main.ranking_rules(&reader)? {
-        Some(rules) => Some(rules.iter().map(|r| r.to_string()).collect()),
-        None => None,
-    };
-    let ranking_distinct = index.main.ranking_distinct(&reader)?;
+    let ranking_rules = index
+        .main
+        .ranking_rules(&reader)?
+        .unwrap_or(DEFAULT_RANKING_RULES.to_vec())
+        .into_iter()
+        .map(|r| r.to_string())
+        .collect();
+
+    let distinct_attribute = index.main.distinct_attribute(&reader)?;
 
     let schema = index.main.schema(&reader)?;
 
     let searchable_attributes = schema.clone().map(|s| {
-        let attrs = s.indexed_name()
+        let attrs = s
+            .indexed_name()
             .iter()
             .map(|s| (*s).to_string())
             .collect::<Vec<String>>();

@@ -67,7 +72,8 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
     });
 
     let displayed_attributes = schema.clone().map(|s| {
-        let attrs = s.displayed_name()
+        let attrs = s
+            .displayed_name()
             .iter()
             .map(|s| (*s).to_string())
             .collect::<HashSet<String>>();

@@ -77,16 +83,16 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
             Some(attrs)
         }
     });
-    let index_new_fields = schema.map(|s| s.index_new_fields());
+    let accept_new_fields = schema.map(|s| s.accept_new_fields());
 
     let settings = Settings {
-        ranking_rules: Some(ranking_rules),
-        ranking_distinct: Some(ranking_distinct),
+        ranking_rules: Some(Some(ranking_rules)),
+        distinct_attribute: Some(distinct_attribute),
         searchable_attributes,
         displayed_attributes,
         stop_words: Some(stop_words),
         synonyms: Some(synonyms),
-        index_new_fields: Some(index_new_fields),
+        accept_new_fields: Some(accept_new_fields),
     };
 
     Ok(tide::Response::new(200).body_json(&settings).unwrap())

@@ -96,13 +102,13 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct UpdateSettings {
     pub ranking_rules: Option<Vec<String>>,
-    pub ranking_distinct: Option<String>,
+    pub distinct_attribute: Option<String>,
     pub identifier: Option<String>,
     pub searchable_attributes: Option<Vec<String>>,
     pub displayed_attributes: Option<HashSet<String>>,
     pub stop_words: Option<BTreeSet<String>>,
     pub synonyms: Option<BTreeMap<String, Vec<String>>>,
-    pub index_new_fields: Option<bool>,
+    pub accept_new_fields: Option<bool>,
 }
 
 pub async fn update_all(mut ctx: Request<Data>) -> SResult<Response> {
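With the get_all change above (and the same pattern in get_rules further down), an index that has never stored ranking rules now reports DEFAULT_RANKING_RULES instead of null, each rule rendered through the Display impl introduced in this PR. A small sketch of what that fallback serializes to, assuming DEFAULT_RANKING_RULES from meilisearch_core::settings is in scope:

#[test]
fn default_ranking_rules_render_as_strings() {
    let rules: Vec<String> = DEFAULT_RANKING_RULES
        .to_vec()
        .into_iter()
        .map(|r| r.to_string())
        .collect();
    assert_eq!(
        rules,
        vec!["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"]
    );
}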
@@ -114,12 +120,12 @@ pub async fn update_all(mut ctx: Request<Data>) -> SResult<Response> {
 
     let settings = Settings {
         ranking_rules: Some(settings_update.ranking_rules),
-        ranking_distinct: Some(settings_update.ranking_distinct),
+        distinct_attribute: Some(settings_update.distinct_attribute),
         searchable_attributes: Some(settings_update.searchable_attributes),
         displayed_attributes: Some(settings_update.displayed_attributes),
         stop_words: Some(settings_update.stop_words),
         synonyms: Some(settings_update.synonyms),
-        index_new_fields: Some(settings_update.index_new_fields),
+        accept_new_fields: Some(settings_update.accept_new_fields),
     };
 
     let mut writer = db.update_write_txn()?;

@@ -138,13 +144,13 @@ pub async fn delete_all(ctx: Request<Data>) -> SResult<Response> {
 
     let settings = SettingsUpdate {
         ranking_rules: UpdateState::Clear,
-        ranking_distinct: UpdateState::Clear,
+        distinct_attribute: UpdateState::Clear,
         identifier: UpdateState::Clear,
         searchable_attributes: UpdateState::Clear,
         displayed_attributes: UpdateState::Clear,
         stop_words: UpdateState::Clear,
         synonyms: UpdateState::Clear,
-        index_new_fields: UpdateState::Clear,
+        accept_new_fields: UpdateState::Clear,
     };
 
     let update_id = index.settings_update(&mut writer, settings)?;

@@ -161,10 +167,13 @@ pub async fn get_rules(ctx: Request<Data>) -> SResult<Response> {
     let db = &ctx.state().db;
     let reader = db.main_read_txn()?;
 
-    let ranking_rules: Option<Vec<String>> = match index.main.ranking_rules(&reader)? {
-        Some(rules) => Some(rules.iter().map(|r| r.to_string()).collect()),
-        None => None,
-    };
+    let ranking_rules = index
+        .main
+        .ranking_rules(&reader)?
+        .unwrap_or(DEFAULT_RANKING_RULES.to_vec())
+        .into_iter()
+        .map(|r| r.to_string())
+        .collect::<Vec<String>>();
 
     Ok(tide::Response::new(200).body_json(&ranking_rules).unwrap())
 }

@@ -214,22 +223,22 @@ pub async fn get_distinct(ctx: Request<Data>) -> SResult<Response> {
     let db = &ctx.state().db;
     let reader = db.main_read_txn()?;
 
-    let ranking_distinct = index.main.ranking_distinct(&reader)?;
+    let distinct_attribute = index.main.distinct_attribute(&reader)?;
 
     Ok(tide::Response::new(200)
-        .body_json(&ranking_distinct)
+        .body_json(&distinct_attribute)
         .unwrap())
 }
 
 pub async fn update_distinct(mut ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Private)?;
     let index = ctx.index()?;
-    let ranking_distinct: Option<String> =
+    let distinct_attribute: Option<String> =
         ctx.body_json().await.map_err(ResponseError::bad_request)?;
     let db = &ctx.state().db;
 
     let settings = Settings {
-        ranking_distinct: Some(ranking_distinct),
+        distinct_attribute: Some(distinct_attribute),
         ..Settings::default()
     };
 

@@ -248,7 +257,7 @@ pub async fn delete_distinct(ctx: Request<Data>) -> SResult<Response> {
     let mut writer = db.update_write_txn()?;
 
     let settings = SettingsUpdate {
-        ranking_distinct: UpdateState::Clear,
+        distinct_attribute: UpdateState::Clear,
         ..SettingsUpdate::default()
     };
 

@@ -385,7 +394,7 @@ pub async fn delete_displayed(ctx: Request<Data>) -> SResult<Response> {
     Ok(tide::Response::new(202).body_json(&response_body)?)
 }
 
-pub async fn get_index_new_fields(ctx: Request<Data>) -> SResult<Response> {
+pub async fn get_accept_new_fields(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Private)?;
     let index = ctx.index()?;
     let db = &ctx.state().db;

@@ -393,22 +402,22 @@ pub async fn get_index_new_fields(ctx: Request<Data>) -> SResult<Response> {
 
     let schema = index.main.schema(&reader)?;
 
-    let index_new_fields = schema.map(|s| s.index_new_fields());
+    let accept_new_fields = schema.map(|s| s.accept_new_fields());
 
     Ok(tide::Response::new(200)
-        .body_json(&index_new_fields)
+        .body_json(&accept_new_fields)
         .unwrap())
 }
 
-pub async fn update_index_new_fields(mut ctx: Request<Data>) -> SResult<Response> {
+pub async fn update_accept_new_fields(mut ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Private)?;
     let index = ctx.index()?;
-    let index_new_fields: Option<bool> =
+    let accept_new_fields: Option<bool> =
         ctx.body_json().await.map_err(ResponseError::bad_request)?;
     let db = &ctx.state().db;
 
     let settings = Settings {
-        index_new_fields: Some(index_new_fields),
+        accept_new_fields: Some(accept_new_fields),
         ..Settings::default()
     };
 

@@ -57,16 +57,16 @@ pub fn enrich_server_with_movies_settings(
 ) -> Result<(), Box<dyn Error>> {
     let json = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)",
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
        "searchableAttributes": [
             "title",
             "tagline",

@@ -92,7 +92,7 @@ pub fn enrich_server_with_movies_settings(
         ],
         "stopWords": null,
         "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     let body = json.to_string().into_bytes();

@@ -179,7 +179,7 @@ pub fn wait_update_id(server: &mut TestBackend<Service<Data>>, update_id: u64) {
         let response: Value = serde_json::from_slice(&buf).unwrap();
 
         if response["status"] == "processed" {
-            return
+            return;
         }
         block_on(sleep(Duration::from_secs(1)));
     }

@@ -626,16 +626,16 @@ fn search_with_settings_basic() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "title",

@@ -662,7 +662,7 @@ fn search_with_settings_basic() {
         ],
         "stopWords": null,
         "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -732,16 +732,16 @@ fn search_with_settings_stop_words() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "title",

@@ -768,7 +768,7 @@ fn search_with_settings_stop_words() {
         ],
         "stopWords": ["the"],
         "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -839,16 +839,16 @@ fn search_with_settings_synonyms() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "title",

@@ -880,7 +880,7 @@ fn search_with_settings_synonyms() {
                 "Iron Man"
             ]
         },
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -951,16 +951,16 @@ fn search_with_settings_ranking_rules() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "asc(vote_average)",
-            "_exactness",
+            "exactness",
             "dsc(popularity)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "title",

@@ -987,7 +987,7 @@ fn search_with_settings_ranking_rules() {
         ],
         "stopWords": null,
         "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -1058,16 +1058,16 @@ fn search_with_settings_searchable_attributes() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "tagline",

@@ -1093,7 +1093,7 @@ fn search_with_settings_searchable_attributes() {
         ],
         "stopWords": null,
         "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -1164,16 +1164,16 @@ fn search_with_settings_displayed_attributes() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "title",

@@ -1194,7 +1194,7 @@ fn search_with_settings_displayed_attributes() {
         ],
         "stopWords": null,
         "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -1235,16 +1235,16 @@ fn search_with_settings_searchable_attributes_2() {
 
     let config = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
             "dsc(popularity)",
-            "_exactness",
+            "exactness",
             "dsc(vote_average)"
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "identifier": "id",
         "searchableAttributes": [
             "tagline",

@@ -1265,7 +1265,7 @@ fn search_with_settings_searchable_attributes_2() {
         ],
         "stopWords": null,
        "synonyms": null,
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     common::update_config(&mut server, config);

@@ -41,16 +41,16 @@ fn write_all_and_delete() {
 
     let json = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
-            "_exactness",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
             "dsc(release_date)",
             "dsc(rank)",
         ],
-        "rankingDistinct": "movie_id",
+        "distinctAttribute": "movie_id",
         "searchableAttributes": [
             "id",
             "movie_id",

@@ -76,7 +76,7 @@ fn write_all_and_delete() {
             "wolverine": ["xmen", "logan"],
             "logan": ["wolverine"],
         },
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     let body = json.to_string().into_bytes();

@@ -126,13 +126,36 @@ fn write_all_and_delete() {
     let res_value: Value = serde_json::from_slice(&buf).unwrap();
 
     let json = json!({
-        "rankingRules": null,
-        "rankingDistinct": null,
-        "searchableAttributes": null,
-        "displayedAttributes": null,
+        "rankingRules": [
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness"
+        ],
+        "distinctAttribute": null,
+        "searchableAttributes": [
+            "id",
+            "release_date",
+            "poster",
+            "description",
+            "title",
+            "movie_id",
+            "rank"
+        ],
+        "displayedAttributes": [
+            "movie_id",
+            "description",
+            "poster",
+            "id",
+            "release_date",
+            "rank",
+            "title"
+        ],
         "stopWords": null,
         "synonyms": null,
-        "indexNewFields": true,
+        "acceptNewFields": true,
     });
 
     assert_json_eq!(json, res_value, ordered: false);

@@ -169,16 +192,16 @@ fn write_all_and_update() {
 
     let json = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
-            "_exactness",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
             "dsc(release_date)",
             "dsc(rank)",
         ],
-        "rankingDistinct": "movie_id",
+        "distinctAttribute": "movie_id",
         "searchableAttributes": [
             "uid",
             "movie_id",

@@ -204,7 +227,7 @@ fn write_all_and_update() {
             "wolverine": ["xmen", "logan"],
             "logan": ["wolverine"],
         },
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     let body = json.to_string().into_bytes();

@@ -235,12 +258,12 @@ fn write_all_and_update() {
 
     let json_update = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
-            "_exactness",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
             "dsc(release_date)",
         ],
         "searchableAttributes": [

@@ -261,7 +284,7 @@ fn write_all_and_update() {
             "wolverine": ["xmen", "logan"],
             "logan": ["wolverine", "xmen"],
         },
-        "indexNewFields": false,
+        "acceptNewFields": false,
     });
 
     let body_update = json_update.to_string().into_bytes();

@@ -288,15 +311,15 @@ fn write_all_and_update() {
 
     let res_expected = json!({
         "rankingRules": [
-            "_typo",
-            "_words",
-            "_proximity",
-            "_attribute",
-            "_words_position",
-            "_exactness",
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
             "dsc(release_date)",
         ],
-        "rankingDistinct": null,
+        "distinctAttribute": null,
         "searchableAttributes": [
             "title",
             "description",

@@ -314,7 +337,7 @@ fn write_all_and_update() {
             "wolverine": ["xmen", "logan"],
             "logan": ["wolverine", "xmen"],
         },
-        "indexNewFields": false
+        "acceptNewFields": false
     });
 
     assert_json_eq!(res_expected, res_value, ordered: false);

@@ -39,12 +39,12 @@ fn write_all_and_delete() {
     // 2 - Send the settings
 
     let json = json!([
-        "_typo",
-        "_words",
-        "_proximity",
-        "_attribute",
-        "_words_position",
-        "_exactness",
+        "typo",
+        "words",
+        "proximity",
+        "attribute",
+        "wordsPosition",
+        "exactness",
         "dsc(release_date)",
        "dsc(rank)",
     ]);

@@ -95,7 +95,14 @@ fn write_all_and_delete() {
     block_on(res.into_body().read_to_end(&mut buf)).unwrap();
     let res_value: Value = serde_json::from_slice(&buf).unwrap();
 
-    let json = json!(null);
+    let json = json!([
+        "typo",
+        "words",
+        "proximity",
+        "attribute",
+        "wordsPosition",
+        "exactness"
+    ]);
 
     assert_json_eq!(json, res_value, ordered: false);
 }

@@ -130,12 +137,12 @@ fn write_all_and_update() {
     // 2 - Send the settings
 
     let json = json!([
-        "_typo",
-        "_words",
-        "_proximity",
-        "_attribute",
-        "_words_position",
-        "_exactness",
+        "typo",
+        "words",
+        "proximity",
+        "attribute",
+        "wordsPosition",
+        "exactness",
         "dsc(release_date)",
         "dsc(rank)",
     ]);

@@ -167,12 +174,12 @@ fn write_all_and_update() {
     // 4 - Update all settings
 
     let json_update = json!([
-        "_typo",
-        "_words",
-        "_proximity",
-        "_attribute",
-        "_words_position",
-        "_exactness",
+        "typo",
+        "words",
+        "proximity",
+        "attribute",
+        "wordsPosition",
+        "exactness",
         "dsc(release_date)",
     ]);
 

@@ -199,12 +206,12 @@ fn write_all_and_update() {
     let res_value: Value = serde_json::from_slice(&buf).unwrap();
 
     let res_expected = json!([
-        "_typo",
-        "_words",
-        "_proximity",
-        "_attribute",
-        "_words_position",
-        "_exactness",
+        "typo",
+        "words",
+        "proximity",
+        "attribute",
+        "wordsPosition",
+        "exactness",
         "dsc(release_date)",
     ]);
 

@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::collections::hash_map::Iter;
 
 use serde::{Deserialize, Serialize};
 

@@ -45,6 +46,10 @@ impl FieldsMap {
     pub fn name<I: Into<FieldId>>(&self, id: I) -> Option<&str> {
         self.id_map.get(&id.into()).map(|s| s.as_str())
     }
+
+    pub fn iter(&self) -> Iter<'_, String, FieldId> {
+        self.name_map.iter()
+    }
 }
 
 #[cfg(test)]

@@ -13,7 +13,7 @@ pub struct Schema {
     indexed: Vec<FieldId>,
     indexed_map: HashMap<FieldId, IndexedPos>,
 
-    index_new_fields: bool,
+    accept_new_fields: bool,
 }
 
 impl Schema {

@@ -28,7 +28,7 @@ impl Schema {
             displayed: HashSet::new(),
             indexed: Vec::new(),
             indexed_map: HashMap::new(),
-            index_new_fields: true,
+            accept_new_fields: true,
         }
     }
 

@@ -68,7 +68,7 @@ impl Schema {
                 Ok(id)
             }
             None => {
-                if self.index_new_fields {
+                if self.accept_new_fields {
                     self.set_indexed(name)?;
                     self.set_displayed(name)
                 } else {

@@ -190,11 +190,30 @@ impl Schema {
         Ok(())
     }
 
-    pub fn index_new_fields(&self) -> bool {
-        self.index_new_fields
+    pub fn set_all_fields_as_indexed(&mut self) {
+        self.indexed.clear();
+        self.indexed_map.clear();
+
+        for (_name, id) in self.fields_map.iter() {
+            let pos = self.indexed.len() as u16;
+            self.indexed.push(*id);
+            self.indexed_map.insert(*id, pos.into());
+        }
     }
 
-    pub fn set_index_new_fields(&mut self, value: bool) {
-        self.index_new_fields = value;
+    pub fn set_all_fields_as_displayed(&mut self) {
+        self.displayed.clear();
+
+        for (_name, id) in self.fields_map.iter() {
+            self.displayed.insert(*id);
+        }
+    }
+
+    pub fn accept_new_fields(&self) -> bool {
+        self.accept_new_fields
+    }
+
+    pub fn set_accept_new_fields(&mut self, value: bool) {
+        self.accept_new_fields = value;
     }
 }
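The two new Schema helpers above are what the settings-update hunks earlier in this PR call on UpdateState::Clear: clearing searchableAttributes or displayedAttributes no longer passes an empty list to update_indexed/update_displayed, it re-enables every field currently known to the fields map (via the iter() method added to FieldsMap). A hypothetical usage sketch, assuming a Schema that already contains some fields:

fn reset_field_visibility(schema: &mut Schema) {
    // Every field currently in the fields map becomes indexed again,
    // with positions reassigned in iteration order.
    schema.set_all_fields_as_indexed();
    // Every known field becomes displayed again.
    schema.set_all_fields_as_displayed();
}

Whether fields added later are picked up automatically still depends on the accept_new_fields flag toggled through set_accept_new_fields.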