From a56db854a215eaa7f7c39a7984602d73f1c6385e Mon Sep 17 00:00:00 2001 From: mpostma Date: Thu, 4 Mar 2021 11:56:32 +0100 Subject: [PATCH] refactor update handler --- src/data/mod.rs | 3 +- src/data/updates.rs | 5 +- src/index/mod.rs | 19 ++ src/index/search.rs | 245 ++++++++++++++++++ src/index/updates.rs | 229 ++++++++++++++++ .../actor_index_controller/index_actor.rs | 3 +- .../actor_index_controller/mod.rs | 9 +- .../actor_index_controller/update_actor.rs | 3 +- .../actor_index_controller/update_handler.rs | 178 +------------ src/index_controller/mod.rs | 68 +---- src/routes/settings/mod.rs | 6 +- 11 files changed, 522 insertions(+), 246 deletions(-) create mode 100644 src/index/mod.rs create mode 100644 src/index/search.rs create mode 100644 src/index/updates.rs diff --git a/src/data/mod.rs b/src/data/mod.rs index 58acb105a..79572fcf7 100644 --- a/src/data/mod.rs +++ b/src/data/mod.rs @@ -7,8 +7,9 @@ use std::sync::Arc; use sha2::Digest; -use crate::index_controller::{IndexMetadata, Settings, IndexSettings}; +use crate::index_controller::{IndexMetadata, IndexSettings}; use crate::index_controller::actor_index_controller::IndexController; +use crate::index::Settings; use crate::option::Opt; #[derive(Clone)] diff --git a/src/data/updates.rs b/src/data/updates.rs index 01f5174a2..9f3e9b3fd 100644 --- a/src/data/updates.rs +++ b/src/data/updates.rs @@ -4,10 +4,11 @@ use milli::update::{IndexDocumentsMethod, UpdateFormat}; //use tokio::io::AsyncWriteExt; use actix_web::web::Payload; -use crate::index_controller::UpdateStatus; -use crate::index_controller::{Settings, IndexMetadata}; +use crate::index_controller::{UpdateStatus, IndexMetadata}; +use crate::index::Settings; use super::Data; + impl Data { pub async fn add_documents( &self, diff --git a/src/index/mod.rs b/src/index/mod.rs new file mode 100644 index 000000000..f35e6b3dd --- /dev/null +++ b/src/index/mod.rs @@ -0,0 +1,19 @@ +mod search; +mod updates; + +use std::sync::Arc; +use std::ops::Deref; + +pub use search::{SearchQuery, SearchResult, DEFAULT_SEARCH_LIMIT}; +pub use updates::{Settings, Facets, UpdateResult}; + +#[derive(Clone)] +pub struct Index(pub Arc); + +impl Deref for Index { + type Target = milli::Index; + + fn deref(&self) -> &Self::Target { + self.0.as_ref() + } +} diff --git a/src/index/search.rs b/src/index/search.rs new file mode 100644 index 000000000..1264d49d6 --- /dev/null +++ b/src/index/search.rs @@ -0,0 +1,245 @@ +use std::time::Instant; +use std::collections::{HashSet, BTreeMap}; +use std::mem; + +use either::Either; +use anyhow::bail; +use heed::RoTxn; +use meilisearch_tokenizer::{Analyzer, AnalyzerConfig}; +use milli::{FacetCondition, facet::FacetValue}; +use serde::{Serialize, Deserialize}; +use serde_json::{Value, Map}; + +use super::Index; + +pub const DEFAULT_SEARCH_LIMIT: usize = 20; + +const fn default_search_limit() -> usize { + DEFAULT_SEARCH_LIMIT +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[allow(dead_code)] +pub struct SearchQuery { + pub q: Option, + pub offset: Option, + #[serde(default = "default_search_limit")] + pub limit: usize, + pub attributes_to_retrieve: Option>, + pub attributes_to_crop: Option>, + pub crop_length: Option, + pub attributes_to_highlight: Option>, + pub filters: Option, + pub matches: Option, + pub facet_filters: Option, + pub facet_distributions: Option>, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SearchResult { + pub hits: Vec>, + pub nb_hits: u64, + pub query: String, + pub limit: usize, + 
pub offset: usize, + pub processing_time_ms: u128, + #[serde(skip_serializing_if = "Option::is_none")] + pub facet_distributions: Option>>, +} + +impl Index { + pub fn perform_search(&self, query: SearchQuery) -> anyhow::Result { + let before_search = Instant::now(); + let rtxn = self.read_txn()?; + + let mut search = self.search(&rtxn); + + if let Some(ref query) = query.q { + search.query(query); + } + + search.limit(query.limit); + search.offset(query.offset.unwrap_or_default()); + + if let Some(ref facets) = query.facet_filters { + if let Some(facets) = parse_facets(facets, self, &rtxn)? { + search.facet_condition(facets); + } + } + + let milli::SearchResult { + documents_ids, + found_words, + candidates, + .. + } = search.execute()?; + let mut documents = Vec::new(); + let fields_ids_map = self.fields_ids_map(&rtxn).unwrap(); + + let displayed_fields_ids = self.displayed_fields_ids(&rtxn).unwrap(); + + let attributes_to_retrieve_ids = match query.attributes_to_retrieve { + Some(ref attrs) if attrs.iter().any(|f| f == "*") => None, + Some(ref attrs) => attrs + .iter() + .filter_map(|f| fields_ids_map.id(f)) + .collect::>() + .into(), + None => None, + }; + + let displayed_fields_ids = match (displayed_fields_ids, attributes_to_retrieve_ids) { + (_, Some(ids)) => ids, + (Some(ids), None) => ids, + (None, None) => fields_ids_map.iter().map(|(id, _)| id).collect(), + }; + + let stop_words = fst::Set::default(); + let highlighter = Highlighter::new(&stop_words); + + for (_id, obkv) in self.documents(&rtxn, documents_ids)? { + let mut object = milli::obkv_to_json(&displayed_fields_ids, &fields_ids_map, obkv).unwrap(); + if let Some(ref attributes_to_highlight) = query.attributes_to_highlight { + highlighter.highlight_record(&mut object, &found_words, attributes_to_highlight); + } + documents.push(object); + } + + let nb_hits = candidates.len(); + + let facet_distributions = match query.facet_distributions { + Some(ref fields) => { + let mut facet_distribution = self.facets_distribution(&rtxn); + if fields.iter().all(|f| f != "*") { + facet_distribution.facets(fields); + } + Some(facet_distribution.candidates(candidates).execute()?) 
+ } + None => None, + }; + + let result = SearchResult { + hits: documents, + nb_hits, + query: query.q.clone().unwrap_or_default(), + limit: query.limit, + offset: query.offset.unwrap_or_default(), + processing_time_ms: before_search.elapsed().as_millis(), + facet_distributions, + }; + Ok(result) + } +} + +fn parse_facets_array( + txn: &RoTxn, + index: &Index, + arr: &Vec, +) -> anyhow::Result> { + let mut ands = Vec::new(); + for value in arr { + match value { + Value::String(s) => ands.push(Either::Right(s.clone())), + Value::Array(arr) => { + let mut ors = Vec::new(); + for value in arr { + match value { + Value::String(s) => ors.push(s.clone()), + v => bail!("Invalid facet expression, expected String, found: {:?}", v), + } + } + ands.push(Either::Left(ors)); + } + v => bail!( + "Invalid facet expression, expected String or [String], found: {:?}", + v + ), + } + } + + FacetCondition::from_array(txn, &index.0, ands) +} + +pub struct Highlighter<'a, A> { + analyzer: Analyzer<'a, A>, +} + +impl<'a, A: AsRef<[u8]>> Highlighter<'a, A> { + pub fn new(stop_words: &'a fst::Set) -> Self { + let analyzer = Analyzer::new(AnalyzerConfig::default_with_stopwords(stop_words)); + + Self { analyzer } + } + + pub fn highlight_value(&self, value: Value, words_to_highlight: &HashSet) -> Value { + match value { + Value::Null => Value::Null, + Value::Bool(boolean) => Value::Bool(boolean), + Value::Number(number) => Value::Number(number), + Value::String(old_string) => { + let mut string = String::new(); + let analyzed = self.analyzer.analyze(&old_string); + for (word, token) in analyzed.reconstruct() { + if token.is_word() { + let to_highlight = words_to_highlight.contains(token.text()); + if to_highlight { + string.push_str("") + } + string.push_str(word); + if to_highlight { + string.push_str("") + } + } else { + string.push_str(word); + } + } + Value::String(string) + } + Value::Array(values) => Value::Array( + values + .into_iter() + .map(|v| self.highlight_value(v, words_to_highlight)) + .collect(), + ), + Value::Object(object) => Value::Object( + object + .into_iter() + .map(|(k, v)| (k, self.highlight_value(v, words_to_highlight))) + .collect(), + ), + } + } + + pub fn highlight_record( + &self, + object: &mut Map, + words_to_highlight: &HashSet, + attributes_to_highlight: &HashSet, + ) { + // TODO do we need to create a string for element that are not and needs to be highlight? 
+ for (key, value) in object.iter_mut() { + if attributes_to_highlight.contains(key) { + let old_value = mem::take(value); + *value = self.highlight_value(old_value, words_to_highlight); + } + } + } +} + +fn parse_facets( + facets: &Value, + index: &Index, + txn: &RoTxn, +) -> anyhow::Result> { + match facets { + // Disabled for now + //Value::String(expr) => Ok(Some(FacetCondition::from_str(txn, index, expr)?)), + Value::Array(arr) => parse_facets_array(txn, index, arr), + v => bail!( + "Invalid facet expression, expected Array, found: {:?}", + v + ), + } +} diff --git a/src/index/updates.rs b/src/index/updates.rs new file mode 100644 index 000000000..d339406f7 --- /dev/null +++ b/src/index/updates.rs @@ -0,0 +1,229 @@ +use std::collections::HashMap; +use std::io; +use std::num::NonZeroUsize; + +use flate2::read::GzDecoder; +use log::info; +use milli::update::{UpdateFormat, IndexDocumentsMethod, UpdateBuilder, DocumentAdditionResult}; +use serde::{Serialize, Deserialize, de::Deserializer}; + +use super::Index; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum UpdateResult { + DocumentsAddition(DocumentAdditionResult), + DocumentDeletion { deleted: usize }, + Other, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct Settings { + #[serde( + default, + deserialize_with = "deserialize_some", + skip_serializing_if = "Option::is_none", + )] + pub displayed_attributes: Option>>, + + #[serde( + default, + deserialize_with = "deserialize_some", + skip_serializing_if = "Option::is_none", + )] + pub searchable_attributes: Option>>, + + #[serde(default)] + pub faceted_attributes: Option>>, + + #[serde( + default, + deserialize_with = "deserialize_some", + skip_serializing_if = "Option::is_none", + )] + pub criteria: Option>>, +} + +impl Settings { + pub fn cleared() -> Self { + Self { + displayed_attributes: Some(None), + searchable_attributes: Some(None), + faceted_attributes: Some(None), + criteria: Some(None), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct Facets { + pub level_group_size: Option, + pub min_level_size: Option, +} + +fn deserialize_some<'de, T, D>(deserializer: D) -> Result, D::Error> +where T: Deserialize<'de>, + D: Deserializer<'de> +{ + Deserialize::deserialize(deserializer).map(Some) +} + +impl Index { + pub fn update_documents( + &self, + format: UpdateFormat, + method: IndexDocumentsMethod, + content: impl io::Read, + update_builder: UpdateBuilder, + primary_key: Option<&str>, + ) -> anyhow::Result { + info!("performing document addition"); + // We must use the write transaction of the update here. + let mut wtxn = self.write_txn()?; + + // Set the primary key if not set already, ignore if already set. 
+ match (self.primary_key(&wtxn)?, primary_key) { + (None, Some(ref primary_key)) => { + self.put_primary_key(&mut wtxn, primary_key)?; + } + _ => (), + } + + let mut builder = update_builder.index_documents(&mut wtxn, self); + builder.update_format(format); + builder.index_documents_method(method); + + let gzipped = false; + let reader = if gzipped { + Box::new(GzDecoder::new(content)) + } else { + Box::new(content) as Box + }; + + let result = builder.execute(reader, |indexing_step, update_id| { + info!("update {}: {:?}", update_id, indexing_step) + }); + + info!("document addition done: {:?}", result); + + match result { + Ok(addition_result) => wtxn + .commit() + .and(Ok(UpdateResult::DocumentsAddition(addition_result))) + .map_err(Into::into), + Err(e) => Err(e.into()), + } + } + + pub fn clear_documents(&self, update_builder: UpdateBuilder) -> anyhow::Result { + // We must use the write transaction of the update here. + let mut wtxn = self.write_txn()?; + let builder = update_builder.clear_documents(&mut wtxn, self); + + match builder.execute() { + Ok(_count) => wtxn + .commit() + .and(Ok(UpdateResult::Other)) + .map_err(Into::into), + Err(e) => Err(e.into()), + } + } + + pub fn update_settings( + &self, + settings: &Settings, + update_builder: UpdateBuilder, + ) -> anyhow::Result { + // We must use the write transaction of the update here. + let mut wtxn = self.write_txn()?; + let mut builder = update_builder.settings(&mut wtxn, self); + + // We transpose the settings JSON struct into a real setting update. + if let Some(ref names) = settings.searchable_attributes { + match names { + Some(names) => builder.set_searchable_fields(names.clone()), + None => builder.reset_searchable_fields(), + } + } + + // We transpose the settings JSON struct into a real setting update. + if let Some(ref names) = settings.displayed_attributes { + match names { + Some(names) => builder.set_displayed_fields(names.clone()), + None => builder.reset_displayed_fields(), + } + } + + // We transpose the settings JSON struct into a real setting update. + if let Some(ref facet_types) = settings.faceted_attributes { + let facet_types = facet_types.clone().unwrap_or_else(|| HashMap::new()); + builder.set_faceted_fields(facet_types); + } + + // We transpose the settings JSON struct into a real setting update. + if let Some(ref criteria) = settings.criteria { + match criteria { + Some(criteria) => builder.set_criteria(criteria.clone()), + None => builder.reset_criteria(), + } + } + + let result = builder + .execute(|indexing_step, update_id| info!("update {}: {:?}", update_id, indexing_step)); + + match result { + Ok(()) => wtxn + .commit() + .and(Ok(UpdateResult::Other)) + .map_err(Into::into), + Err(e) => Err(e.into()), + } + } + + pub fn update_facets( + &self, + levels: &Facets, + update_builder: UpdateBuilder, + ) -> anyhow::Result { + // We must use the write transaction of the update here. 
+ let mut wtxn = self.write_txn()?; + let mut builder = update_builder.facets(&mut wtxn, self); + if let Some(value) = levels.level_group_size { + builder.level_group_size(value); + } + if let Some(value) = levels.min_level_size { + builder.min_level_size(value); + } + match builder.execute() { + Ok(()) => wtxn + .commit() + .and(Ok(UpdateResult::Other)) + .map_err(Into::into), + Err(e) => Err(e.into()), + } + } + + pub fn delete_documents( + &self, + document_ids: impl io::Read, + update_builder: UpdateBuilder, + ) -> anyhow::Result { + let ids: Vec = serde_json::from_reader(document_ids)?; + let mut txn = self.write_txn()?; + let mut builder = update_builder.delete_documents(&mut txn, self)?; + + // We ignore unexisting document ids + ids.iter().for_each(|id| { builder.delete_external_id(id); }); + + match builder.execute() { + Ok(deleted) => txn + .commit() + .and(Ok(UpdateResult::DocumentDeletion { deleted })) + .map_err(Into::into), + Err(e) => Err(e.into()) + } + } +} diff --git a/src/index_controller/actor_index_controller/index_actor.rs b/src/index_controller/actor_index_controller/index_actor.rs index 27d206c06..96e5010db 100644 --- a/src/index_controller/actor_index_controller/index_actor.rs +++ b/src/index_controller/actor_index_controller/index_actor.rs @@ -13,7 +13,8 @@ use tokio::sync::{mpsc, oneshot, RwLock}; use uuid::Uuid; use super::update_handler::UpdateHandler; -use crate::index_controller::{IndexMetadata, UpdateMeta, updates::{Processed, Failed, Processing}, UpdateResult as UResult}; +use crate::index_controller::{IndexMetadata, UpdateMeta, updates::{Processed, Failed, Processing}}; +use crate::index::UpdateResult as UResult; use crate::option::IndexerOpts; use crate::index::{Index, SearchQuery, SearchResult}; diff --git a/src/index_controller/actor_index_controller/mod.rs b/src/index_controller/actor_index_controller/mod.rs index b893b62e4..188d85580 100644 --- a/src/index_controller/actor_index_controller/mod.rs +++ b/src/index_controller/actor_index_controller/mod.rs @@ -15,6 +15,9 @@ use super::UpdateMeta; use crate::index::{SearchResult, SearchQuery}; use actix_web::web::Bytes; +use crate::index::Settings; +use super::UpdateStatus; + pub struct IndexController { uuid_resolver: uuid_resolver::UuidResolverHandle, index_handle: index_actor::IndexActorHandle, @@ -69,7 +72,7 @@ impl IndexController { Ok(status) } - fn clear_documents(&self, index: String) -> anyhow::Result { + fn clear_documents(&self, index: String) -> anyhow::Result { todo!() } @@ -77,7 +80,7 @@ impl IndexController { todo!() } - fn update_settings(&self, index_uid: String, settings: super::Settings) -> anyhow::Result { + fn update_settings(&self, index_uid: String, settings: Settings) -> anyhow::Result { todo!() } @@ -100,7 +103,7 @@ impl IndexController { todo!() } - fn update_status(&self, index: String, id: u64) -> anyhow::Result> { + fn update_status(&self, index: String, id: u64) -> anyhow::Result> { todo!() } diff --git a/src/index_controller/actor_index_controller/update_actor.rs b/src/index_controller/actor_index_controller/update_actor.rs index e82f01092..384c52098 100644 --- a/src/index_controller/actor_index_controller/update_actor.rs +++ b/src/index_controller/actor_index_controller/update_actor.rs @@ -10,7 +10,8 @@ use uuid::Uuid; use tokio::fs::File; use tokio::io::AsyncWriteExt; -use crate::index_controller::{UpdateMeta, UpdateStatus, UpdateResult}; +use crate::index_controller::{UpdateMeta, UpdateStatus}; +use crate::index::UpdateResult; pub type Result = std::result::Result; type 
UpdateStore = super::update_store::UpdateStore; diff --git a/src/index_controller/actor_index_controller/update_handler.rs b/src/index_controller/actor_index_controller/update_handler.rs index c42a532ea..766dfc5f0 100644 --- a/src/index_controller/actor_index_controller/update_handler.rs +++ b/src/index_controller/actor_index_controller/update_handler.rs @@ -1,17 +1,14 @@ -use std::collections::HashMap; -use std::io; use std::fs::File; use anyhow::Result; -use flate2::read::GzDecoder; use grenad::CompressionType; -use log::info; -use milli::update::{IndexDocumentsMethod, UpdateBuilder, UpdateFormat}; +use milli::update::UpdateBuilder; use crate::index::Index; use rayon::ThreadPool; use crate::index_controller::updates::{Failed, Processed, Processing}; -use crate::index_controller::{Facets, Settings, UpdateMeta, UpdateResult}; +use crate::index_controller::UpdateMeta; +use crate::index::UpdateResult; use crate::option::IndexerOpts; pub struct UpdateHandler { @@ -62,164 +59,6 @@ impl UpdateHandler { update_builder } - fn update_documents( - &self, - format: UpdateFormat, - method: IndexDocumentsMethod, - content: File, - update_builder: UpdateBuilder, - primary_key: Option<&str>, - index: &Index, - ) -> anyhow::Result { - info!("performing document addition"); - // We must use the write transaction of the update here. - let mut wtxn = index.write_txn()?; - - // Set the primary key if not set already, ignore if already set. - match (index.primary_key(&wtxn)?, primary_key) { - (None, Some(ref primary_key)) => { - index.put_primary_key(&mut wtxn, primary_key)?; - } - _ => (), - } - - let mut builder = update_builder.index_documents(&mut wtxn, index); - builder.update_format(format); - builder.index_documents_method(method); - - let gzipped = false; - let reader = if gzipped { - Box::new(GzDecoder::new(content)) - } else { - Box::new(content) as Box - }; - - let result = builder.execute(reader, |indexing_step, update_id| { - info!("update {}: {:?}", update_id, indexing_step) - }); - - info!("document addition done: {:?}", result); - - match result { - Ok(addition_result) => wtxn - .commit() - .and(Ok(UpdateResult::DocumentsAddition(addition_result))) - .map_err(Into::into), - Err(e) => Err(e.into()), - } - } - - fn clear_documents(&self, update_builder: UpdateBuilder, index: &Index) -> anyhow::Result { - // We must use the write transaction of the update here. - let mut wtxn = index.write_txn()?; - let builder = update_builder.clear_documents(&mut wtxn, index); - - match builder.execute() { - Ok(_count) => wtxn - .commit() - .and(Ok(UpdateResult::Other)) - .map_err(Into::into), - Err(e) => Err(e.into()), - } - } - - fn update_settings( - &self, - settings: &Settings, - update_builder: UpdateBuilder, - index: &Index, - ) -> anyhow::Result { - // We must use the write transaction of the update here. - let mut wtxn = index.write_txn()?; - let mut builder = update_builder.settings(&mut wtxn, index); - - // We transpose the settings JSON struct into a real setting update. - if let Some(ref names) = settings.searchable_attributes { - match names { - Some(names) => builder.set_searchable_fields(names.clone()), - None => builder.reset_searchable_fields(), - } - } - - // We transpose the settings JSON struct into a real setting update. - if let Some(ref names) = settings.displayed_attributes { - match names { - Some(names) => builder.set_displayed_fields(names.clone()), - None => builder.reset_displayed_fields(), - } - } - - // We transpose the settings JSON struct into a real setting update. 
- if let Some(ref facet_types) = settings.faceted_attributes { - let facet_types = facet_types.clone().unwrap_or_else(|| HashMap::new()); - builder.set_faceted_fields(facet_types); - } - - // We transpose the settings JSON struct into a real setting update. - if let Some(ref criteria) = settings.criteria { - match criteria { - Some(criteria) => builder.set_criteria(criteria.clone()), - None => builder.reset_criteria(), - } - } - - let result = builder - .execute(|indexing_step, update_id| info!("update {}: {:?}", update_id, indexing_step)); - - match result { - Ok(()) => wtxn - .commit() - .and(Ok(UpdateResult::Other)) - .map_err(Into::into), - Err(e) => Err(e.into()), - } - } - - fn update_facets( - &self, - levels: &Facets, - update_builder: UpdateBuilder, - index: &Index, - ) -> anyhow::Result { - // We must use the write transaction of the update here. - let mut wtxn = index.write_txn()?; - let mut builder = update_builder.facets(&mut wtxn, index); - if let Some(value) = levels.level_group_size { - builder.level_group_size(value); - } - if let Some(value) = levels.min_level_size { - builder.min_level_size(value); - } - match builder.execute() { - Ok(()) => wtxn - .commit() - .and(Ok(UpdateResult::Other)) - .map_err(Into::into), - Err(e) => Err(e.into()), - } - } - - fn delete_documents( - &self, - document_ids: File, - update_builder: UpdateBuilder, - index: &Index, - ) -> anyhow::Result { - let ids: Vec = serde_json::from_reader(document_ids)?; - let mut txn = index.write_txn()?; - let mut builder = update_builder.delete_documents(&mut txn, index)?; - - // We ignore unexisting document ids - ids.iter().for_each(|id| { builder.delete_external_id(id); }); - - match builder.execute() { - Ok(deleted) => txn - .commit() - .and(Ok(UpdateResult::DocumentDeletion { deleted })) - .map_err(Into::into), - Err(e) => Err(e.into()) - } - } pub fn handle_update( &self, @@ -238,18 +77,17 @@ impl UpdateHandler { method, format, primary_key, - } => self.update_documents( + } => index.update_documents( *format, *method, content, update_builder, primary_key.as_deref(), - &index, ), - ClearDocuments => self.clear_documents(update_builder, &index), - DeleteDocuments => self.delete_documents(content, update_builder, &index), - Settings(settings) => self.update_settings(settings, update_builder, &index), - Facets(levels) => self.update_facets(levels, update_builder, &index), + ClearDocuments => index.clear_documents(update_builder), + DeleteDocuments => index.delete_documents(content, update_builder), + Settings(settings) => index.update_settings(settings, update_builder), + Facets(levels) => index.update_facets(levels, update_builder), }; match result { diff --git a/src/index_controller/mod.rs b/src/index_controller/mod.rs index fbe7a161d..16f884137 100644 --- a/src/index_controller/mod.rs +++ b/src/index_controller/mod.rs @@ -1,16 +1,13 @@ pub mod actor_index_controller; mod updates; -use std::collections::HashMap; -use std::num::NonZeroUsize; - -use anyhow::Result; use chrono::{DateTime, Utc}; -use milli::update::{IndexDocumentsMethod, UpdateFormat, DocumentAdditionResult}; -use serde::{Serialize, Deserialize, de::Deserializer}; +use milli::update::{IndexDocumentsMethod, UpdateFormat}; +use serde::{Serialize, Deserialize}; use uuid::Uuid; pub use updates::{Processed, Processing, Failed}; +use crate::index::{UpdateResult, Settings, Facets}; pub type UpdateStatus = updates::UpdateStatus; @@ -37,66 +34,7 @@ pub enum UpdateMeta { Facets(Facets), } -#[derive(Debug, Clone, Serialize, Deserialize)] 
-#[serde(deny_unknown_fields)] -#[serde(rename_all = "camelCase")] -pub struct Facets { - pub level_group_size: Option, - pub min_level_size: Option, -} -fn deserialize_some<'de, T, D>(deserializer: D) -> Result, D::Error> -where T: Deserialize<'de>, - D: Deserializer<'de> -{ - Deserialize::deserialize(deserializer).map(Some) -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -#[serde(rename_all = "camelCase")] -pub struct Settings { - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none", - )] - pub displayed_attributes: Option>>, - - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none", - )] - pub searchable_attributes: Option>>, - - #[serde(default)] - pub faceted_attributes: Option>>, - - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none", - )] - pub criteria: Option>>, -} - -impl Settings { - pub fn cleared() -> Self { - Self { - displayed_attributes: Some(None), - searchable_attributes: Some(None), - faceted_attributes: Some(None), - criteria: Some(None), - } - } -} -#[derive(Debug, Clone, Serialize, Deserialize)] -pub enum UpdateResult { - DocumentsAddition(DocumentAdditionResult), - DocumentDeletion { deleted: usize }, - Other, -} #[derive(Clone, Debug)] pub struct IndexSettings { diff --git a/src/routes/settings/mod.rs b/src/routes/settings/mod.rs index 00bc4220e..93e10674d 100644 --- a/src/routes/settings/mod.rs +++ b/src/routes/settings/mod.rs @@ -2,7 +2,7 @@ use actix_web::{web, HttpResponse, delete, get, post}; use crate::Data; use crate::error::ResponseError; -use crate::index_controller::Settings; +use crate::index::Settings; use crate::helpers::Authentication; #[macro_export] @@ -14,14 +14,14 @@ macro_rules! make_setting_route { use crate::data; use crate::error::ResponseError; use crate::helpers::Authentication; - use crate::index_controller::Settings; + use crate::index::Settings; #[actix_web::delete($route, wrap = "Authentication::Private")] pub async fn delete( data: web::Data, index_uid: web::Path, ) -> Result { - use crate::index_controller::Settings; + use crate::index::Settings; let settings = Settings { $attr: Some(None), ..Default::default()