Rewrite the synonym endpoint
This commit is contained in:
parent cc10804607
commit a4f26e8e48
@@ -179,16 +179,8 @@ impl Index {
         update::push_clear_all(writer, self.updates, self.updates_results)
     }

-    pub fn synonyms_addition(&self) -> update::SynonymsAddition {
-        update::SynonymsAddition::new(
-            self.updates,
-            self.updates_results,
-            self.updates_notifier.clone(),
-        )
-    }
-
-    pub fn synonyms_deletion(&self) -> update::SynonymsDeletion {
-        update::SynonymsDeletion::new(
+    pub fn synonyms_update(&self) -> update::SynonymsUpdate {
+        update::SynonymsUpdate::new(
             self.updates,
             self.updates_results,
             self.updates_notifier.clone(),
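With synonyms_deletion gone, every synonym change goes through the single synonyms_update builder. A minimal sketch of the calling pattern, mirroring the HTTP handler further down in this diff (the db and index handles, the literals, and the plain ? error handling are assumptions for illustration, not code from this commit):

    // Sketch only: enqueue a synonyms update through the renamed builder.
    let mut writer = db.update_write_txn()?;
    let mut synonyms_update = index.synonyms_update();
    synonyms_update.add_synonym("magician".to_string(), vec!["harry potter".to_string()].into_iter());
    let update_id = synonyms_update.finalize(&mut writer)?;
    writer.commit()?;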
@@ -5,8 +5,7 @@ mod documents_deletion;
 mod schema_update;
 mod stop_words_addition;
 mod stop_words_deletion;
-mod synonyms_addition;
-mod synonyms_deletion;
+mod synonyms_update;

 pub use self::clear_all::{apply_clear_all, push_clear_all};
 pub use self::customs_update::{apply_customs_update, push_customs_update};
@@ -17,8 +16,7 @@ pub use self::documents_deletion::{apply_documents_deletion, DocumentsDeletion};
 pub use self::schema_update::{apply_schema_update, push_schema_update};
 pub use self::stop_words_addition::{apply_stop_words_addition, StopWordsAddition};
 pub use self::stop_words_deletion::{apply_stop_words_deletion, StopWordsDeletion};
-pub use self::synonyms_addition::{apply_synonyms_addition, SynonymsAddition};
-pub use self::synonyms_deletion::{apply_synonyms_deletion, SynonymsDeletion};
+pub use self::synonyms_update::{apply_synonyms_update, SynonymsUpdate};

 use std::cmp;
 use std::collections::{BTreeMap, BTreeSet, HashMap};
@@ -82,16 +80,9 @@ impl Update {
         }
     }

-    fn synonyms_addition(data: BTreeMap<String, Vec<String>>) -> Update {
+    fn synonyms_update(data: BTreeMap<String, Vec<String>>) -> Update {
         Update {
-            data: UpdateData::SynonymsAddition(data),
-            enqueued_at: Utc::now(),
-        }
-    }
-
-    fn synonyms_deletion(data: BTreeMap<String, Option<Vec<String>>>) -> Update {
-        Update {
-            data: UpdateData::SynonymsDeletion(data),
+            data: UpdateData::SynonymsUpdate(data),
             enqueued_at: Utc::now(),
         }
     }
@@ -119,8 +110,7 @@ pub enum UpdateData {
     DocumentsAddition(Vec<HashMap<String, serde_json::Value>>),
     DocumentsPartial(Vec<HashMap<String, serde_json::Value>>),
     DocumentsDeletion(Vec<DocumentId>),
-    SynonymsAddition(BTreeMap<String, Vec<String>>),
-    SynonymsDeletion(BTreeMap<String, Option<Vec<String>>>),
+    SynonymsUpdate(BTreeMap<String, Vec<String>>),
     StopWordsAddition(BTreeSet<String>),
     StopWordsDeletion(BTreeSet<String>),
 }
@@ -140,12 +130,9 @@ impl UpdateData {
             UpdateData::DocumentsDeletion(deletion) => UpdateType::DocumentsDeletion {
                 number: deletion.len(),
             },
-            UpdateData::SynonymsAddition(addition) => UpdateType::SynonymsAddition {
+            UpdateData::SynonymsUpdate(addition) => UpdateType::SynonymsUpdate {
                 number: addition.len(),
             },
-            UpdateData::SynonymsDeletion(deletion) => UpdateType::SynonymsDeletion {
-                number: deletion.len(),
-            },
             UpdateData::StopWordsAddition(addition) => UpdateType::StopWordsAddition {
                 number: addition.len(),
             },
@@ -165,8 +152,7 @@ pub enum UpdateType {
     DocumentsAddition { number: usize },
     DocumentsPartial { number: usize },
     DocumentsDeletion { number: usize },
-    SynonymsAddition { number: usize },
-    SynonymsDeletion { number: usize },
+    SynonymsUpdate { number: usize },
     StopWordsAddition { number: usize },
     StopWordsDeletion { number: usize },
 }
@@ -361,25 +347,14 @@ pub fn update_task<'a, 'b>(

             (update_type, result, start.elapsed())
         }
-        UpdateData::SynonymsAddition(synonyms) => {
+        UpdateData::SynonymsUpdate(synonyms) => {
             let start = Instant::now();

-            let update_type = UpdateType::SynonymsAddition {
+            let update_type = UpdateType::SynonymsUpdate {
                 number: synonyms.len(),
             };

-            let result = apply_synonyms_addition(writer, index.main, index.synonyms, synonyms);
-
-            (update_type, result, start.elapsed())
-        }
-        UpdateData::SynonymsDeletion(synonyms) => {
-            let start = Instant::now();
-
-            let update_type = UpdateType::SynonymsDeletion {
-                number: synonyms.len(),
-            };
-
-            let result = apply_synonyms_deletion(writer, index.main, index.synonyms, synonyms);
+            let result = apply_synonyms_update(writer, index.main, index.synonyms, synonyms);

             (update_type, result, start.elapsed())
         }
@@ -1,158 +0,0 @@
-use std::collections::BTreeMap;
-use std::iter::FromIterator;
-
-use fst::{set::OpBuilder, SetBuilder};
-use sdset::SetBuf;
-
-use crate::database::{MainT, UpdateT};
-use crate::automaton::normalize_str;
-use crate::database::{UpdateEvent, UpdateEventsEmitter};
-use crate::update::{next_update_id, Update};
-use crate::{store, MResult};
-
-pub struct SynonymsDeletion {
-    updates_store: store::Updates,
-    updates_results_store: store::UpdatesResults,
-    updates_notifier: UpdateEventsEmitter,
-    synonyms: BTreeMap<String, Option<Vec<String>>>,
-}
-
-impl SynonymsDeletion {
-    pub fn new(
-        updates_store: store::Updates,
-        updates_results_store: store::UpdatesResults,
-        updates_notifier: UpdateEventsEmitter,
-    ) -> SynonymsDeletion {
-        SynonymsDeletion {
-            updates_store,
-            updates_results_store,
-            updates_notifier,
-            synonyms: BTreeMap::new(),
-        }
-    }
-
-    pub fn delete_all_alternatives_of<S: AsRef<str>>(&mut self, synonym: S) {
-        let synonym = normalize_str(synonym.as_ref());
-        self.synonyms.insert(synonym, None);
-    }
-
-    pub fn delete_specific_alternatives_of<S, T, I>(&mut self, synonym: S, alternatives: I)
-    where
-        S: AsRef<str>,
-        T: AsRef<str>,
-        I: Iterator<Item = T>,
-    {
-        let synonym = normalize_str(synonym.as_ref());
-        let value = self.synonyms.entry(synonym).or_insert(None);
-        let alternatives = alternatives.map(|s| s.as_ref().to_lowercase());
-        match value {
-            Some(v) => v.extend(alternatives),
-            None => *value = Some(Vec::from_iter(alternatives)),
-        }
-    }
-
-    pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
-        let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
-        let update_id = push_synonyms_deletion(
-            writer,
-            self.updates_store,
-            self.updates_results_store,
-            self.synonyms,
-        )?;
-        Ok(update_id)
-    }
-}
-
-pub fn push_synonyms_deletion(
-    writer: &mut heed::RwTxn<UpdateT>,
-    updates_store: store::Updates,
-    updates_results_store: store::UpdatesResults,
-    deletion: BTreeMap<String, Option<Vec<String>>>,
-) -> MResult<u64> {
-    let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
-
-    let update = Update::synonyms_deletion(deletion);
-    updates_store.put_update(writer, last_update_id, &update)?;
-
-    Ok(last_update_id)
-}
-
-pub fn apply_synonyms_deletion(
-    writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    synonyms_store: store::Synonyms,
-    deletion: BTreeMap<String, Option<Vec<String>>>,
-) -> MResult<()> {
-    let mut delete_whole_synonym_builder = SetBuilder::memory();
-
-    for (synonym, alternatives) in deletion {
-        match alternatives {
-            Some(alternatives) => {
-                let prev_alternatives = synonyms_store.synonyms(writer, synonym.as_bytes())?;
-                let prev_alternatives = match prev_alternatives {
-                    Some(alternatives) => alternatives,
-                    None => continue,
-                };
-
-                let delta_alternatives = {
-                    let alternatives = SetBuf::from_dirty(alternatives);
-                    let mut builder = SetBuilder::memory();
-                    builder.extend_iter(alternatives).unwrap();
-                    builder.into_inner().and_then(fst::Set::from_bytes).unwrap()
-                };
-
-                let op = OpBuilder::new()
-                    .add(prev_alternatives.stream())
-                    .add(delta_alternatives.stream())
-                    .difference();
-
-                let (alternatives, empty_alternatives) = {
-                    let mut builder = SetBuilder::memory();
-                    let len = builder.get_ref().len();
-                    builder.extend_stream(op).unwrap();
-                    let is_empty = len == builder.get_ref().len();
-                    let bytes = builder.into_inner().unwrap();
-                    let alternatives = fst::Set::from_bytes(bytes).unwrap();
-
-                    (alternatives, is_empty)
-                };
-
-                if empty_alternatives {
-                    delete_whole_synonym_builder.insert(synonym.as_bytes())?;
-                } else {
-                    synonyms_store.put_synonyms(writer, synonym.as_bytes(), &alternatives)?;
-                }
-            }
-            None => {
-                delete_whole_synonym_builder.insert(&synonym).unwrap();
-                synonyms_store.del_synonyms(writer, synonym.as_bytes())?;
-            }
-        }
-    }
-
-    let delta_synonyms = delete_whole_synonym_builder
-        .into_inner()
-        .and_then(fst::Set::from_bytes)
-        .unwrap();
-
-    let synonyms = match main_store.synonyms_fst(writer)? {
-        Some(synonyms) => {
-            let op = OpBuilder::new()
-                .add(synonyms.stream())
-                .add(delta_synonyms.stream())
-                .difference();
-
-            let mut synonyms_builder = SetBuilder::memory();
-            synonyms_builder.extend_stream(op).unwrap();
-            synonyms_builder
-                .into_inner()
-                .and_then(fst::Set::from_bytes)
-                .unwrap()
-        }
-        None => fst::Set::default(),
-    };
-
-    main_store.put_synonyms_fst(writer, &synonyms)?;
-
-    Ok(())
-}
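The whole SynonymsDeletion builder file is removed rather than renamed: there is no per-alternative deletion path anymore. Presumably, trimming or clearing synonyms now amounts to posting a smaller (or empty) map, since apply_synonyms_update below clears the store before writing. A hypothetical illustration of that consequence, not something stated in this commit:

    // If the index previously held "magician" and "wolverine" entries,
    // posting only this map drops "wolverine" because nothing writes it back.
    { "magician": ["harry potter"] }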
@@ -1,6 +1,6 @@
 use std::collections::BTreeMap;

-use fst::{set::OpBuilder, SetBuilder};
+use fst::SetBuilder;
 use sdset::SetBuf;

 use crate::database::{MainT, UpdateT};
@@ -9,20 +9,20 @@ use crate::database::{UpdateEvent, UpdateEventsEmitter};
 use crate::update::{next_update_id, Update};
 use crate::{store, MResult};

-pub struct SynonymsAddition {
+pub struct SynonymsUpdate {
     updates_store: store::Updates,
     updates_results_store: store::UpdatesResults,
     updates_notifier: UpdateEventsEmitter,
     synonyms: BTreeMap<String, Vec<String>>,
 }

-impl SynonymsAddition {
+impl SynonymsUpdate {
     pub fn new(
         updates_store: store::Updates,
         updates_results_store: store::UpdatesResults,
         updates_notifier: UpdateEventsEmitter,
-    ) -> SynonymsAddition {
-        SynonymsAddition {
+    ) -> SynonymsUpdate {
+        SynonymsUpdate {
             updates_store,
             updates_results_store,
             updates_notifier,
@@ -46,7 +46,7 @@ impl SynonymsAddition {

     pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
         let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
-        let update_id = push_synonyms_addition(
+        let update_id = push_synonyms_update(
             writer,
             self.updates_store,
             self.updates_results_store,
@@ -56,7 +56,7 @@ impl SynonymsAddition {
     }
 }

-pub fn push_synonyms_addition(
+pub fn push_synonyms_update(
     writer: &mut heed::RwTxn<UpdateT>,
     updates_store: store::Updates,
     updates_results_store: store::UpdatesResults,
@@ -64,20 +64,20 @@ pub fn push_synonyms_addition(
 ) -> MResult<u64> {
     let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;

-    let update = Update::synonyms_addition(addition);
+    let update = Update::synonyms_update(addition);
     updates_store.put_update(writer, last_update_id, &update)?;

     Ok(last_update_id)
 }

-pub fn apply_synonyms_addition(
+pub fn apply_synonyms_update(
     writer: &mut heed::RwTxn<MainT>,
     main_store: store::Main,
     synonyms_store: store::Synonyms,
     addition: BTreeMap<String, Vec<String>>,
 ) -> MResult<()> {
     let mut synonyms_builder = SetBuilder::memory();
+    synonyms_store.clear(writer)?;

     for (word, alternatives) in addition {
         synonyms_builder.insert(&word).unwrap();

@@ -92,28 +92,11 @@ pub fn apply_synonyms_addition(
         synonyms_store.put_synonyms(writer, word.as_bytes(), &alternatives)?;
     }

-    let delta_synonyms = synonyms_builder
+    let synonyms = synonyms_builder
         .into_inner()
         .and_then(fst::Set::from_bytes)
         .unwrap();

-    let synonyms = match main_store.synonyms_fst(writer)? {
-        Some(synonyms) => {
-            let op = OpBuilder::new()
-                .add(synonyms.stream())
-                .add(delta_synonyms.stream())
-                .r#union();
-
-            let mut synonyms_builder = SetBuilder::memory();
-            synonyms_builder.extend_stream(op).unwrap();
-            synonyms_builder
-                .into_inner()
-                .and_then(fst::Set::from_bytes)
-                .unwrap()
-        }
-        None => delta_synonyms,
-    };
-
     main_store.put_synonyms_fst(writer, &synonyms)?;

     Ok(())
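Together with the synonyms_store.clear(writer)? added above, apply_synonyms_update is now a replace rather than a merge: the previous synonyms FST is no longer unioned in, the posted map is written from scratch. A small illustration of that rebuild, using the same fst calls as the surrounding code and made-up keys (the BTreeMap iterates in sorted order, which is what the builder requires):

    // Illustration of the clear-and-rebuild path, not code from this commit.
    let mut builder = SetBuilder::memory();
    builder.insert("magician").unwrap();
    builder.insert("wolverine").unwrap();
    let synonyms = builder.into_inner().and_then(fst::Set::from_bytes).unwrap();
    main_store.put_synonyms_fst(writer, &synonyms)?;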
@@ -6,7 +6,7 @@ pub enum UpdateOperation {
     ClearAllDocuments,
     DocumentsAddition,
     DocumentsDeletion,
-    SynonymsAddition,
+    SynonymsUpdate,
     SynonymsDeletion,
     StopWordsAddition,
     StopWordsDeletion,
@@ -22,7 +22,7 @@ impl fmt::Display for UpdateOperation {
             ClearAllDocuments => write!(f, "ClearAllDocuments"),
             DocumentsAddition => write!(f, "DocumentsAddition"),
             DocumentsDeletion => write!(f, "DocumentsDeletion"),
-            SynonymsAddition => write!(f, "SynonymsAddition"),
+            SynonymsUpdate => write!(f, "SynonymsUpdate"),
             SynonymsDeletion => write!(f, "SynonymsDelettion"),
             StopWordsAddition => write!(f, "StopWordsAddition"),
             StopWordsDeletion => write!(f, "StopWordsDeletion"),
@@ -60,21 +60,9 @@ pub fn load_routes(app: &mut tide::App<Data>) {
             .post(document::delete_multiple_documents);
     });

-    router.at("/synonyms").nest(|router| {
-        router
-            .at("/")
-            .get(synonym::list)
-            .post(synonym::create)
-            .delete(synonym::clear);
-
-        router
-            .at("/:synonym")
-            .get(synonym::get)
-            .put(synonym::update)
-            .delete(synonym::delete);
-
-        router.at("/batch").post(synonym::batch_write);
-    });
+    router.at("/synonyms")
+        .get(synonym::get)
+        .post(synonym::update);

     router.at("/stop-words").nest(|router| {
         router
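The nested per-synonym routes (/, /:synonym, /batch) collapse into a single resource: GET returns the whole map and POST replaces it. Under whatever index prefix these routes are mounted on (not visible in this hunk), the surface is roughly:

    GET  .../synonyms    -> synonym::get, the full map of synonyms to their alternatives
    POST .../synonyms    -> synonym::update, body is the full map as JSON, e.g.
                            { "magician": ["harry potter"], "hp": ["harry potter", "hewlett packard"] }

(The example keys and values are illustrative only.)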
@@ -1,9 +1,9 @@
 use std::collections::HashMap;

 use http::StatusCode;
-use serde::{Deserialize, Serialize};
 use tide::response::IntoResponse;
 use tide::{Context, Response};
+use indexmap::IndexMap;

 use crate::error::{ResponseError, SResult};
 use crate::helpers::tide::ContextExt;
@@ -11,23 +11,7 @@ use crate::models::token::ACL::*;
 use crate::routes::document::IndexUpdateResponse;
 use crate::Data;

-#[derive(Clone, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum Synonym {
-    OneWay(SynonymOneWay),
-    MultiWay { synonyms: Vec<String> },
-}
-
-#[derive(Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct SynonymOneWay {
-    pub input: String,
-    pub synonyms: Vec<String>,
-}
-
-pub type Synonyms = Vec<Synonym>;
-
-pub async fn list(ctx: Context<Data>) -> SResult<Response> {
+pub async fn get(ctx: Context<Data>) -> SResult<Response> {
     ctx.is_allowed(SettingsRead)?;
     let index = ctx.index()?;

@@ -42,7 +26,7 @@ pub async fn list(ctx: Context<Data>) -> SResult<Response> {
     let synonyms_fst = synonyms_fst.unwrap_or_default();
     let synonyms_list = synonyms_fst.stream().into_strs().map_err(ResponseError::internal)?;

-    let mut response = HashMap::new();
+    let mut response = IndexMap::new();

     let index_synonyms = &index.synonyms;

@@ -60,171 +44,23 @@ pub async fn list(ctx: Context<Data>) -> SResult<Response> {
     Ok(tide::response::json(response))
 }

-pub async fn get(ctx: Context<Data>) -> SResult<Response> {
-    ctx.is_allowed(SettingsRead)?;
-    let synonym = ctx.url_param("synonym")?;
-    let index = ctx.index()?;
-
-    let db = &ctx.state().db;
-    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
-
-    let synonym_list = index
-        .synonyms
-        .synonyms(&reader, synonym.as_bytes())
-        .map_err(ResponseError::internal)?;
-
-    let list = match synonym_list {
-        Some(list) => list.stream().into_strs().map_err(ResponseError::internal)?,
-        None => Vec::new(),
-    };
-
-    Ok(tide::response::json(list))
-}
-
-pub async fn create(mut ctx: Context<Data>) -> SResult<Response> {
-    ctx.is_allowed(SettingsWrite)?;
-
-    let data: Synonym = ctx.body_json().await.map_err(ResponseError::bad_request)?;
-
-    let index = ctx.index()?;
-
-    let db = &ctx.state().db;
-    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
-
-    let mut synonyms_addition = index.synonyms_addition();
-
-    match data.clone() {
-        Synonym::OneWay(content) => {
-            synonyms_addition.add_synonym(content.input, content.synonyms.into_iter())
-        }
-        Synonym::MultiWay { mut synonyms } => {
-            if synonyms.len() > 1 {
-                for _ in 0..synonyms.len() {
-                    let (first, elems) = synonyms.split_first().unwrap();
-                    synonyms_addition.add_synonym(first, elems.iter());
-                    synonyms.rotate_left(1);
-                }
-            }
-        }
-    }
-
-    let update_id = synonyms_addition
-        .finalize(&mut writer)
-        .map_err(ResponseError::internal)?;
-
-    writer.commit().map_err(ResponseError::internal)?;
-
-    let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
-}
-
 pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
     ctx.is_allowed(SettingsWrite)?;
-    let synonym = ctx.url_param("synonym")?;
+
+    let data: HashMap<String, Vec<String>> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
+
     let index = ctx.index()?;
-    let data: Vec<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;

     let db = &ctx.state().db;
     let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;

-    let mut synonyms_addition = index.synonyms_addition();
-    synonyms_addition.add_synonym(synonym.clone(), data.clone().into_iter());
-    let update_id = synonyms_addition
-        .finalize(&mut writer)
-        .map_err(ResponseError::internal)?;
-
-    writer.commit().map_err(ResponseError::internal)?;
-
-    let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
-}
-
-pub async fn delete(ctx: Context<Data>) -> SResult<Response> {
-    ctx.is_allowed(SettingsWrite)?;
-    let synonym = ctx.url_param("synonym")?;
-    let index = ctx.index()?;
-
-    let db = &ctx.state().db;
-    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
-
-    let mut synonyms_deletion = index.synonyms_deletion();
-    synonyms_deletion.delete_all_alternatives_of(synonym);
-    let update_id = synonyms_deletion
-        .finalize(&mut writer)
-        .map_err(ResponseError::internal)?;
-
-    writer.commit().map_err(ResponseError::internal)?;
-
-    let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
-}
-
-pub async fn batch_write(mut ctx: Context<Data>) -> SResult<Response> {
-    ctx.is_allowed(SettingsWrite)?;
-
-    let data: Synonyms = ctx.body_json().await.map_err(ResponseError::bad_request)?;
-
-    let index = ctx.index()?;
-
-    let db = &ctx.state().db;
-    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
-
-    let mut synonyms_addition = index.synonyms_addition();
-    for raw in data {
-        match raw {
-            Synonym::OneWay(content) => {
-                synonyms_addition.add_synonym(content.input, content.synonyms.into_iter())
-            }
-            Synonym::MultiWay { mut synonyms } => {
-                if synonyms.len() > 1 {
-                    for _ in 0..synonyms.len() {
-                        let (first, elems) = synonyms.split_first().unwrap();
-                        synonyms_addition.add_synonym(first, elems.iter());
-                        synonyms.rotate_left(1);
-                    }
-                }
-            }
-        }
-    }
-    let update_id = synonyms_addition
-        .finalize(&mut writer)
-        .map_err(ResponseError::internal)?;
-
-    writer.commit().map_err(ResponseError::internal)?;
-
-    let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
-}
-
-pub async fn clear(ctx: Context<Data>) -> SResult<Response> {
-    ctx.is_allowed(SettingsWrite)?;
-    let index = ctx.index()?;
-
-    let db = &ctx.state().db;
-    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
-    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
-
-    let synonyms_fst = index
-        .main
-        .synonyms_fst(&reader)
-        .map_err(ResponseError::internal)?;
-
-    let synonyms_fst = synonyms_fst.unwrap_or_default();
-    let synonyms_list = synonyms_fst.stream().into_strs().map_err(ResponseError::internal)?;
-
-    let mut synonyms_deletion = index.synonyms_deletion();
-    for synonym in synonyms_list {
-        synonyms_deletion.delete_all_alternatives_of(synonym);
-    }
-    let update_id = synonyms_deletion
+    let mut synonyms_update = index.synonyms_update();
+
+    for (input, synonyms) in data {
+        synonyms_update.add_synonym(input, synonyms.into_iter());
+    }
+
+    let update_id = synonyms_update
         .finalize(&mut writer)
         .map_err(ResponseError::internal)?;
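The rewritten handler accepts the whole map in one request and enqueues a single update, and the listing response is now collected into an IndexMap so the returned object keeps a stable entry order. Like the other settings routes it answers 202 Accepted with the id of the enqueued update. A sketch of the exchange with made-up values (the exact JSON field name comes from IndexUpdateResponse, which is not part of this diff):

    POST .../synonyms
    { "magician": ["harry potter"] }

    HTTP/1.1 202 Accepted
    { "update_id": 2 }    // field name assumed from IndexUpdateResponse { update_id }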