pub mod error;
pub mod index_store;
pub mod meta_store;

use std::convert::{TryFrom, TryInto};
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;

use error::{IndexResolverError, Result};
use index_store::{IndexStore, MapIndexStore};
use meilisearch_error::ResponseError;
use meta_store::{HeedMetaStore, IndexMetaStore};
use milli::heed::Env;
use milli::update::{DocumentDeletionResult, IndexerConfig};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::task::spawn_blocking;
use uuid::Uuid;

use crate::index::{error::Result as IndexResult, Index};
use crate::options::IndexerOpts;
use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult};
use crate::update_file_store::UpdateFileStore;

use self::meta_store::IndexMeta;

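/// The concrete resolver type used at runtime, backed by the heed meta store and the map index
/// store.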
pub type HardStateIndexResolver = IndexResolver<HeedMetaStore, MapIndexStore>;

/// An index uid is composed only of ASCII alphanumeric characters, `-` and `_`, and is between 1
/// and 400 bytes long.
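///
/// # Example
///
/// A minimal sketch of the validation rules (not compiled as a doc-test; the path assumes this
/// module lives at `crate::index_resolver`):
///
/// ```ignore
/// use crate::index_resolver::IndexUid;
///
/// // ASCII alphanumerics, `-` and `_` are accepted, up to 400 bytes.
/// assert!(IndexUid::new("movies-2022".to_string()).is_ok());
/// // Anything else (here, spaces) is rejected, as is the empty string.
/// assert!(IndexUid::new("not a valid uid".to_string()).is_err());
/// assert!(IndexUid::new(String::new()).is_err());
/// ```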
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub struct IndexUid(#[cfg_attr(test, proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String);

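/// Builds the runtime `HardStateIndexResolver`: a `HeedMetaStore` opened in `meta_env` and a
/// `MapIndexStore` rooted at `path`, wired to the given update file store.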
pub fn create_index_resolver(
    path: impl AsRef<Path>,
    index_size: usize,
    indexer_opts: &IndexerOpts,
    meta_env: Arc<milli::heed::Env>,
    file_store: UpdateFileStore,
) -> anyhow::Result<HardStateIndexResolver> {
    let uuid_store = HeedMetaStore::new(meta_env)?;
    let index_store = MapIndexStore::new(&path, index_size, indexer_opts)?;
    Ok(IndexResolver::new(uuid_store, index_store, file_store))
}

impl IndexUid {
    pub fn new(uid: String) -> Result<Self> {
        if !uid
            .chars()
            .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
            || !(1..=400).contains(&uid.len())
        {
            Err(IndexResolverError::BadlyFormatted(uid))
        } else {
            Ok(Self(uid))
        }
    }

    #[cfg(test)]
    pub fn new_unchecked(s: impl AsRef<str>) -> Self {
        Self(s.as_ref().to_string())
    }

    pub fn into_inner(self) -> String {
        self.0
    }

    /// Returns a reference to the inner str.
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl std::ops::Deref for IndexUid {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl TryInto<IndexUid> for String {
    type Error = IndexResolverError;

    fn try_into(self) -> Result<IndexUid> {
        IndexUid::new(self)
    }
}

impl FromStr for IndexUid {
    type Err = IndexResolverError;

    fn from_str(s: &str) -> Result<IndexUid> {
        IndexUid::new(s.to_string())
    }
}

pub struct IndexResolver<U, I> {
    index_uuid_store: U,
    index_store: I,
    pub file_store: UpdateFileStore,
}

impl IndexResolver<HeedMetaStore, MapIndexStore> {
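    /// Loads a dump: restores the meta store from `src` into `env`, then loads every index found
    /// under `src/indexes` into `dst`.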
    pub fn load_dump(
        src: impl AsRef<Path>,
        dst: impl AsRef<Path>,
        index_db_size: usize,
        env: Arc<Env>,
        indexer_opts: &IndexerOpts,
    ) -> anyhow::Result<()> {
        HeedMetaStore::load_dump(&src, env)?;
        let indexes_path = src.as_ref().join("indexes");
        let indexes = indexes_path.read_dir()?;
        let indexer_config = IndexerConfig::try_from(indexer_opts)?;
        for index in indexes {
            Index::load_dump(&index?.path(), &dst, index_db_size, &indexer_config)?;
        }

        Ok(())
    }
}

impl<U, I> IndexResolver<U, I>
where
    U: IndexMetaStore,
    I: IndexStore,
{
    pub fn new(index_uuid_store: U, index_store: I, file_store: UpdateFileStore) -> Self {
        Self {
            index_uuid_store,
            index_store,
            file_store,
        }
    }

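    /// Processes a batch of document addition tasks. The target index and indexing options are
    /// taken from the first task of the batch, and every task receives the same `TaskEvent`
    /// (success or failure) once the update completes.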
    pub async fn process_document_addition_batch(&self, mut tasks: Vec<Task>) -> Vec<Task> {
        fn get_content_uuid(task: &Task) -> Uuid {
            match task {
                Task {
                    content: TaskContent::DocumentAddition { content_uuid, .. },
                    ..
                } => *content_uuid,
                _ => panic!("unexpected task in the document addition batch"),
            }
        }

        let content_uuids = tasks.iter().map(get_content_uuid).collect::<Vec<_>>();

        match tasks.first() {
            Some(Task {
                index_uid: Some(ref index_uid),
                id,
                content:
                    TaskContent::DocumentAddition {
                        merge_strategy,
                        primary_key,
                        allow_index_creation,
                        ..
                    },
                ..
            }) => {
                let primary_key = primary_key.clone();
                let method = *merge_strategy;

                let index = if *allow_index_creation {
                    self.get_or_create_index(index_uid.clone(), *id).await
                } else {
                    self.get_index(index_uid.as_str().to_string()).await
                };

                // If the index doesn't exist and we are not allowed to create it with the first
                // task, we must fail the whole batch.
                let now = OffsetDateTime::now_utc();
                let index = match index {
                    Ok(index) => index,
                    Err(e) => {
                        let error = ResponseError::from(e);
                        for task in tasks.iter_mut() {
                            task.events.push(TaskEvent::Failed {
                                error: error.clone(),
                                timestamp: now,
                            });
                        }
                        return tasks;
                    }
                };

                let file_store = self.file_store.clone();
                let result = spawn_blocking(move || {
                    index.update_documents(
                        method,
                        primary_key,
                        file_store,
                        content_uuids.into_iter(),
                    )
                })
                .await;

                let event = match result {
                    Ok(Ok(result)) => TaskEvent::Succeeded {
                        timestamp: OffsetDateTime::now_utc(),
                        result: TaskResult::DocumentAddition {
                            indexed_documents: result.indexed_documents,
                        },
                    },
                    Ok(Err(e)) => TaskEvent::Failed {
                        timestamp: OffsetDateTime::now_utc(),
                        error: e.into(),
                    },
                    Err(e) => TaskEvent::Failed {
                        timestamp: OffsetDateTime::now_utc(),
                        error: IndexResolverError::from(e).into(),
                    },
                };

                for task in tasks.iter_mut() {
                    task.events.push(event.clone());
                }

                tasks
            }
            _ => panic!("invalid batch!"),
        }
    }

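    /// Processes a single task: document deletion, settings update, or index
    /// creation/update/deletion. Document additions are batched and handled by
    /// `process_document_addition_batch`; passing one here panics.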
    pub async fn process_task(&self, task: &Task) -> Result<TaskResult> {
        let index_uid = task.index_uid.clone();
        match &task.content {
            TaskContent::DocumentAddition { .. } => panic!("updates should be handled by batch"),
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => {
                let ids = ids.clone();
                let index = self.get_index(index_uid.unwrap().into_inner()).await?;

                let DocumentDeletionResult {
                    deleted_documents, ..
                } = spawn_blocking(move || index.delete_documents(&ids)).await??;

                Ok(TaskResult::DocumentDeletion { deleted_documents })
            }
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => {
                let index = self.get_index(index_uid.unwrap().into_inner()).await?;
                let deleted_documents = spawn_blocking(move || -> IndexResult<u64> {
                    let number_documents = index.stats()?.number_of_documents;
                    index.clear_documents()?;
                    Ok(number_documents)
                })
                .await??;

                Ok(TaskResult::ClearAll { deleted_documents })
            }
            TaskContent::SettingsUpdate {
                settings,
                is_deletion,
                allow_index_creation,
            } => {
                let index = if *is_deletion || !*allow_index_creation {
                    self.get_index(index_uid.unwrap().into_inner()).await?
                } else {
                    self.get_or_create_index(index_uid.unwrap(), task.id)
                        .await?
                };

                let settings = settings.clone();
                spawn_blocking(move || index.update_settings(&settings.check())).await??;

                Ok(TaskResult::Other)
            }
            TaskContent::IndexDeletion => {
                let index = self.delete_index(index_uid.unwrap().into_inner()).await?;

                let deleted_documents = spawn_blocking(move || -> IndexResult<u64> {
                    Ok(index.stats()?.number_of_documents)
                })
                .await??;

                Ok(TaskResult::ClearAll { deleted_documents })
            }
            TaskContent::IndexCreation { primary_key } => {
                let index = self.create_index(index_uid.unwrap(), task.id).await?;

                if let Some(primary_key) = primary_key {
                    let primary_key = primary_key.clone();
                    spawn_blocking(move || index.update_primary_key(primary_key)).await??;
                }

                Ok(TaskResult::Other)
            }
            TaskContent::IndexUpdate { primary_key } => {
                let index = self.get_index(index_uid.unwrap().into_inner()).await?;

                if let Some(primary_key) = primary_key {
                    let primary_key = primary_key.clone();
                    spawn_blocking(move || index.update_primary_key(primary_key)).await??;
                }

                Ok(TaskResult::Other)
            }
            _ => unreachable!("Invalid task for index resolver"),
        }
    }

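    /// Dumps every index, then the uid -> uuid mappings, to `path`.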
    pub async fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
        for (_, index) in self.list().await? {
            index.dump(&path)?;
        }
        self.index_uuid_store.dump(path.as_ref().to_owned()).await?;
        Ok(())
    }

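    /// Creates a new index for `uid`. If registering the index in the meta store fails, the
    /// freshly created index is deleted again before the error is returned.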
    async fn create_index(&self, uid: IndexUid, creation_task_id: TaskId) -> Result<Index> {
        match self.index_uuid_store.get(uid.into_inner()).await? {
            (uid, Some(_)) => Err(IndexResolverError::IndexAlreadyExists(uid)),
            (uid, None) => {
                let uuid = Uuid::new_v4();
                let index = self.index_store.create(uuid).await?;
                match self
                    .index_uuid_store
                    .insert(
                        uid,
                        IndexMeta {
                            uuid,
                            creation_task_id,
                        },
                    )
                    .await
                {
                    Err(e) => {
                        match self.index_store.delete(uuid).await {
                            Ok(Some(index)) => {
                                index.close();
                            }
                            Ok(None) => (),
                            Err(e) => log::error!("Error while deleting index: {:?}", e),
                        }
                        Err(e)
                    }
                    Ok(()) => Ok(index),
                }
            }
        }
    }

    /// Get or create an index with name `uid`.
    pub async fn get_or_create_index(&self, uid: IndexUid, task_id: TaskId) -> Result<Index> {
        match self.create_index(uid, task_id).await {
            Ok(index) => Ok(index),
            Err(IndexResolverError::IndexAlreadyExists(uid)) => self.get_index(uid).await,
            Err(e) => Err(e),
        }
    }

    pub async fn list(&self) -> Result<Vec<(String, Index)>> {
        let uuids = self.index_uuid_store.list().await?;
        let mut indexes = Vec::new();
        for (name, IndexMeta { uuid, .. }) in uuids {
            match self.index_store.get(uuid).await? {
                Some(index) => indexes.push((name, index)),
                None => {
                    // We found an entry for an index that doesn't exist; remove it from the uuid
                    // store.
                    let _ = self.index_uuid_store.delete(name).await;
                }
            }
        }

        Ok(indexes)
    }

    pub async fn delete_index(&self, uid: String) -> Result<Index> {
        match self.index_uuid_store.delete(uid.clone()).await? {
            Some(IndexMeta { uuid, .. }) => match self.index_store.delete(uuid).await? {
                Some(index) => {
                    index.clone().close();
                    Ok(index)
                }
                None => Err(IndexResolverError::UnexistingIndex(uid)),
            },
            None => Err(IndexResolverError::UnexistingIndex(uid)),
        }
    }

    pub async fn get_index(&self, uid: String) -> Result<Index> {
        match self.index_uuid_store.get(uid).await? {
            (name, Some(IndexMeta { uuid, .. })) => {
                match self.index_store.get(uuid).await? {
                    Some(index) => Ok(index),
                    None => {
                        // For some reason the uuid points to an index that doesn't exist; return
                        // an error and remove the uuid from the uuid store.
                        let _ = self.index_uuid_store.delete(name.clone()).await;
                        Err(IndexResolverError::UnexistingIndex(name))
                    }
                }
            }
            (name, _) => Err(IndexResolverError::UnexistingIndex(name)),
        }
    }

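    /// Returns the id of the task that created the index `index_uid`, or an `UnexistingIndex`
    /// error if no such index is registered.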
    pub async fn get_index_creation_task_id(&self, index_uid: String) -> Result<TaskId> {
        let (uid, meta) = self.index_uuid_store.get(index_uid).await?;
        meta.map(
            |IndexMeta {
                 creation_task_id, ..
             }| creation_task_id,
        )
        .ok_or(IndexResolverError::UnexistingIndex(uid))
    }
}

#[cfg(test)]
mod test {
    use std::{collections::BTreeMap, vec::IntoIter};

    use super::*;

    use futures::future::ok;
    use milli::update::{DocumentAdditionResult, IndexDocumentsMethod};
    use nelson::Mocker;
    use proptest::prelude::*;

    use crate::{
        index::{
            error::{IndexError, Result as IndexResult},
            Checked, IndexMeta, IndexStats, Settings,
        },
        tasks::{batch::Batch, BatchHandler},
    };
    use index_store::MockIndexStore;
    use meta_store::MockIndexMetaStore;

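    // Property test: for an arbitrary task and arbitrary `index_exists` / `index_op_fails` flags,
    // mock the index store and meta store accordingly, run the task through the resolver as a
    // batch, and check that the last `TaskEvent` pushed on the task is `Failed` or `Succeeded`
    // as expected.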
    // TODO: this test is ignored because it has become too complex to maintain; implement
    // handler logic tests instead.
    proptest! {
        #[test]
        #[ignore]
        fn test_process_task(
            task in any::<Task>().prop_filter("IndexUid should be Some", |s| s.index_uid.is_some()),
            index_exists in any::<bool>(),
            index_op_fails in any::<bool>(),
            any_int in any::<u64>(),
        ) {
            actix_rt::System::new().block_on(async move {
                let uuid = Uuid::new_v4();
                let mut index_store = MockIndexStore::new();

                let mocker = Mocker::default();

                // Return arbitrary data from the index calls.
                match &task.content {
                    TaskContent::DocumentAddition{primary_key, ..} => {
                        let result = move || if !index_op_fails {
                            Ok(DocumentAdditionResult { indexed_documents: any_int, number_of_documents: any_int })
                        } else {
                            // return this error because it's easy to generate...
                            Err(IndexError::DocumentNotFound("a doc".into()))
                        };
                        if primary_key.is_some() {
                            mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
                                .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }));
                        }
                        mocker.when::<(IndexDocumentsMethod, Option<String>, UpdateFileStore, IntoIter<Uuid>), IndexResult<DocumentAdditionResult>>("update_documents")
                            .then(move |(_, _, _, _)| result());
                    }
                    TaskContent::SettingsUpdate{..} => {
                        let result = move || if !index_op_fails {
                            Ok(())
                        } else {
                            // return this error because it's easy to generate...
                            Err(IndexError::DocumentNotFound("a doc".into()))
                        };
                        mocker.when::<&Settings<Checked>, IndexResult<()>>("update_settings")
                            .then(move |_| result());
                    }
                    TaskContent::DocumentDeletion(DocumentDeletion::Ids(_ids)) => {
                        let result = move || if !index_op_fails {
                            Ok(DocumentDeletionResult { deleted_documents: any_int as u64, remaining_documents: any_int as u64 })
                        } else {
                            // return this error because it's easy to generate...
                            Err(IndexError::DocumentNotFound("a doc".into()))
                        };

                        mocker.when::<&[String], IndexResult<DocumentDeletionResult>>("delete_documents")
                            .then(move |_| result());
                    },
                    TaskContent::DocumentDeletion(DocumentDeletion::Clear) => {
                        let result = move || if !index_op_fails {
                            Ok(())
                        } else {
                            // return this error because it's easy to generate...
                            Err(IndexError::DocumentNotFound("a doc".into()))
                        };
                        mocker.when::<(), IndexResult<()>>("clear_documents")
                            .then(move |_| result());
                    },
                    TaskContent::IndexDeletion => {
                        mocker.when::<(), ()>("close")
                            .times(index_exists as usize)
                            .then(move |_| ());
                    }
                    TaskContent::IndexUpdate { primary_key }
                    | TaskContent::IndexCreation { primary_key } => {
                        if primary_key.is_some() {
                            let result = move || if !index_op_fails {
                                Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })
                            } else {
                                // return this error because it's easy to generate...
                                Err(IndexError::DocumentNotFound("a doc".into()))
                            };
                            mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
                                .then(move |_| result());
                        }
                    }
                    TaskContent::Dump { .. } => { }
                }

                mocker.when::<(), IndexResult<IndexStats>>("stats")
                    .then(|()| Ok(IndexStats { size: 0, number_of_documents: 0, is_indexing: Some(false), field_distribution: BTreeMap::new() }));

                let index = Index::mock(mocker);

                match &task.content {
                    // a nonexistent index should trigger an index creation in the following cases:
                    TaskContent::DocumentAddition { allow_index_creation: true, .. }
                    | TaskContent::SettingsUpdate { allow_index_creation: true, is_deletion: false, .. }
                    | TaskContent::IndexCreation { .. } if !index_exists => {
                        index_store
                            .expect_create()
                            .once()
                            .withf(move |&found| !index_exists || found == uuid)
                            .returning(move |_| Box::pin(ok(index.clone())));
                    },
                    TaskContent::IndexDeletion => {
                        index_store
                            .expect_delete()
                            // this is called only if the index exists
                            .times(index_exists as usize)
                            .withf(move |&found| !index_exists || found == uuid)
                            .returning(move |_| Box::pin(ok(Some(index.clone()))));
                    }
                    // if the index already exists, create_index will return an error
                    TaskContent::IndexCreation { .. } if index_exists => (),
                    TaskContent::Dump { .. } => (),
                    // The index exists and get should be called
                    _ if index_exists => {
                        index_store
                            .expect_get()
                            .once()
                            .withf(move |&found| found == uuid)
                            .returning(move |_| Box::pin(ok(Some(index.clone()))));
                    },
                    // the index doesn't exist and shouldn't be created: the uuid store will return an error, and get_index will never be called.
                    _ => (),
                }

                let mut uuid_store = MockIndexMetaStore::new();
                uuid_store
                    .expect_get()
                    .returning(move |uid| {
                        Box::pin(ok((uid, index_exists.then(|| crate::index_resolver::meta_store::IndexMeta {uuid, creation_task_id: 0 }))))
                    });

                // we should only be creating an index if it doesn't already exist
                uuid_store
                    .expect_insert()
                    .withf(move |_, _| !index_exists)
                    .returning(|_, _| Box::pin(ok(())));

                uuid_store
                    .expect_delete()
                    .times(matches!(task.content, TaskContent::IndexDeletion) as usize)
                    .returning(move |_| Box::pin(ok(index_exists.then(|| crate::index_resolver::meta_store::IndexMeta { uuid, creation_task_id: 0}))));

                let mocker = Mocker::default();
                let update_file_store = UpdateFileStore::mock(mocker);
                let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store);

                let batch = Batch { id: Some(1), created_at: OffsetDateTime::now_utc(), content: crate::tasks::batch::BatchContent::IndexUpdate(task.clone()) };
                if index_resolver.accept(&batch) {
                    let result = index_resolver.process_batch(batch).await;

                    // Test for some expected output scenarios:
                    // Index creation and deletion cannot fail because of a failed index op, since they
                    // don't perform index ops.
                    if index_op_fails && !matches!(task.content, TaskContent::IndexDeletion | TaskContent::IndexCreation { primary_key: None } | TaskContent::IndexUpdate { primary_key: None } | TaskContent::Dump { .. })
                        || (index_exists && matches!(task.content, TaskContent::IndexCreation { .. }))
                        || (!index_exists && matches!(task.content, TaskContent::IndexDeletion
                            | TaskContent::DocumentDeletion(_)
                            | TaskContent::SettingsUpdate { is_deletion: true, ..}
                            | TaskContent::SettingsUpdate { allow_index_creation: false, ..}
                            | TaskContent::DocumentAddition { allow_index_creation: false, ..}
                            | TaskContent::IndexUpdate { .. } ))
                    {
                        assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Failed { .. }), "{:?}", result);
                    } else {
                        assert!(matches!(result.content.first().unwrap().events.last().unwrap(), TaskEvent::Succeeded { .. }), "{:?}", result);
                    }
                }
            });
        }
    }
}