implement index mock

mpostma 2021-10-04 12:15:21 +02:00
parent 607e28749a
commit 4835d82a0b
10 changed files with 386 additions and 377 deletions

View File

@@ -13,7 +13,7 @@ use crate::index::update_handler::UpdateHandler
 use crate::index::updates::apply_settings_to_builder;
 use super::error::Result;
-use super::{Index, Settings, Unchecked};
+use super::{index::Index, Settings, Unchecked};
 #[derive(Serialize, Deserialize)]
 struct DumpMeta {

View File

@@ -1,97 +1,159 @@
-use std::collections::{BTreeSet, HashSet};
-use std::fs::create_dir_all;
-use std::marker::PhantomData;
-use std::ops::Deref;
-use std::path::Path;
-use std::sync::Arc;
-use chrono::{DateTime, Utc};
-use heed::{EnvOpenOptions, RoTxn};
-use milli::update::Setting;
-use milli::{obkv_to_json, FieldDistribution, FieldId};
-use serde::{Deserialize, Serialize};
-use serde_json::{Map, Value};
-use error::Result;
 pub use search::{default_crop_length, SearchQuery, SearchResult, DEFAULT_SEARCH_LIMIT};
 pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecked};
-use uuid::Uuid;
-use crate::index_controller::update_file_store::UpdateFileStore;
-use crate::EnvSizer;
-use self::error::IndexError;
-use self::update_handler::UpdateHandler;
 pub mod error;
 pub mod update_handler;
 mod dump;
 mod search;
 mod updates;
-pub type Document = Map<String, Value>;
-#[derive(Debug, Serialize, Deserialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct IndexMeta {
-    created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
-    pub primary_key: Option<String>,
+mod index;
+pub use index::{Document, IndexMeta, IndexStats};
+#[cfg(not(test))]
+pub use index::Index;
+#[cfg(test)]
+pub use test::MockIndex as Index;
+#[cfg(test)]
+mod test {
+    use std::any::Any;
+    use std::collections::HashMap;
+    use std::path::PathBuf;
+    use std::sync::Mutex;
+    use std::{path::Path, sync::Arc};
+    use serde_json::{Map, Value};
+    use uuid::Uuid;
+    use crate::index_controller::update_file_store::UpdateFileStore;
+    use crate::index_controller::updates::status::{Failed, Processed, Processing};
+    use super::{Checked, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings};
+    use super::index::Index;
+    use super::error::Result;
+    use super::update_handler::UpdateHandler;
+    #[derive(Debug, Clone)]
+    pub enum MockIndex {
+        Vrai(Index),
+        Faux(Arc<FauxIndex>),
 }
-#[derive(Serialize, Debug)]
-#[serde(rename_all = "camelCase")]
-pub struct IndexStats {
-    #[serde(skip)]
-    pub size: u64,
-    pub number_of_documents: u64,
-    /// Whether the current index is performing an update. It is initially `None` when the
-    /// index returns it, since it is the `UpdateStore` that knows what index is currently indexing. It is
-    /// later set to either true or false, we we retrieve the information from the `UpdateStore`
-    pub is_indexing: Option<bool>,
-    pub field_distribution: FieldDistribution,
+    pub struct Stub<A, R> {
+        name: String,
+        times: Option<usize>,
+        stub: Box<dyn Fn(A) -> R + Sync + Send>,
+        exact: bool,
 }
-impl IndexMeta {
-    pub fn new(index: &Index) -> Result<Self> {
-        let txn = index.read_txn()?;
-        Self::new_txn(index, &txn)
+    impl<A, R> Drop for Stub<A, R> {
+        fn drop(&mut self) {
+            if self.exact {
+                if !matches!(self.times, Some(0)) {
+                    panic!("{} not called the correct amount of times", self.name);
+                }
    }
-    fn new_txn(index: &Index, txn: &heed::RoTxn) -> Result<Self> {
-        let created_at = index.created_at(txn)?;
-        let updated_at = index.updated_at(txn)?;
-        let primary_key = index.primary_key(txn)?.map(String::from);
-        Ok(Self {
-            created_at,
-            updated_at,
-            primary_key,
-        })
    }
 }
-#[derive(Clone, derivative::Derivative)]
-#[derivative(Debug)]
-pub struct Index {
-    pub uuid: Uuid,
-    #[derivative(Debug = "ignore")]
-    pub inner: Arc<milli::Index>,
-    #[derivative(Debug = "ignore")]
-    update_file_store: Arc<UpdateFileStore>,
-    #[derivative(Debug = "ignore")]
-    update_handler: Arc<UpdateHandler>,
+    impl<A, R> Stub<A, R> {
+        fn call(&mut self, args: A) -> R {
+            match self.times {
+                Some(0) => panic!("{} called to many times", self.name),
+                Some(ref mut times) => { *times -= 1; },
+                None => (),
 }
-impl Deref for Index {
-    type Target = milli::Index;
-    fn deref(&self) -> &Self::Target {
-        self.inner.as_ref()
+            (self.stub)(args)
    }
 }
-impl Index {
+    #[derive(Debug, Default)]
+    struct StubStore {
+        inner: Arc<Mutex<HashMap<String, Box<dyn Any + Sync + Send>>>>
+    }
+    #[derive(Debug, Default)]
+    pub struct FauxIndex {
+        store: StubStore,
+    }
+    impl StubStore {
+        pub fn insert<A: 'static, R: 'static>(&self, name: String, stub: Stub<A, R>) {
+            let mut lock = self.inner.lock().unwrap();
+            lock.insert(name, Box::new(stub));
+        }
+        pub fn get_mut<A, B>(&self, name: &str) -> Option<&mut Stub<A, B>> {
+            let mut lock = self.inner.lock().unwrap();
+            match lock.get_mut(name) {
+                Some(s) => {
+                    let s = s.as_mut() as *mut dyn Any as *mut Stub<A, B>;
+                    Some(unsafe { &mut *s })
+                }
+                None => None,
+            }
+        }
+    }
+    pub struct StubBuilder<'a> {
+        name: String,
+        store: &'a StubStore,
+        times: Option<usize>,
+        exact: bool,
+    }
+    impl<'a> StubBuilder<'a> {
+        #[must_use]
+        pub fn times(mut self, times: usize) -> Self {
+            self.times = Some(times);
+            self
+        }
+        #[must_use]
+        pub fn exact(mut self, times: usize) -> Self {
+            self.times = Some(times);
+            self.exact = true;
+            self
+        }
+        pub fn then<A: 'static, R: 'static>(self, f: impl Fn(A) -> R + Sync + Send + 'static) {
+            let stub = Stub {
+                stub: Box::new(f),
+                times: self.times,
+                exact: self.exact,
+                name: self.name.clone(),
+            };
+            self.store.insert(self.name, stub);
+        }
+    }
+    impl FauxIndex {
+        pub fn when(&self, name: &str) -> StubBuilder {
+            StubBuilder {
+                name: name.to_string(),
+                store: &self.store,
+                times: None,
+                exact: false,
+            }
+        }
+        pub fn get<'a, A, R>(&'a self, name: &str) -> &'a mut Stub<A, R> {
+            match self.store.get_mut(name) {
+                Some(stub) => stub,
+                None => panic!("unexpected call to {}", name),
+            }
+        }
+    }
+    impl MockIndex {
+        pub fn faux(faux: FauxIndex) -> Self {
+            Self::Faux(Arc::new(faux))
+        }
     pub fn open(
         path: impl AsRef<Path>,
         size: usize,
@@ -99,98 +161,52 @@ impl Index {
         uuid: Uuid,
         update_handler: Arc<UpdateHandler>,
     ) -> Result<Self> {
-        create_dir_all(&path)?;
-        let mut options = EnvOpenOptions::new();
-        options.map_size(size);
-        let inner = Arc::new(milli::Index::new(options, &path)?);
-        Ok(Index {
-            inner,
-            update_file_store,
-            uuid,
-            update_handler,
-        })
+            let index = Index::open(path, size, update_file_store, uuid, update_handler)?;
+            Ok(Self::Vrai(index))
+        }
+        pub fn load_dump(
+            src: impl AsRef<Path>,
+            dst: impl AsRef<Path>,
+            size: usize,
+            update_handler: &UpdateHandler,
+        ) -> anyhow::Result<()> {
+            Index::load_dump(src, dst, size, update_handler)?;
+            Ok(())
+        }
+        pub fn handle_update(&self, update: Processing) -> std::result::Result<Processed, Failed> {
+            match self {
+                MockIndex::Vrai(index) => index.handle_update(update),
+                MockIndex::Faux(_) => todo!(),
+            }
+        }
+        pub fn uuid(&self) -> Uuid {
+            match self {
+                MockIndex::Vrai(index) => index.uuid(),
+                MockIndex::Faux(_) => todo!(),
+            }
    }
     pub fn stats(&self) -> Result<IndexStats> {
-        let rtxn = self.read_txn()?;
-        Ok(IndexStats {
-            size: self.size(),
-            number_of_documents: self.number_of_documents(&rtxn)?,
-            is_indexing: None,
-            field_distribution: self.field_distribution(&rtxn)?,
-        })
+            match self {
+                MockIndex::Vrai(index) => index.stats(),
+                MockIndex::Faux(_) => todo!(),
+            }
    }
     pub fn meta(&self) -> Result<IndexMeta> {
-        IndexMeta::new(self)
+            match self {
+                MockIndex::Vrai(index) => index.meta(),
+                MockIndex::Faux(_) => todo!(),
+            }
    }
     pub fn settings(&self) -> Result<Settings<Checked>> {
-        let txn = self.read_txn()?;
-        self.settings_txn(&txn)
+            match self {
+                MockIndex::Vrai(index) => index.settings(),
+                MockIndex::Faux(_) => todo!(),
    }
-    pub fn settings_txn(&self, txn: &RoTxn) -> Result<Settings<Checked>> {
-        let displayed_attributes = self
-            .displayed_fields(txn)?
-            .map(|fields| fields.into_iter().map(String::from).collect());
-        let searchable_attributes = self
-            .searchable_fields(txn)?
-            .map(|fields| fields.into_iter().map(String::from).collect());
-        let filterable_attributes = self.filterable_fields(txn)?.into_iter().collect();
-        let sortable_attributes = self.sortable_fields(txn)?.into_iter().collect();
-        let criteria = self
-            .criteria(txn)?
-            .into_iter()
-            .map(|c| c.to_string())
-            .collect();
-        let stop_words = self
-            .stop_words(txn)?
-            .map(|stop_words| -> Result<BTreeSet<_>> {
-                Ok(stop_words.stream().into_strs()?.into_iter().collect())
-            })
-            .transpose()?
-            .unwrap_or_else(BTreeSet::new);
-        let distinct_field = self.distinct_field(txn)?.map(String::from);
-        // in milli each word in the synonyms map were split on their separator. Since we lost
-        // this information we are going to put space between words.
-        let synonyms = self
-            .synonyms(txn)?
-            .iter()
-            .map(|(key, values)| {
-                (
-                    key.join(" "),
-                    values.iter().map(|value| value.join(" ")).collect(),
-                )
-            })
-            .collect();
-        Ok(Settings {
-            displayed_attributes: match displayed_attributes {
-                Some(attrs) => Setting::Set(attrs),
-                None => Setting::Reset,
-            },
-            searchable_attributes: match searchable_attributes {
-                Some(attrs) => Setting::Set(attrs),
-                None => Setting::Reset,
-            },
-            filterable_attributes: Setting::Set(filterable_attributes),
-            sortable_attributes: Setting::Set(sortable_attributes),
-            ranking_rules: Setting::Set(criteria),
-            stop_words: Setting::Set(stop_words),
-            distinct_attribute: match distinct_field {
-                Some(field) => Setting::Set(field),
-                None => Setting::Reset,
-            },
-            synonyms: Setting::Set(synonyms),
-            _kind: PhantomData,
-        })
    }
     pub fn retrieve_documents<S: AsRef<str>>(
@@ -199,23 +215,10 @@ impl Index {
         limit: usize,
         attributes_to_retrieve: Option<Vec<S>>,
     ) -> Result<Vec<Map<String, Value>>> {
-        let txn = self.read_txn()?;
-        let fields_ids_map = self.fields_ids_map(&txn)?;
-        let fields_to_display =
-            self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;
-        let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit);
-        let mut documents = Vec::new();
-        for entry in iter {
-            let (_id, obkv) = entry?;
-            let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)?;
-            documents.push(object);
+            match self {
+                MockIndex::Vrai(index) => index.retrieve_documents(offset, limit, attributes_to_retrieve),
+                MockIndex::Faux(_) => todo!(),
        }
-        Ok(documents)
    }
     pub fn retrieve_document<S: AsRef<str>>(
@@ -223,65 +226,69 @@ impl Index {
         doc_id: String,
         attributes_to_retrieve: Option<Vec<S>>,
     ) -> Result<Map<String, Value>> {
-        let txn = self.read_txn()?;
-        let fields_ids_map = self.fields_ids_map(&txn)?;
-        let fields_to_display =
-            self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;
-        let internal_id = self
-            .external_documents_ids(&txn)?
-            .get(doc_id.as_bytes())
-            .ok_or_else(|| IndexError::DocumentNotFound(doc_id.clone()))?;
-        let document = self
-            .documents(&txn, std::iter::once(internal_id))?
-            .into_iter()
-            .next()
-            .map(|(_, d)| d)
-            .ok_or(IndexError::DocumentNotFound(doc_id))?;
-        let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)?;
-        Ok(document)
+            match self {
+                MockIndex::Vrai(index) => index.retrieve_document(doc_id, attributes_to_retrieve),
+                MockIndex::Faux(_) => todo!(),
+            }
    }
     pub fn size(&self) -> u64 {
-        self.env.size()
+            match self {
+                MockIndex::Vrai(index) => index.size(),
+                MockIndex::Faux(_) => todo!(),
+            }
    }
-    fn fields_to_display<S: AsRef<str>>(
-        &self,
-        txn: &heed::RoTxn,
-        attributes_to_retrieve: &Option<Vec<S>>,
-        fields_ids_map: &milli::FieldsIdsMap,
-    ) -> Result<Vec<FieldId>> {
-        let mut displayed_fields_ids = match self.displayed_fields_ids(txn)? {
-            Some(ids) => ids.into_iter().collect::<Vec<_>>(),
-            None => fields_ids_map.iter().map(|(id, _)| id).collect(),
-        };
-        let attributes_to_retrieve_ids = match attributes_to_retrieve {
-            Some(attrs) => attrs
-                .iter()
-                .filter_map(|f| fields_ids_map.id(f.as_ref()))
-                .collect::<HashSet<_>>(),
-            None => fields_ids_map.iter().map(|(id, _)| id).collect(),
-        };
-        displayed_fields_ids.retain(|fid| attributes_to_retrieve_ids.contains(fid));
-        Ok(displayed_fields_ids)
    }
     pub fn snapshot(&self, path: impl AsRef<Path>) -> Result<()> {
-        let mut dst = path.as_ref().join(format!("indexes/{}/", self.uuid));
-        create_dir_all(&dst)?;
-        dst.push("data.mdb");
-        let _txn = self.write_txn()?;
-        self.inner
-            .env
-            .copy_to_path(dst, heed::CompactionOption::Enabled)?;
+            match self {
+                MockIndex::Vrai(index) => index.snapshot(path),
+                MockIndex::Faux(faux) => faux.get("snapshot").call(path.as_ref())
+            }
+        }
+        pub fn inner(&self) -> &milli::Index {
+            match self {
+                MockIndex::Vrai(index) => index.inner(),
+                MockIndex::Faux(_) => todo!(),
+            }
+        }
+        pub fn update_primary_key(&self, primary_key: Option<String>) -> Result<IndexMeta> {
+            match self {
+                MockIndex::Vrai(index) => index.update_primary_key(primary_key),
+                MockIndex::Faux(_) => todo!(),
+            }
+        }
+        pub fn perform_search(&self, query: SearchQuery) -> Result<SearchResult> {
+            match self {
+                MockIndex::Vrai(index) => index.perform_search(query),
+                MockIndex::Faux(_) => todo!(),
+            }
+        }
+        pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
+            match self {
+                MockIndex::Vrai(index) => index.dump(path),
+                MockIndex::Faux(_) => todo!(),
+            }
+        }
+    }
+    #[test]
+    fn test_faux_index() {
+        let faux = FauxIndex::default();
+        faux
+            .when("snapshot")
+            .exact(2)
+            .then(|path: &Path| -> Result<()> {
+                println!("path: {}", path.display());
        Ok(())
+            });
+        let index = MockIndex::faux(faux);
+        let path = PathBuf::from("hello");
+        index.snapshot(&path).unwrap();
+        index.snapshot(&path).unwrap();
    }
 }
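A note on the mechanism introduced above: `MockIndex` wraps either a real `Index` (`Vrai`, French for "real") or a stub-backed `FauxIndex` (`Faux`, "fake"), and the `#[cfg(test)] pub use test::MockIndex as Index;` re-export swaps it in wherever the rest of the crate names `Index`. The `StubStore` keeps the stubs type-erased behind `Box<dyn Any>` keyed by method name and recovers them with a raw-pointer cast. Below is a minimal, self-contained sketch of that same type-erasure pattern, using the safe `downcast_mut` instead of the commit's pointer cast; the names (`Store`, `when`, `call`) are illustrative only and are not the commit's API.

// Sketch: name -> Box<dyn Any> storage with a checked downcast on retrieval.
use std::any::Any;
use std::collections::HashMap;

struct Stub<A, R> {
    stub: Box<dyn Fn(A) -> R>,
}

#[derive(Default)]
struct Store {
    inner: HashMap<String, Box<dyn Any>>,
}

impl Store {
    fn when<A: 'static, R: 'static>(&mut self, name: &str, f: impl Fn(A) -> R + 'static) {
        let stub = Stub { stub: Box::new(f) };
        self.inner.insert(name.to_string(), Box::new(stub));
    }

    fn call<A: 'static, R: 'static>(&mut self, name: &str, args: A) -> R {
        let any = self
            .inner
            .get_mut(name)
            .unwrap_or_else(|| panic!("unexpected call to {}", name));
        // The downcast only succeeds if the caller asks for the same <A, R>
        // the stub was registered with; otherwise we panic loudly.
        let stub = any
            .downcast_mut::<Stub<A, R>>()
            .expect("stub called with the wrong argument or return type");
        (stub.stub)(args)
    }
}

fn main() {
    let mut store = Store::default();
    store.when("snapshot", |path: &'static str| format!("snapshot at {}", path));
    let answer: String = store.call("snapshot", "hello");
    assert_eq!(answer, "snapshot at hello");
}

The commit's version additionally keeps the map behind `Arc<Mutex<...>>` and hands out a `&mut Stub<A, R>` across the lock via the unsafe cast, which is what lets `faux.get("snapshot").call(...)` work through a shared reference.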

View File

@@ -12,10 +12,9 @@ use serde::{Deserialize, Serialize};
 use serde_json::{json, Value};
 use crate::index::error::FacetError;
-use crate::index::IndexError;
-use super::error::Result;
-use super::Index;
+use super::error::{Result, IndexError};
+use super::index::Index;
 pub type Document = IndexMap<String, Value>;
 type MatchesInfo = BTreeMap<String, Vec<MatchInfo>>;

View File

@@ -12,7 +12,7 @@ use crate::index_controller::updates::status::{Failed, Processed, Processing, Up
 use crate::Update;
 use super::error::{IndexError, Result};
-use super::{Index, IndexMeta};
+use super::index::{Index, IndexMeta};
 fn serialize_with_wildcard<S>(
     field: &Setting<Vec<String>>,

View File

@@ -17,6 +17,7 @@ use crate::options::IndexerOpts;
 type AsyncMap<K, V> = Arc<RwLock<HashMap<K, V>>>;
 #[async_trait::async_trait]
+#[cfg_attr(test, mockall::automock)]
 pub trait IndexStore {
     async fn create(&self, uuid: Uuid, primary_key: Option<String>) -> Result<Index>;
     async fn get(&self, uuid: Uuid) -> Result<Option<Index>>;
@@ -72,9 +73,10 @@ impl IndexStore for MapIndexStore {
         let index = spawn_blocking(move || -> Result<Index> {
             let index = Index::open(path, index_size, file_store, uuid, update_handler)?;
             if let Some(primary_key) = primary_key {
-                let mut txn = index.write_txn()?;
-                let mut builder = UpdateBuilder::new(0).settings(&mut txn, &index);
+                let inner = index.inner();
+                let mut txn = inner.write_txn()?;
+                let mut builder = UpdateBuilder::new(0).settings(&mut txn, index.inner());
                 builder.set_primary_key(primary_key);
                 builder.execute(|_, _| ())?;
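For context on the `#[cfg_attr(test, mockall::automock)]` line added to the trait above: in test builds mockall generates a `MockIndexStore` whose expectations are set through its `expect_*`/`returning` API. The following is a rough usage sketch under assumptions: the test name and the `#[tokio::test]` attribute are illustrative and not part of this commit.

#[cfg(test)]
mod tests {
    use super::*;
    use uuid::Uuid;

    #[tokio::test]
    async fn get_returns_none_for_an_unknown_index() {
        let mut store = MockIndexStore::new();
        // Answer every `get` with `Ok(None)`; adding `.times(1)` would also pin the call count.
        store.expect_get().returning(|_uuid| Ok(None));

        let found = store.get(Uuid::new_v4()).await.unwrap();
        assert!(found.is_none());
    }
}

Note that under `cfg(test)` the `Index` in these signatures is already the `MockIndex` alias introduced in the previous file, so returning `Ok(None)` sidesteps constructing one.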

View File

@@ -22,6 +22,7 @@ struct DumpEntry {
 const UUIDS_DB_PATH: &str = "index_uuids";
 #[async_trait::async_trait]
+#[cfg_attr(test, mockall::automock)]
 pub trait UuidStore: Sized {
     // Create a new entry for `name`. Return an error if `err` and the entry already exists, return
     // the uuid otherwise.

View File

@@ -314,7 +314,7 @@ impl IndexController {
         for (uid, index) in indexes {
             let meta = index.meta()?;
             let meta = IndexMetadata {
-                uuid: index.uuid,
+                uuid: index.uuid(),
                 name: uid.clone(),
                 uid,
                 meta,
@@ -366,7 +366,7 @@ impl IndexController {
         index_settings.uid.take();
         let index = self.index_resolver.get_index(uid.clone()).await?;
-        let uuid = index.uuid;
+        let uuid = index.uuid();
         let meta =
             spawn_blocking(move || index.update_primary_key(index_settings.primary_key)).await??;
         let meta = IndexMetadata {
@@ -386,7 +386,7 @@ impl IndexController {
     pub async fn get_index(&self, uid: String) -> Result<IndexMetadata> {
         let index = self.index_resolver.get_index(uid.clone()).await?;
-        let uuid = index.uuid;
+        let uuid = index.uuid();
         let meta = spawn_blocking(move || index.meta()).await??;
         let meta = IndexMetadata {
             uuid,
@@ -400,7 +400,7 @@ impl IndexController {
     pub async fn get_index_stats(&self, uid: String) -> Result<IndexStats> {
         let update_infos = UpdateMsg::get_info(&self.update_sender).await?;
         let index = self.index_resolver.get_index(uid).await?;
-        let uuid = index.uuid;
+        let uuid = index.uuid();
         let mut stats = spawn_blocking(move || index.stats()).await??;
         // Check if the currently indexing update is from our index.
         stats.is_indexing = Some(Some(uuid) == update_infos.processing);
@@ -414,7 +414,7 @@ impl IndexController {
         let mut indexes = BTreeMap::new();
         for (index_uid, index) in self.index_resolver.list().await? {
-            let uuid = index.uuid;
+            let uuid = index.uuid();
             let (mut stats, meta) = spawn_blocking::<_, IndexResult<_>>(move || {
                 let stats = index.stats()?;
                 let meta = index.meta()?;
@@ -461,7 +461,7 @@ impl IndexController {
         let meta = spawn_blocking(move || -> IndexResult<_> {
             let meta = index.meta()?;
             let meta = IndexMetadata {
-                uuid: index.uuid,
+                uuid: index.uuid(),
                 uid: uid.clone(),
                 name: uid,
                 meta,

View File

@@ -125,8 +125,8 @@ pub fn load_snapshot(
     }
 }
-//#[cfg(test)]
-//mod test {
+#[cfg(test)]
+mod test {
 //use std::iter::FromIterator;
 //use std::{collections::HashSet, sync::Arc};
@@ -254,4 +254,4 @@ pub fn load_snapshot(
 //let _ = timeout(Duration::from_millis(300), snapshot_service.run()).await;
 //}
-//}
+}

View File

@@ -34,7 +34,7 @@ impl UpdateStore {
         // txn must *always* be acquired after state lock, or it will dead lock.
         let txn = self.env.write_txn()?;
-        let uuids = indexes.iter().map(|i| i.uuid).collect();
+        let uuids = indexes.iter().map(|i| i.uuid()).collect();
         self.dump_updates(&txn, &uuids, &path)?;

View File

@@ -509,7 +509,7 @@ impl UpdateStore {
         let pendings = self.pending_queue.iter(&txn)?.lazily_decode_data();
-        let uuids: HashSet<_> = indexes.iter().map(|i| i.uuid).collect();
+        let uuids: HashSet<_> = indexes.iter().map(|i| i.uuid()).collect();
         for entry in pendings {
             let ((_, uuid, _), pending) = entry?;
             if uuids.contains(&uuid) {
@@ -528,7 +528,7 @@ impl UpdateStore {
         let path = path.as_ref().to_owned();
         indexes
             .par_iter()
-            .try_for_each(|index| index.snapshot(path.clone()))
+            .try_for_each(|index| index.snapshot(&path))
             .unwrap();
         Ok(())