2021-05-11 02:25:09 +08:00
|
|
|
mod actor;
|
2021-05-11 02:20:36 +08:00
|
|
|
mod handle_impl;
|
2021-05-11 02:25:09 +08:00
|
|
|
mod message;
|
2021-05-11 02:20:36 +08:00
|
|
|
mod v1;
|
|
|
|
mod v2;
|
2021-05-11 02:25:09 +08:00
|
|
|
|
2021-05-11 02:20:36 +08:00
|
|
|
use std::{fs::File, path::Path, sync::Arc};
|
2021-05-11 02:25:09 +08:00
|
|
|
|
2021-04-28 22:43:49 +08:00
|
|
|
use anyhow::bail;
|
|
|
|
use heed::EnvOpenOptions;
|
|
|
|
use log::{error, info};
|
|
|
|
use milli::update::{IndexDocumentsMethod, UpdateBuilder, UpdateFormat};
|
2021-05-11 02:20:36 +08:00
|
|
|
#[cfg(test)]
|
|
|
|
use mockall::automock;
|
2021-05-11 02:23:12 +08:00
|
|
|
use serde::{Deserialize, Serialize};
|
2021-05-11 02:25:09 +08:00
|
|
|
use serde_json::json;
|
2021-04-28 22:43:49 +08:00
|
|
|
use tempfile::TempDir;
|
2021-05-11 02:20:36 +08:00
|
|
|
use thiserror::Error;
|
2021-05-11 06:20:55 +08:00
|
|
|
use uuid::Uuid;
|
2021-04-28 22:43:49 +08:00
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
use super::IndexMetadata;
|
|
|
|
use crate::helpers::compression;
|
2021-04-28 22:43:49 +08:00
|
|
|
use crate::index::Index;
|
2021-05-11 06:20:55 +08:00
|
|
|
use crate::index_controller::uuid_resolver;
|
2021-05-11 02:25:09 +08:00
|
|
|
|
|
|
|
pub use actor::DumpActor;
|
2021-05-11 02:20:36 +08:00
|
|
|
pub use handle_impl::*;
|
2021-05-11 02:25:09 +08:00
|
|
|
pub use message::DumpMsg;
|
|
|
|
|
|
|
|
/// Result alias used throughout the dump actor APIs, with [`DumpError`] as the error type.
pub type DumpResult<T> = std::result::Result<T, DumpError>;
|
|
|
|
|
|
|
|
/// Errors that can occur while creating a dump or reporting its status.
#[derive(Error, Debug)]
pub enum DumpError {
    /// Catch-all for index-level failures, converted from `anyhow::Error`.
    #[error("error with index: {0}")]
    Error(#[from] anyhow::Error),
    /// Failure coming from the `heed` storage layer.
    #[error("Heed error: {0}")]
    HeedError(#[from] heed::Error),
    /// A dump was requested while another one is still in progress.
    #[error("dump already running")]
    DumpAlreadyRunning,
    /// The requested dump uid is unknown.
    #[error("dump `{0}` does not exist")]
    DumpDoesNotExist(String),
}
|
2021-04-28 22:43:49 +08:00
|
|
|
|
|
|
|
/// Format version tag stored in a dump's metadata; selects which import
/// routine ([`v1`] or [`v2`]) is used to load the dump.
#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
enum DumpVersion {
    V1,
    V2,
}
|
|
|
|
|
|
|
|
impl DumpVersion {
|
2021-05-05 20:11:56 +08:00
|
|
|
const CURRENT: Self = Self::V2;
|
2021-04-27 16:27:43 +08:00
|
|
|
|
|
|
|
/// Select the good importation function from the `DumpVersion` of metadata
|
2021-05-11 02:25:09 +08:00
|
|
|
pub fn import_index(
|
|
|
|
self,
|
|
|
|
size: usize,
|
2021-05-11 06:20:55 +08:00
|
|
|
uuid: Uuid,
|
2021-05-11 02:25:09 +08:00
|
|
|
dump_path: &Path,
|
2021-05-11 06:20:55 +08:00
|
|
|
db_path: &Path,
|
2021-05-07 00:44:16 +08:00
|
|
|
primary_key: Option<&str>,
|
2021-05-11 02:25:09 +08:00
|
|
|
) -> anyhow::Result<()> {
|
2021-04-27 16:27:43 +08:00
|
|
|
match self {
|
2021-05-11 06:20:55 +08:00
|
|
|
Self::V1 => v1::import_index(size, uuid, dump_path, db_path, primary_key),
|
|
|
|
Self::V2 => v2::import_index(size, uuid, dump_path, db_path, primary_key),
|
2021-04-27 16:27:43 +08:00
|
|
|
}
|
|
|
|
}
|
2021-04-28 22:43:49 +08:00
|
|
|
}
|
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
/// Client-side handle for sending requests to the dump actor
/// (see [`actor::DumpActor`] and [`message::DumpMsg`]).
#[async_trait::async_trait]
#[cfg_attr(test, automock)]
pub trait DumpActorHandle {
    /// Start the creation of a dump
    /// Implementation: [handle_impl::DumpActorHandleImpl::create_dump]
    async fn create_dump(&self) -> DumpResult<DumpInfo>;

    /// Return the status of an already created dump
    /// Implementation: [handle_impl::DumpActorHandleImpl::dump_status]
    async fn dump_info(&self, uid: String) -> DumpResult<DumpInfo>;
}
|
|
|
|
|
2021-04-28 22:43:49 +08:00
|
|
|
/// Contents of the `metadata.json` file found at the root of a dump archive.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    // metadata of every index contained in the dump
    indexes: Vec<IndexMetadata>,
    // version of the database that produced the dump
    db_version: String,
    // format version of the dump itself; selects the import routine
    dump_version: DumpVersion,
}
|
|
|
|
|
2021-04-29 20:45:08 +08:00
|
|
|
impl Metadata {
|
|
|
|
/// Create a Metadata with the current dump version of meilisearch.
|
2021-04-28 22:43:49 +08:00
|
|
|
pub fn new(indexes: Vec<IndexMetadata>, db_version: String) -> Self {
|
2021-04-29 20:45:08 +08:00
|
|
|
Metadata {
|
2021-04-28 22:43:49 +08:00
|
|
|
indexes,
|
|
|
|
db_version,
|
|
|
|
dump_version: DumpVersion::CURRENT,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-04-29 20:45:08 +08:00
|
|
|
/// Extract Metadata from `metadata.json` file present at provided `dir_path`
|
2021-04-28 22:43:49 +08:00
|
|
|
fn from_path(dir_path: &Path) -> anyhow::Result<Self> {
|
|
|
|
let path = dir_path.join("metadata.json");
|
|
|
|
let file = File::open(path)?;
|
|
|
|
let reader = std::io::BufReader::new(file);
|
|
|
|
let metadata = serde_json::from_reader(reader)?;
|
|
|
|
|
|
|
|
Ok(metadata)
|
|
|
|
}
|
|
|
|
|
2021-04-29 20:45:08 +08:00
|
|
|
/// Write Metadata in `metadata.json` file at provided `dir_path`
|
2021-05-05 20:11:56 +08:00
|
|
|
pub async fn to_path(&self, dir_path: &Path) -> anyhow::Result<()> {
|
2021-04-28 22:43:49 +08:00
|
|
|
let path = dir_path.join("metadata.json");
|
2021-05-05 20:11:56 +08:00
|
|
|
tokio::fs::write(path, serde_json::to_string(self)?).await?;
|
2021-04-28 22:43:49 +08:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
/// Lifecycle state of a dump task, serialized in snake_case for the API.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum DumpStatus {
    Done,
    InProgress,
    Failed,
}
|
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
/// Status report for a dump task, returned to clients by the dump handle.
#[derive(Debug, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DumpInfo {
    pub uid: String,
    pub status: DumpStatus,
    // flattened into the serialized object; omitted entirely when no error occurred
    #[serde(skip_serializing_if = "Option::is_none", flatten)]
    pub error: Option<serde_json::Value>,
}
|
2021-04-28 22:43:49 +08:00
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
impl DumpInfo {
|
|
|
|
pub fn new(uid: String, status: DumpStatus) -> Self {
|
|
|
|
Self {
|
|
|
|
uid,
|
|
|
|
status,
|
|
|
|
error: None,
|
|
|
|
}
|
2021-05-05 20:11:56 +08:00
|
|
|
}
|
2021-04-28 22:43:49 +08:00
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
pub fn with_error(&mut self, error: String) {
|
|
|
|
self.status = DumpStatus::Failed;
|
|
|
|
self.error = Some(json!(error));
|
2021-05-05 20:11:56 +08:00
|
|
|
}
|
2021-04-28 22:43:49 +08:00
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
pub fn done(&mut self) {
|
|
|
|
self.status = DumpStatus::Done;
|
|
|
|
}
|
2021-04-28 22:43:49 +08:00
|
|
|
|
2021-05-11 02:25:09 +08:00
|
|
|
pub fn dump_already_in_progress(&self) -> bool {
|
|
|
|
self.status == DumpStatus::InProgress
|
|
|
|
}
|
2021-04-28 22:43:49 +08:00
|
|
|
}
|
|
|
|
|
2021-05-11 02:23:12 +08:00
|
|
|
pub fn load_dump(
|
2021-04-28 22:43:49 +08:00
|
|
|
db_path: impl AsRef<Path>,
|
|
|
|
dump_path: impl AsRef<Path>,
|
|
|
|
size: usize,
|
|
|
|
) -> anyhow::Result<()> {
|
|
|
|
info!("Importing dump from {}...", dump_path.as_ref().display());
|
|
|
|
let db_path = db_path.as_ref();
|
|
|
|
let dump_path = dump_path.as_ref();
|
2021-05-11 02:23:12 +08:00
|
|
|
let uuid_resolver = uuid_resolver::HeedUuidStore::new(&db_path)?;
|
2021-04-28 22:43:49 +08:00
|
|
|
|
|
|
|
// extract the dump in a temporary directory
|
2021-04-29 20:45:08 +08:00
|
|
|
let tmp_dir = TempDir::new_in(db_path)?;
|
2021-04-28 22:43:49 +08:00
|
|
|
let tmp_dir_path = tmp_dir.path();
|
|
|
|
compression::from_tar_gz(dump_path, tmp_dir_path)?;
|
|
|
|
|
|
|
|
// read dump metadata
|
2021-04-29 20:45:08 +08:00
|
|
|
let metadata = Metadata::from_path(&tmp_dir_path)?;
|
2021-04-28 22:43:49 +08:00
|
|
|
|
|
|
|
// remove indexes which have same `uuid` than indexes to import and create empty indexes
|
2021-05-11 02:23:12 +08:00
|
|
|
let existing_index_uids = uuid_resolver.list()?;
|
2021-04-28 22:43:49 +08:00
|
|
|
|
2021-04-27 16:27:43 +08:00
|
|
|
info!("Deleting indexes already present in the db and provided in the dump...");
|
2021-04-28 22:43:49 +08:00
|
|
|
for idx in &metadata.indexes {
|
|
|
|
if let Some((_, uuid)) = existing_index_uids.iter().find(|(s, _)| s == &idx.uid) {
|
|
|
|
// if we find the index in the `uuid_resolver` it's supposed to exist on the file system
|
|
|
|
// and we want to delete it
|
|
|
|
let path = db_path.join(&format!("indexes/index-{}", uuid));
|
|
|
|
info!("Deleting {}", path.display());
|
|
|
|
use std::io::ErrorKind::*;
|
|
|
|
match std::fs::remove_dir_all(path) {
|
|
|
|
Ok(()) => (),
|
|
|
|
// if an index was present in the metadata but missing of the fs we can ignore the
|
|
|
|
// problem because we are going to create it later
|
|
|
|
Err(e) if e.kind() == NotFound => (),
|
|
|
|
Err(e) => bail!(e),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// if the index does not exist in the `uuid_resolver` we create it
|
2021-05-11 02:23:12 +08:00
|
|
|
uuid_resolver.create_uuid(idx.uid.clone(), false)?;
|
2021-04-28 22:43:49 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// import each indexes content
|
|
|
|
for idx in metadata.indexes {
|
|
|
|
let dump_path = tmp_dir_path.join(&idx.uid);
|
2021-05-11 02:23:12 +08:00
|
|
|
// this cannot fail since we created all the missing uuid in the previous loop
|
|
|
|
let uuid = uuid_resolver.get_uuid(idx.uid)?.unwrap();
|
2021-05-07 00:44:16 +08:00
|
|
|
|
2021-05-11 02:20:36 +08:00
|
|
|
info!(
|
|
|
|
"Importing dump from {} into {}...",
|
|
|
|
dump_path.display(),
|
2021-05-11 06:20:55 +08:00
|
|
|
db_path.display()
|
2021-05-11 02:20:36 +08:00
|
|
|
);
|
|
|
|
metadata
|
|
|
|
.dump_version
|
|
|
|
.import_index(
|
|
|
|
size,
|
2021-05-11 06:20:55 +08:00
|
|
|
uuid,
|
2021-05-11 02:20:36 +08:00
|
|
|
&dump_path,
|
2021-05-11 06:20:55 +08:00
|
|
|
&db_path,
|
2021-05-11 02:20:36 +08:00
|
|
|
idx.meta.primary_key.as_ref().map(|s| s.as_ref()),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
info!("Dump importation from {} succeed", dump_path.display());
|
2021-04-28 22:43:49 +08:00
|
|
|
}
|
|
|
|
|
2021-05-11 02:20:36 +08:00
|
|
|
// finally we can move all the unprocessed update file into our new DB
|
|
|
|
let update_path = tmp_dir_path.join("update_files");
|
|
|
|
let db_update_path = db_path.join("updates/update_files");
|
|
|
|
eprintln!("path {:?} exists: {:?}", update_path, update_path.exists());
|
|
|
|
eprintln!(
|
|
|
|
"path {:?} exists: {:?}",
|
|
|
|
db_update_path,
|
|
|
|
db_update_path.exists()
|
|
|
|
);
|
|
|
|
let _ = std::fs::remove_dir_all(db_update_path);
|
|
|
|
std::fs::rename(
|
|
|
|
tmp_dir_path.join("update_files"),
|
|
|
|
db_path.join("updates/update_files"),
|
|
|
|
)
|
|
|
|
.unwrap();
|
2021-05-07 00:44:16 +08:00
|
|
|
|
2021-04-28 22:43:49 +08:00
|
|
|
info!("Dump importation from {} succeed", dump_path.display());
|
|
|
|
Ok(())
|
|
|
|
}
|