// meilisearch/meilisearch-http/src/data.rs

use std::error::Error;
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;

use meilisearch_core::{Database, DatabaseOptions, Index};
use sha2::Digest;

use crate::error::{Error as MSError, ResponseError};
use crate::index_update_callback;
use crate::option::Opt;
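
/// Shared application state: a cheaply cloneable, reference-counted handle
/// that derefs to `DataInner`.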
#[derive(Clone)]
pub struct Data {
    inner: Arc<DataInner>,
}

impl Deref for Data {
    type Target = DataInner;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
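
/// The inner application state: the database handle together with the
/// settings taken from `Opt` and the resolved API keys.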
#[derive(Clone)]
pub struct DataInner {
    pub db: Arc<Database>,
    pub db_path: String,
    pub dumps_dir: PathBuf,
    pub dump_batch_size: usize,
    pub api_keys: ApiKeys,
    pub server_pid: u32,
    pub http_payload_size_limit: usize,
}
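
/// The public, private, and master API keys. When the private or public key
/// is missing, it is derived from the master key by
/// `generate_missing_api_keys`.
///
/// A minimal usage sketch (illustrative, not from the original file; the
/// master key value is made up):
///
/// ```ignore
/// let mut keys = ApiKeys {
///     master: Some("my-master-key".to_string()),
///     private: None,
///     public: None,
/// };
/// keys.generate_missing_api_keys();
/// assert!(keys.private.is_some() && keys.public.is_some());
/// ```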
#[derive(Clone)]
pub struct ApiKeys {
    pub public: Option<String>,
    pub private: Option<String>,
    pub master: Option<String>,
}
impl ApiKeys {
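    /// Fills in any missing private and public key by hashing
    /// `"{master}-private"` and `"{master}-public"` with SHA-256. Does
    /// nothing when no master key is set.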
    pub fn generate_missing_api_keys(&mut self) {
        if let Some(master_key) = &self.master {
            if self.private.is_none() {
                let key = format!("{}-private", master_key);
                let sha = sha2::Sha256::digest(key.as_bytes());
                self.private = Some(format!("{:x}", sha));
            }
            if self.public.is_none() {
                let key = format!("{}-public", master_key);
                let sha = sha2::Sha256::digest(key.as_bytes());
                self.public = Some(format!("{:x}", sha));
            }
        }
    }
}
impl Data {
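    /// Builds the shared state from the command-line options: opens (or
    /// creates) the database, derives any missing API keys from the master
    /// key, and registers the index update callback.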
    pub fn new(opt: Opt) -> Result<Data, Box<dyn Error>> {
        let db_path = opt.db_path.clone();
        let dumps_dir = opt.dumps_dir.clone();
        let dump_batch_size = opt.dump_batch_size;
        let server_pid = std::process::id();

        let db_opt = DatabaseOptions {
            main_map_size: opt.max_mdb_size,
            update_map_size: opt.max_udb_size,
        };

        let http_payload_size_limit = opt.http_payload_size_limit;

        let db = Arc::new(Database::open_or_create(opt.db_path, db_opt)?);

        let mut api_keys = ApiKeys {
            master: opt.master_key,
            private: None,
            public: None,
        };

        api_keys.generate_missing_api_keys();

        let inner_data = DataInner {
            db: db.clone(),
            db_path,
            dumps_dir,
            dump_batch_size,
            api_keys,
            server_pid,
            http_payload_size_limit,
        };

        let data = Data {
            inner: Arc::new(inner_data),
        };

        let callback_context = data.clone();
        db.set_update_callback(Box::new(move |index_uid, status| {
            index_update_callback(&index_uid, &callback_context, status);
        }));

        Ok(data)
    }
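
    /// Creates the index `uid` after checking that the uid contains only
    /// ASCII alphanumeric characters, `-`, or `_`, and stores its name in the
    /// index main store.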
    fn create_index(&self, uid: &str) -> Result<Index, ResponseError> {
        if !uid
            .chars()
            .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
        {
            return Err(MSError::InvalidIndexUid.into());
        }

        let created_index = self.db.create_index(&uid).map_err(|e| match e {
            meilisearch_core::Error::IndexAlreadyExists => e.into(),
            _ => ResponseError::from(MSError::create_index(e)),
        })?;

        self.db.main_write::<_, _, ResponseError>(|mut writer| {
            created_index.main.put_name(&mut writer, uid)?;

            created_index
                .main
                .created_at(&writer)?
                .ok_or(MSError::internal("Impossible to read created at"))?;

            created_index
                .main
                .updated_at(&writer)?
                .ok_or(MSError::internal("Impossible to read updated at"))?;

            Ok(())
        })?;

        Ok(created_index)
    }
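
    /// Opens the index `uid`, creating it if it does not exist, and runs `f`
    /// on it. If the index was created by this call and `f` fails, the index
    /// is deleted again before the error is returned.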
    pub fn get_or_create_index<F, R>(&self, uid: &str, f: F) -> Result<R, ResponseError>
    where
        F: FnOnce(&Index) -> Result<R, ResponseError>,
    {
        let mut index_has_been_created = false;

        let index = match self.db.open_index(&uid) {
            Some(index) => index,
            None => {
                index_has_been_created = true;
                self.create_index(&uid)?
            }
        };

        match f(&index) {
            Ok(r) => Ok(r),
            Err(err) => {
                if index_has_been_created {
                    let _ = self.db.delete_index(&uid);
                }
                Err(err)
            }
        }
    }
}