2019-10-31 22:00:36 +08:00
|
|
|
use std::collections::HashMap;
|
|
|
|
use std::ops::Deref;
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
|
|
|
use chrono::{DateTime, Utc};
|
|
|
|
use heed::types::{SerdeBincode, Str};
|
2019-11-21 00:28:46 +08:00
|
|
|
use log::error;
|
2020-01-23 18:30:18 +08:00
|
|
|
use meilisearch_core::{Database, Error as MError, MResult, MainT, UpdateT};
|
2019-10-31 22:00:36 +08:00
|
|
|
use sysinfo::Pid;
|
2020-02-06 22:41:11 +08:00
|
|
|
use sha2::Digest;
|
2019-10-31 22:00:36 +08:00
|
|
|
|
|
|
|
use crate::option::Opt;
|
|
|
|
use crate::routes::index::index_update_callback;
|
|
|
|
|
2019-11-21 00:28:46 +08:00
|
|
|
/// Key in the common store under which the datetime of the last processed
/// update is persisted (see `DataInner::last_update` / `set_last_update`).
const LAST_UPDATE_KEY: &str = "last-update";
|
|
|
|
|
2019-10-31 22:00:36 +08:00
|
|
|
/// heed codec used to (de)serialize the last-update datetime with bincode.
type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
|
|
|
|
|
|
|
|
/// Cheaply clonable handle over the shared server state.
///
/// Cloning only bumps the `Arc` reference count; every clone observes the
/// same `DataInner`. The inner state is reachable transparently through
/// the `Deref` impl below.
#[derive(Clone)]
pub struct Data {
    inner: Arc<DataInner>,
}
|
|
|
|
|
|
|
|
impl Deref for Data {
|
|
|
|
type Target = DataInner;
|
|
|
|
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.inner
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// The actual server state shared behind `Data`.
#[derive(Clone)]
pub struct DataInner {
    // handle to the meilisearch database
    pub db: Arc<Database>,
    // filesystem path the database was opened from
    pub db_path: String,
    // master/private/public API keys (see `ApiKeys`)
    pub api_keys: ApiKeys,
    // pid of the current server process
    pub server_pid: Pid,
}
|
|
|
|
|
2020-02-06 22:41:11 +08:00
|
|
|
/// The set of API keys accepted by the server; each one is optional.
///
/// `private` and `public` can be derived from `master` with
/// `generate_missing_api_keys`.
#[derive(Default, Clone)]
pub struct ApiKeys {
    pub public: Option<String>,
    pub private: Option<String>,
    pub master: Option<String>,
}
|
|
|
|
|
|
|
|
impl ApiKeys {
|
|
|
|
pub fn generate_missing_api_keys(&mut self) {
|
|
|
|
if let Some(master_key) = &self.master {
|
|
|
|
if self.private.is_none() {
|
|
|
|
let key = format!("{}-private", master_key);
|
|
|
|
let sha = sha2::Sha256::digest(key.as_bytes());
|
|
|
|
self.private = Some(format!("{:x}", sha));
|
|
|
|
}
|
|
|
|
if self.public.is_none() {
|
|
|
|
let key = format!("{}-public", master_key);
|
|
|
|
let sha = sha2::Sha256::digest(key.as_bytes());
|
|
|
|
self.public = Some(format!("{:x}", sha));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-31 22:00:36 +08:00
|
|
|
impl DataInner {
|
2019-11-26 23:12:06 +08:00
|
|
|
pub fn is_indexing(&self, reader: &heed::RoTxn<UpdateT>, index: &str) -> MResult<Option<bool>> {
|
2019-10-31 22:00:36 +08:00
|
|
|
match self.db.open_index(&index) {
|
|
|
|
Some(index) => index.current_update_id(&reader).map(|u| Some(u.is_some())),
|
|
|
|
None => Ok(None),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-26 23:12:06 +08:00
|
|
|
pub fn last_update(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
|
2019-10-31 22:00:36 +08:00
|
|
|
match self
|
|
|
|
.db
|
|
|
|
.common_store()
|
2019-11-26 23:12:06 +08:00
|
|
|
.get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)?
|
2019-10-31 22:00:36 +08:00
|
|
|
{
|
|
|
|
Some(datetime) => Ok(Some(datetime)),
|
|
|
|
None => Ok(None),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-26 23:12:06 +08:00
|
|
|
pub fn set_last_update(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
|
2019-10-31 22:00:36 +08:00
|
|
|
self.db
|
|
|
|
.common_store()
|
2019-11-26 23:12:06 +08:00
|
|
|
.put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now())
|
2019-10-31 22:00:36 +08:00
|
|
|
.map_err(Into::into)
|
|
|
|
}
|
|
|
|
|
2019-11-26 23:12:06 +08:00
|
|
|
pub fn compute_stats(&self, writer: &mut heed::RwTxn<MainT>, index_uid: &str) -> MResult<()> {
|
2019-11-19 23:15:49 +08:00
|
|
|
let index = match self.db.open_index(&index_uid) {
|
2019-10-31 22:00:36 +08:00
|
|
|
Some(index) => index,
|
|
|
|
None => {
|
2019-11-19 23:15:49 +08:00
|
|
|
error!("Impossible to retrieve index {}", index_uid);
|
2019-10-31 22:00:36 +08:00
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
let schema = match index.main.schema(&writer)? {
|
|
|
|
Some(schema) => schema,
|
|
|
|
None => return Ok(()),
|
|
|
|
};
|
|
|
|
|
|
|
|
let all_documents_fields = index
|
|
|
|
.documents_fields_counts
|
|
|
|
.all_documents_fields_counts(&writer)?;
|
|
|
|
|
|
|
|
// count fields frequencies
|
|
|
|
let mut fields_frequency = HashMap::<_, usize>::new();
|
|
|
|
for result in all_documents_fields {
|
|
|
|
let (_, attr, _) = result?;
|
2020-01-22 21:29:39 +08:00
|
|
|
if let Some(field_id) = schema.indexed_pos_to_field_id(attr) {
|
|
|
|
*fields_frequency.entry(field_id).or_default() += 1;
|
|
|
|
}
|
2019-10-31 22:00:36 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// convert attributes to their names
|
|
|
|
let frequency: HashMap<_, _> = fields_frequency
|
|
|
|
.into_iter()
|
2020-02-11 22:16:02 +08:00
|
|
|
.filter_map(|(a, c)| {
|
2020-02-13 17:25:37 +08:00
|
|
|
schema.name(a).map(|name| (name.to_string(), c))
|
2020-02-11 22:16:02 +08:00
|
|
|
})
|
2019-10-31 22:00:36 +08:00
|
|
|
.collect();
|
|
|
|
|
2019-11-20 18:24:08 +08:00
|
|
|
index
|
|
|
|
.main
|
2019-11-21 00:28:46 +08:00
|
|
|
.put_fields_frequency(writer, &frequency)
|
2019-11-20 18:24:08 +08:00
|
|
|
.map_err(MError::Zlmdb)
|
2019-10-31 22:00:36 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Data {
|
|
|
|
pub fn new(opt: Opt) -> Data {
|
2019-11-20 00:09:06 +08:00
|
|
|
let db_path = opt.db_path.clone();
|
2019-10-31 22:00:36 +08:00
|
|
|
let server_pid = sysinfo::get_current_pid().unwrap();
|
|
|
|
|
2020-01-31 17:50:28 +08:00
|
|
|
let db = Arc::new(Database::open_or_create(opt.db_path).unwrap());
|
2019-10-31 22:00:36 +08:00
|
|
|
|
2020-02-06 22:41:11 +08:00
|
|
|
let mut api_keys = ApiKeys {
|
|
|
|
master: opt.master_key.clone(),
|
|
|
|
private: None,
|
|
|
|
public: None,
|
|
|
|
};
|
|
|
|
|
|
|
|
api_keys.generate_missing_api_keys();
|
|
|
|
|
2019-10-31 22:00:36 +08:00
|
|
|
let inner_data = DataInner {
|
|
|
|
db: db.clone(),
|
|
|
|
db_path,
|
2020-02-06 22:41:11 +08:00
|
|
|
api_keys,
|
2019-10-31 22:00:36 +08:00
|
|
|
server_pid,
|
|
|
|
};
|
|
|
|
|
|
|
|
let data = Data {
|
|
|
|
inner: Arc::new(inner_data),
|
|
|
|
};
|
|
|
|
|
2019-11-16 00:33:06 +08:00
|
|
|
let callback_context = data.clone();
|
2019-11-19 23:15:49 +08:00
|
|
|
db.set_update_callback(Box::new(move |index_uid, status| {
|
|
|
|
index_update_callback(&index_uid, &callback_context, status);
|
2019-11-16 00:33:06 +08:00
|
|
|
}));
|
2019-10-31 22:00:36 +08:00
|
|
|
|
|
|
|
data
|
|
|
|
}
|
|
|
|
}
|