Rename index_name by index_uid
This commit is contained in:
parent 5527457655
commit a90facaa41
@@ -22,7 +22,7 @@ struct IndexCommand {
     database_path: PathBuf,

     #[structopt(long, default_value = "default")]
-    index_name: String,
+    index_uid: String,

     /// The csv file to index.
     #[structopt(parse(from_os_str))]
@@ -46,7 +46,7 @@ struct SearchCommand {
     database_path: PathBuf,

     #[structopt(long, default_value = "default")]
-    index_name: String,
+    index_uid: String,

     /// Timeout after which the search will return results.
     #[structopt(long)]
@@ -76,7 +76,7 @@ struct ShowUpdatesCommand {
     database_path: PathBuf,

     #[structopt(long, default_value = "default")]
-    index_name: String,
+    index_uid: String,
 }

 #[derive(Debug, StructOpt)]
@@ -106,9 +106,9 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box<dy
     let (sender, receiver) = mpsc::sync_channel(100);
     let update_fn =
         move |_name: &str, update: ProcessedUpdateResult| sender.send(update.update_id).unwrap();
-    let index = match database.open_index(&command.index_name) {
+    let index = match database.open_index(&command.index_uid) {
         Some(index) => index,
-        None => database.create_index(&command.index_name).unwrap(),
+        None => database.create_index(&command.index_uid).unwrap(),
     };

     database.set_update_callback(Box::new(update_fn));
@@ -318,7 +318,7 @@ fn crop_text(
 fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<dyn Error>> {
     let env = &database.env;
     let index = database
-        .open_index(&command.index_name)
+        .open_index(&command.index_uid)
         .expect("Could not find index");

     let reader = env.read_txn().unwrap();
@@ -446,7 +446,7 @@ fn show_updates_command(
 ) -> Result<(), Box<dyn Error>> {
     let env = &database.env;
     let index = database
-        .open_index(&command.index_name)
+        .open_index(&command.index_uid)
         .expect("Could not find index");

     let reader = env.read_txn().unwrap();

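Because structopt derives flag names from field names (kebab-cased), the renamed field is now exposed on the command line as --index-uid instead of --index-name. A minimal, self-contained sketch of that behaviour, trimmed down to the one renamed field from the structs above (the other fields and the binary name are omitted, so treat this as an illustration rather than the crate's actual CLI):

use structopt::StructOpt;

/// Trimmed-down copy of the command struct above, kept only to show the flag name.
#[derive(Debug, StructOpt)]
struct IndexCommand {
    /// The index to target; falls back to "default" when the flag is absent.
    #[structopt(long, default_value = "default")]
    index_uid: String,
}

fn main() {
    // e.g. `my-example --index-uid movies`; with no flag the value is "default".
    let command = IndexCommand::from_args();
    println!("targeting index {}", command.index_uid);
}
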
@@ -45,7 +45,7 @@ pub type UpdateEventsEmitter = Sender<UpdateEvent>;
 fn update_awaiter(
     receiver: UpdateEvents,
     env: heed::Env,
-    index_name: &str,
+    index_uid: &str,
     update_fn: Arc<ArcSwapFn>,
     index: Index,
 ) {
@@ -91,7 +91,7 @@ fn update_awaiter(

         // call the user callback when the update and the result are written consistently
         if let Some(ref callback) = *update_fn.load() {
-            (callback)(index_name, status);
+            (callback)(index_uid, status);
         }
     }
 }
@@ -116,22 +116,22 @@ impl Database {
         let mut must_open = Vec::new();
         let reader = env.read_txn()?;
         for result in indexes_store.iter(&reader)? {
-            let (index_name, _) = result?;
-            must_open.push(index_name.to_owned());
+            let (index_uid, _) = result?;
+            must_open.push(index_uid.to_owned());
         }

         reader.abort();

         // open the previously aggregated indexes
         let mut indexes = HashMap::new();
-        for index_name in must_open {
+        for index_uid in must_open {
             let (sender, receiver) = crossbeam_channel::bounded(100);
-            let index = match store::open(&env, &index_name, sender.clone())? {
+            let index = match store::open(&env, &index_uid, sender.clone())? {
                 Some(index) => index,
                 None => {
                     log::warn!(
                         "the index {} doesn't exist or has not all the databases",
-                        index_name
+                        index_uid
                     );
                     continue;
                 }
@@ -139,7 +139,7 @@ impl Database {

             let env_clone = env.clone();
             let index_clone = index.clone();
-            let name_clone = index_name.clone();
+            let name_clone = index_uid.clone();
             let update_fn_clone = update_fn.clone();

             let handle = thread::spawn(move || {
@@ -156,7 +156,7 @@ impl Database {
             // possible pre-boot updates are consumed
             sender.send(UpdateEvent::NewUpdate).unwrap();

-            let result = indexes.insert(index_name, (index, handle));
+            let result = indexes.insert(index_uid, (index, handle));
             assert!(
                 result.is_none(),
                 "The index should not have been already open"

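The update callback keeps the same shape throughout this section; only the conventional name of its first argument changes from index_name to index_uid. A simplified sketch of that shape, using stand-in types rather than the crate's own (ProcessedUpdateResult is reduced to the one field forwarded in the hunk above, and the Send + Sync bounds are an assumption for the illustration):

// Stand-in for the crate's ProcessedUpdateResult, reduced to the update id
// that the example hunk forwards through the channel.
struct ProcessedUpdateResult {
    update_id: u64,
}

// Same general shape as the boxed callback handed to set_update_callback:
// first argument is the index uid, second the processed update result.
type UpdateCallback = Box<dyn Fn(&str, ProcessedUpdateResult) + Send + Sync>;

fn main() {
    let callback: UpdateCallback = Box::new(|index_uid: &str, status: ProcessedUpdateResult| {
        println!("index {} finished update {}", index_uid, status.update_id);
    });
    (callback)("movies", ProcessedUpdateResult { update_id: 7 });
}
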
@@ -47,9 +47,9 @@ impl DataInner {
     pub fn last_update(
         &self,
         reader: &heed::RoTxn,
-        index_name: &str,
+        index_uid: &str,
     ) -> MResult<Option<DateTime<Utc>>> {
-        let key = format!("last-update-{}", index_name);
+        let key = format!("last-update-{}", index_uid);
         match self
             .db
             .common_store()
@@ -60,8 +60,8 @@ impl DataInner {
         }
     }

-    pub fn set_last_update(&self, writer: &mut heed::RwTxn, index_name: &str) -> MResult<()> {
-        let key = format!("last-update-{}", index_name);
+    pub fn set_last_update(&self, writer: &mut heed::RwTxn, index_uid: &str) -> MResult<()> {
+        let key = format!("last-update-{}", index_uid);
         self.db
             .common_store()
             .put::<Str, SerdeDatetime>(writer, &key, &Utc::now())
@@ -71,9 +71,9 @@ impl DataInner {
     pub fn fields_frequency(
         &self,
         reader: &heed::RoTxn,
-        index_name: &str,
+        index_uid: &str,
     ) -> MResult<Option<FreqsMap>> {
-        let key = format!("fields-frequency-{}", index_name);
+        let key = format!("fields-frequency-{}", index_uid);
         match self
             .db
             .common_store()
@@ -84,11 +84,11 @@ impl DataInner {
         }
     }

-    pub fn compute_stats(&self, writer: &mut heed::RwTxn, index_name: &str) -> MResult<()> {
-        let index = match self.db.open_index(&index_name) {
+    pub fn compute_stats(&self, writer: &mut heed::RwTxn, index_uid: &str) -> MResult<()> {
+        let index = match self.db.open_index(&index_uid) {
             Some(index) => index,
             None => {
-                error!("Impossible to retrieve index {}", index_name);
+                error!("Impossible to retrieve index {}", index_uid);
                 return Ok(());
             }
         };
@@ -115,7 +115,7 @@ impl DataInner {
             .map(|(a, c)| (schema.attribute_name(a).to_owned(), c))
             .collect();

-        let key = format!("fields-frequency-{}", index_name);
+        let key = format!("fields-frequency-{}", index_uid);
         self.db
             .common_store()
             .put::<Str, SerdeFreqsMap>(writer, &key, &frequency)?;
@@ -144,8 +144,8 @@ impl Data {
         };

         let callback_context = data.clone();
-        db.set_update_callback(Box::new(move |index_name, status| {
-            index_update_callback(&index_name, &callback_context, status);
+        db.set_update_callback(Box::new(move |index_uid, status| {
+            index_update_callback(&index_uid, &callback_context, status);
         }));

         data

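The keys written to the common store are untouched by this rename; only the parameter name fed into the format! calls changes, so existing data stays readable. A small sketch of the key layout, using hypothetical free functions rather than the DataInner methods above (the helper names are illustrative only):

// Hypothetical helpers mirroring the format! calls above; they are not part of
// the crate, only an illustration of the per-index keys in the common store.
fn last_update_key(index_uid: &str) -> String {
    format!("last-update-{}", index_uid)
}

fn fields_frequency_key(index_uid: &str) -> String {
    format!("fields-frequency-{}", index_uid)
}

fn main() {
    assert_eq!(last_update_key("movies"), "last-update-movies");
    assert_eq!(fields_frequency_key("movies"), "fields-frequency-movies");
}
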
@@ -93,12 +93,12 @@ impl ContextExt for Context<Data> {
     }

     fn index(&self) -> Result<Index, ResponseError> {
-        let index_name = self.url_param("index")?;
+        let index_uid = self.url_param("index")?;
         let index = self
             .state()
             .db
-            .open_index(&index_name)
-            .ok_or(ResponseError::index_not_found(index_name))?;
+            .open_index(&index_uid)
+            .ok_or(ResponseError::index_not_found(index_uid))?;
         Ok(index)
     }

@@ -131,7 +131,7 @@ pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
 pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
     ctx.is_allowed(IndexesWrite)?;

-    let index_name = ctx.url_param("index")?;
+    let index_uid = ctx.url_param("index")?;

     let schema = ctx
         .body_json::<SchemaBody>()
@@ -143,8 +143,8 @@ pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
     let mut writer = env.write_txn().map_err(ResponseError::internal)?;

     let index = db
-        .open_index(&index_name)
-        .ok_or(ResponseError::index_not_found(index_name))?;
+        .open_index(&index_uid)
+        .ok_or(ResponseError::index_not_found(index_uid))?;

     let schema: meilidb_schema::Schema = schema.into();
     let update_id = index
@@ -206,12 +206,12 @@ pub async fn get_all_updates_status(ctx: Context<Data>) -> SResult<Response> {

 pub async fn delete_index(ctx: Context<Data>) -> SResult<StatusCode> {
     ctx.is_allowed(IndexesWrite)?;
-    let index_name = ctx.url_param("index")?;
+    let index_uid = ctx.url_param("index")?;

     let found = ctx
         .state()
         .db
-        .delete_index(&index_name)
+        .delete_index(&index_uid)
         .map_err(ResponseError::internal)?;

     if found {
@@ -221,12 +221,12 @@ pub async fn delete_index(ctx: Context<Data>) -> SResult<StatusCode> {
     }
 }

-pub fn index_update_callback(index_name: &str, data: &Data, _status: ProcessedUpdateResult) {
+pub fn index_update_callback(index_uid: &str, data: &Data, _status: ProcessedUpdateResult) {
     let env = &data.db.env;
     let mut writer = env.write_txn().unwrap();

-    data.compute_stats(&mut writer, &index_name).unwrap();
-    data.set_last_update(&mut writer, &index_name).unwrap();
+    data.compute_stats(&mut writer, &index_uid).unwrap();
+    data.set_last_update(&mut writer, &index_uid).unwrap();

     writer.commit().unwrap();
 }

@@ -181,10 +181,10 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
     let par_body = body.clone();
     let responses_per_index: Vec<SResult<_>> = index_list
         .into_par_iter()
-        .map(move |index_name| {
+        .map(move |index_uid| {
             let index: Index = db
-                .open_index(&index_name)
-                .ok_or(ResponseError::index_not_found(&index_name))?;
+                .open_index(&index_uid)
+                .ok_or(ResponseError::index_not_found(&index_uid))?;

             let mut search_builder = index.new_search(par_body.query.clone());

@@ -221,7 +221,7 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
             let response = search_builder
                 .search(&reader)
                 .map_err(ResponseError::internal)?;
-            Ok((index_name, response))
+            Ok((index_uid, response))
         })
         .collect();

@@ -230,11 +230,11 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
     let mut max_query_time = 0;

     for response in responses_per_index {
-        if let Ok((index_name, response)) = response {
+        if let Ok((index_uid, response)) = response {
             if response.processing_time_ms > max_query_time {
                 max_query_time = response.processing_time_ms;
             }
-            hits_map.insert(index_name, response.hits);
+            hits_map.insert(index_uid, response.hits);
         }
     }

@@ -23,7 +23,7 @@ struct IndexStatsResponse {

 pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> {
     ctx.is_allowed(Admin)?;
-    let index_name = ctx.url_param("index")?;
+    let index_uid = ctx.url_param("index")?;
     let index = ctx.index()?;

     let env = &ctx.state().db.env;
@@ -36,19 +36,19 @@ pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> {

     let fields_frequency = ctx
         .state()
-        .fields_frequency(&reader, &index_name)
+        .fields_frequency(&reader, &index_uid)
         .map_err(ResponseError::internal)?
         .unwrap_or_default();

     let is_indexing = ctx
         .state()
-        .is_indexing(&reader, &index_name)
+        .is_indexing(&reader, &index_uid)
         .map_err(ResponseError::internal)?
         .ok_or(ResponseError::not_found("Index not found"))?;

     let last_update = ctx
         .state()
-        .last_update(&reader, &index_name)
+        .last_update(&reader, &index_uid)
         .map_err(ResponseError::internal)?;

     let response = IndexStatsResponse {
@@ -73,11 +73,11 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
     let mut index_list = HashMap::new();

     if let Ok(indexes_set) = ctx.state().db.indexes_names() {
-        for index_name in indexes_set {
+        for index_uid in indexes_set {
             let db = &ctx.state().db;
             let env = &db.env;

-            let index = db.open_index(&index_name).unwrap();
+            let index = db.open_index(&index_uid).unwrap();
             let reader = env.read_txn().map_err(ResponseError::internal)?;

             let number_of_documents = index
@@ -87,19 +87,19 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {

             let fields_frequency = ctx
                 .state()
-                .fields_frequency(&reader, &index_name)
+                .fields_frequency(&reader, &index_uid)
                 .map_err(ResponseError::internal)?
                 .unwrap_or_default();

             let is_indexing = ctx
                 .state()
-                .is_indexing(&reader, &index_name)
+                .is_indexing(&reader, &index_uid)
                 .map_err(ResponseError::internal)?
                 .ok_or(ResponseError::not_found("Index not found"))?;

             let last_update = ctx
                 .state()
-                .last_update(&reader, &index_name)
+                .last_update(&reader, &index_uid)
                 .map_err(ResponseError::internal)?;

             let response = IndexStatsResponse {
@@ -108,7 +108,7 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
                 last_update,
                 fields_frequency,
             };
-            index_list.insert(index_name, response);
+            index_list.insert(index_uid, response);
         }
     }
