use std::collections::HashSet;
use std::fs::File;
use std::io::BufWriter;

use crate::{autobatcher::BatchKind, Error, IndexScheduler, Result, TaskId};
use dump::IndexMetadata;
use meilisearch_types::milli::documents::obkv_to_object;
use meilisearch_types::tasks::{Details, Kind, KindWithContent, Status, Task};
use log::{debug, info};
use meilisearch_types::milli::update::IndexDocumentsConfig;
use meilisearch_types::milli::update::{
    DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsMethod,
};
use meilisearch_types::milli::{
    self, documents::DocumentsBatchReader, update::Settings as MilliSettings, BEU32,
};
use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked};
use meilisearch_types::{
    heed::{RoTxn, RwTxn},
    Index,
};
use roaring::RoaringBitmap;
use uuid::Uuid;

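/// A group of tasks that can be processed together as a single batch: either one global
/// task (task cancelation, task deletion, snapshot, dump), one operation on an index, or
/// an index creation/update/deletion together with the tasks it carries.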
#[derive(Debug)]
pub(crate) enum Batch {
    Cancel(Task),
    TaskDeletion(Task),
    Snapshot(Vec<Task>),
    Dump(Task),
    IndexOperation(IndexOperation),
    IndexCreation {
        index_uid: String,
        primary_key: Option<String>,
        task: Task,
    },
    IndexUpdate {
        index_uid: String,
        primary_key: Option<String>,
        task: Task,
    },
    IndexDeletion {
        index_uid: String,
        tasks: Vec<Task>,
    },
}

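/// An operation, or chain of operations, applied to a single index within one batch
/// (document import/deletion/clear, settings updates, or combinations of them).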
#[derive(Debug)]
pub(crate) enum IndexOperation {
    DocumentImport {
        index_uid: String,
        primary_key: Option<String>,
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
        documents_counts: Vec<u64>,
        content_files: Vec<Uuid>,
        tasks: Vec<Task>,
    },
    DocumentDeletion {
        index_uid: String,
        documents: Vec<String>,
        tasks: Vec<Task>,
    },
    DocumentClear {
        index_uid: String,
        tasks: Vec<Task>,
    },
    Settings {
        index_uid: String,
        // TODO: document this boolean; it is filled from `KindWithContent::Settings::is_deletion`.
        settings: Vec<(bool, Settings<Unchecked>)>,
        allow_index_creation: bool,
        tasks: Vec<Task>,
    },
    DocumentClearAndSetting {
        index_uid: String,
        cleared_tasks: Vec<Task>,

        // TODO: document this boolean; it is filled from `KindWithContent::Settings::is_deletion`.
        settings: Vec<(bool, Settings<Unchecked>)>,
        allow_index_creation: bool,
        settings_tasks: Vec<Task>,
    },
    SettingsAndDocumentImport {
        index_uid: String,

        primary_key: Option<String>,
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
        documents_counts: Vec<u64>,
        content_files: Vec<Uuid>,
        document_import_tasks: Vec<Task>,

        // TODO: document this boolean; it is filled from `KindWithContent::Settings::is_deletion`.
        settings: Vec<(bool, Settings<Unchecked>)>,
        settings_tasks: Vec<Task>,
    },
}

impl Batch {
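    /// Return the ids of all the tasks contained in this batch.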
    pub fn ids(&self) -> Vec<TaskId> {
        match self {
            Batch::Cancel(task)
            | Batch::TaskDeletion(task)
            | Batch::Dump(task)
            | Batch::IndexCreation { task, .. }
            | Batch::IndexUpdate { task, .. } => vec![task.uid],
            Batch::Snapshot(tasks) | Batch::IndexDeletion { tasks, .. } => {
                tasks.iter().map(|task| task.uid).collect()
            }
            Batch::IndexOperation(operation) => match operation {
                IndexOperation::DocumentImport { tasks, .. }
                | IndexOperation::DocumentDeletion { tasks, .. }
                | IndexOperation::Settings { tasks, .. }
                | IndexOperation::DocumentClear { tasks, .. } => {
                    tasks.iter().map(|task| task.uid).collect()
                }
                IndexOperation::SettingsAndDocumentImport {
                    document_import_tasks: tasks,
                    settings_tasks: other,
                    ..
                }
                | IndexOperation::DocumentClearAndSetting {
                    cleared_tasks: tasks,
                    settings_tasks: other,
                    ..
                } => tasks.iter().chain(other).map(|task| task.uid).collect(),
            },
        }
    }
}

impl IndexScheduler {
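    /// Convert an autobatcher [`BatchKind`] into an executable [`Batch`] for the given
    /// index, fetching the corresponding tasks from the task queue.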
    pub(crate) fn create_next_batch_index(
        &self,
        rtxn: &RoTxn,
        index_uid: String,
        batch: BatchKind,
    ) -> Result<Option<Batch>> {
        match batch {
            BatchKind::DocumentClear { ids } => {
                Ok(Some(Batch::IndexOperation(IndexOperation::DocumentClear {
                    tasks: self.get_existing_tasks(rtxn, ids)?,
                    index_uid,
                })))
            }
            BatchKind::DocumentImport {
                method,
                import_ids,
                allow_index_creation,
            } => {
                let tasks = self.get_existing_tasks(rtxn, import_ids)?;
                let primary_key = match &tasks[0].kind {
                    KindWithContent::DocumentImport { primary_key, .. } => primary_key.clone(),
                    _ => unreachable!(),
                };

                let mut documents_counts = Vec::new();
                let mut content_files = Vec::new();
                for task in &tasks {
                    match task.kind {
                        KindWithContent::DocumentImport {
                            content_file,
                            documents_count,
                            ..
                        } => {
                            documents_counts.push(documents_count);
                            content_files.push(content_file);
                        }
                        _ => unreachable!(),
                    }
                }

                Ok(Some(Batch::IndexOperation(
                    IndexOperation::DocumentImport {
                        index_uid,
                        primary_key,
                        method,
                        allow_index_creation,
                        documents_counts,
                        content_files,
                        tasks,
                    },
                )))
            }
            BatchKind::DocumentDeletion { deletion_ids } => {
                let tasks = self.get_existing_tasks(rtxn, deletion_ids)?;

                let mut documents = Vec::new();
                for task in &tasks {
                    match task.kind {
                        KindWithContent::DocumentDeletion {
                            ref documents_ids, ..
                        } => documents.extend_from_slice(documents_ids),
                        _ => unreachable!(),
                    }
                }

                Ok(Some(Batch::IndexOperation(
                    IndexOperation::DocumentDeletion {
                        index_uid,
                        documents,
                        tasks,
                    },
                )))
            }
            BatchKind::Settings {
                settings_ids,
                allow_index_creation,
            } => {
                let tasks = self.get_existing_tasks(rtxn, settings_ids)?;

                let mut settings = Vec::new();
                for task in &tasks {
                    match task.kind {
                        KindWithContent::Settings {
                            ref new_settings,
                            is_deletion,
                            ..
                        } => settings.push((is_deletion, new_settings.clone())),
                        _ => unreachable!(),
                    }
                }

                Ok(Some(Batch::IndexOperation(IndexOperation::Settings {
                    index_uid,
                    settings,
                    allow_index_creation,
                    tasks,
                })))
            }
            BatchKind::ClearAndSettings {
                other,
                settings_ids,
                allow_index_creation,
            } => {
                let (index_uid, settings, settings_tasks) = match self
                    .create_next_batch_index(
                        rtxn,
                        index_uid,
                        BatchKind::Settings {
                            settings_ids,
                            allow_index_creation,
                        },
                    )?
                    .unwrap()
                {
                    Batch::IndexOperation(IndexOperation::Settings {
                        index_uid,
                        settings,
                        tasks,
                        ..
                    }) => (index_uid, settings, tasks),
                    _ => unreachable!(),
                };

                let (index_uid, cleared_tasks) = match self
                    .create_next_batch_index(
                        rtxn,
                        index_uid,
                        BatchKind::DocumentClear { ids: other },
                    )?
                    .unwrap()
                {
                    Batch::IndexOperation(IndexOperation::DocumentClear { index_uid, tasks }) => {
                        (index_uid, tasks)
                    }
                    _ => unreachable!(),
                };

                Ok(Some(Batch::IndexOperation(
                    IndexOperation::DocumentClearAndSetting {
                        index_uid,
                        cleared_tasks,
                        allow_index_creation,
                        settings,
                        settings_tasks,
                    },
                )))
            }
            BatchKind::SettingsAndDocumentImport {
                settings_ids,
                method,
                allow_index_creation,
                import_ids,
            } => {
                let settings = self.create_next_batch_index(
                    rtxn,
                    index_uid.clone(),
                    BatchKind::Settings {
                        settings_ids,
                        allow_index_creation,
                    },
                )?;

                let document_import = self.create_next_batch_index(
                    rtxn,
                    index_uid.clone(),
                    BatchKind::DocumentImport {
                        method,
                        allow_index_creation,
                        import_ids,
                    },
                )?;

                match (document_import, settings) {
                    (
                        Some(Batch::IndexOperation(IndexOperation::DocumentImport {
                            primary_key,
                            documents_counts,
                            content_files,
                            tasks: document_import_tasks,
                            ..
                        })),
                        Some(Batch::IndexOperation(IndexOperation::Settings {
                            settings,
                            tasks: settings_tasks,
                            ..
                        })),
                    ) => Ok(Some(Batch::IndexOperation(
                        IndexOperation::SettingsAndDocumentImport {
                            index_uid,
                            primary_key,
                            method,
                            allow_index_creation,
                            documents_counts,
                            content_files,
                            document_import_tasks,
                            settings,
                            settings_tasks,
                        },
                    ))),
                    _ => unreachable!(),
                }
            }
            BatchKind::IndexCreation { id } => {
                let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
                let (index_uid, primary_key) = match &task.kind {
                    KindWithContent::IndexCreation {
                        index_uid,
                        primary_key,
                    } => (index_uid.clone(), primary_key.clone()),
                    _ => unreachable!(),
                };
                Ok(Some(Batch::IndexCreation {
                    index_uid,
                    primary_key,
                    task,
                }))
            }
            BatchKind::IndexUpdate { id } => {
                let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
                let primary_key = match &task.kind {
                    KindWithContent::IndexUpdate { primary_key, .. } => primary_key.clone(),
                    _ => unreachable!(),
                };
                Ok(Some(Batch::IndexUpdate {
                    index_uid,
                    primary_key,
                    task,
                }))
            }
            BatchKind::IndexDeletion { ids } => Ok(Some(Batch::IndexDeletion {
                index_uid,
                tasks: self.get_existing_tasks(rtxn, ids)?,
            })),
            BatchKind::IndexSwap { id: _ } => todo!(),
        }
    }

    /// Create the next batch to be processed:
    /// 1. We get the *last* task to cancel.
    /// 2. We get the *next* task to delete.
    /// 3. We get the *next* snapshot to process.
    /// 4. We get the *next* dump to process.
    /// 5. We get the *next* tasks to process for a specific index.
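    ///
    /// Returns `Ok(None)` when there is no enqueued task left to batch.
    ///
    /// A rough sketch of how a scheduler tick is expected to drive this function
    /// (hypothetical caller, for illustration only):
    ///
    /// ```ignore
    /// let rtxn = scheduler.env.read_txn()?;
    /// if let Some(batch) = scheduler.create_next_batch(&rtxn)? {
    ///     drop(rtxn);
    ///     // `process_batch` returns the tasks updated with their final status/details,
    ///     // which the caller is expected to persist back into the task queue.
    ///     let finished_tasks = scheduler.process_batch(batch)?;
    /// }
    /// ```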
    pub(crate) fn create_next_batch(&self, rtxn: &RoTxn) -> Result<Option<Batch>> {
        let enqueued = &self.get_status(rtxn, Status::Enqueued)?;
        let to_cancel = self.get_kind(rtxn, Kind::CancelTask)? & enqueued;

        // 1. we get the last task to cancel.
        if let Some(task_id) = to_cancel.max() {
            return Ok(Some(Batch::Cancel(
                self.get_task(rtxn, task_id)?
                    .ok_or(Error::CorruptedTaskQueue)?,
            )));
        }

        // 2. we get the next task to delete
        let to_delete = self.get_kind(rtxn, Kind::TaskDeletion)? & enqueued;
        if let Some(task_id) = to_delete.min() {
            let task = self
                .get_task(rtxn, task_id)?
                .ok_or(Error::CorruptedTaskQueue)?;

            return Ok(Some(Batch::TaskDeletion(task)));
        }

        // 3. we batch the snapshot.
        let to_snapshot = self.get_kind(rtxn, Kind::Snapshot)? & enqueued;
        if !to_snapshot.is_empty() {
            return Ok(Some(Batch::Snapshot(
                self.get_existing_tasks(rtxn, to_snapshot)?,
            )));
        }

        // 4. we batch the dumps.
        let to_dump = self.get_kind(rtxn, Kind::DumpExport)? & enqueued;
        if let Some(to_dump) = to_dump.min() {
            return Ok(Some(Batch::Dump(
                self.get_task(rtxn, to_dump)?
                    .ok_or(Error::CorruptedTaskQueue)?,
            )));
        }

        // 5. We take the next task and try to batch all the tasks associated with this index.
        if let Some(task_id) = enqueued.min() {
            let task = self
                .get_task(rtxn, task_id)?
                .ok_or(Error::CorruptedTaskQueue)?;

            // This is safe because all the remaining tasks are associated with
            // AT LEAST one index. We can use the right or the left one, it
            // doesn't matter.
            let index_name = task.indexes().unwrap()[0];

            let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;

            // If autobatching is disabled we only take one task at a time.
            let tasks_limit = if self.autobatching_enabled {
                usize::MAX
            } else {
                1
            };

            let enqueued = index_tasks
                .into_iter()
                .take(tasks_limit)
                .map(|task_id| {
                    self.get_task(rtxn, task_id)
                        .and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
                        .map(|task| (task.uid, task.kind))
                })
                .collect::<Result<Vec<_>>>()?;

            if let Some(batchkind) = crate::autobatcher::autobatch(enqueued) {
                return self.create_next_batch_index(rtxn, index_name.to_string(), batchkind);
            }
        }

        // If we found no tasks then we were notified for something that got autobatched
        // somehow and there is nothing to do.
        Ok(None)
    }

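    /// Apply the operations contained in the given batch and return the batched tasks,
    /// updated with their new status, details, and possible error.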
    pub(crate) fn process_batch(&self, batch: Batch) -> Result<Vec<Task>> {
        match batch {
            Batch::Cancel(_) => todo!(),
            Batch::TaskDeletion(mut task) => {
                // 1. Retrieve the tasks that matched the query at enqueue-time.
                let matched_tasks =
                    if let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind {
                        tasks
                    } else {
                        unreachable!()
                    };

                let mut wtxn = self.env.write_txn()?;
                let nbr_deleted_tasks = self.delete_matched_tasks(&mut wtxn, matched_tasks)?;

                task.status = Status::Succeeded;
                match &mut task.details {
                    Some(Details::TaskDeletion {
                        matched_tasks: _,
                        deleted_tasks,
                        original_query: _,
                    }) => {
                        *deleted_tasks = Some(nbr_deleted_tasks);
                    }
                    _ => unreachable!(),
                }

                wtxn.commit()?;
                Ok(vec![task])
            }
            Batch::Snapshot(_) => todo!(),
            Batch::Dump(mut task) => {
                let KindWithContent::DumpExport { keys, instance_uid, dump_uid } = &task.kind else {
                    unreachable!();
                };
                let dump = dump::DumpWriter::new(instance_uid.clone())?;
                let mut d_keys = dump.create_keys()?;

                // 1. dump the keys
                for key in keys {
                    d_keys.push_key(key)?;
                }

                let rtxn = self.env.read_txn()?;

                // 2. dump the tasks
                let mut tasks = dump.create_tasks_queue()?;
                for ret in self.all_tasks.iter(&rtxn)? {
                    let (_, task) = ret?;
                    let content_file = task.content_uuid().map(|uuid| uuid.clone());
                    let mut dump_content_file = tasks.push_task(&task.into())?;

                    // 2.1. Dump the `content_file` associated with the task if there is one.
                    if let Some(content_file) = content_file {
                        let content_file = self.file_store.get_update(content_file)?;

                        let reader = DocumentsBatchReader::from_reader(content_file)
                            .map_err(milli::Error::from)?;

                        let (mut cursor, documents_batch_index) =
                            reader.into_cursor_and_fields_index();

                        while let Some(doc) = cursor.next_document().map_err(milli::Error::from)? {
                            dump_content_file
                                .push_document(&obkv_to_object(&doc, &documents_batch_index)?)?;
                        }
                    }
                }

                // TODO: maybe `self.indexes` could use this rtxn instead of creating its own
                drop(rtxn);

                // 3. Dump the indexes
                for (uid, index) in self.indexes()? {
                    let rtxn = index.read_txn()?;
                    let metadata = IndexMetadata {
                        uid: uid.clone(),
                        primary_key: index.primary_key(&rtxn)?.map(String::from),
                        created_at: index.created_at(&rtxn)?,
                        updated_at: index.updated_at(&rtxn)?,
                    };
                    let mut index_dumper = dump.create_index(&uid, &metadata)?;

                    let fields_ids_map = index.fields_ids_map(&rtxn)?;
                    let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();

                    // 3.1. Dump the documents
                    for ret in index.all_documents(&rtxn)? {
                        let (_id, doc) = ret?;
                        let document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?;
                        index_dumper.push_document(&document)?;
                    }

                    // 3.2. Dump the settings
                    let settings = meilisearch_types::settings::settings(&index, &rtxn)?;
                    index_dumper.settings(&settings)?;
                }

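                // 4. Persist the dump to disk.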
                let path = self.dumps_path.join(format!("{}.dump", dump_uid));
                let file = File::create(path).unwrap();
                dump.persist_to(BufWriter::new(file)).unwrap();

                task.status = Status::Succeeded;

                Ok(vec![task])
            }
            Batch::IndexOperation(operation) => {
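                // Get the index the operation targets; when the operation is allowed to,
                // create the index beforehand instead of failing on a missing one.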
                #[rustfmt::skip]
                let index = match operation {
                    IndexOperation::DocumentDeletion { ref index_uid, .. }
                    | IndexOperation::DocumentClear { ref index_uid, .. } => {
                        // only get the index, don't create it
                        let rtxn = self.env.read_txn()?;
                        self.index_mapper.index(&rtxn, index_uid)?
                    }
                    IndexOperation::DocumentImport { ref index_uid, allow_index_creation, .. }
                    | IndexOperation::Settings { ref index_uid, allow_index_creation, .. }
                    | IndexOperation::DocumentClearAndSetting { ref index_uid, allow_index_creation, .. }
                    | IndexOperation::SettingsAndDocumentImport { ref index_uid, allow_index_creation, .. } => {
                        if allow_index_creation {
                            // create the index if it doesn't already exist
                            let mut wtxn = self.env.write_txn()?;
                            let index = self.index_mapper.create_index(&mut wtxn, index_uid)?;
                            wtxn.commit()?;
                            index
                        } else {
                            let rtxn = self.env.read_txn()?;
                            self.index_mapper.index(&rtxn, index_uid)?
                        }
                    }
                };

                let mut index_wtxn = index.write_txn()?;
                let tasks = self.apply_index_operation(&mut index_wtxn, &index, operation)?;
                index_wtxn.commit()?;

                Ok(tasks)
            }
            Batch::IndexCreation {
                index_uid,
                primary_key,
                task,
            } => {
                let mut wtxn = self.env.write_txn()?;
                self.index_mapper.create_index(&mut wtxn, &index_uid)?;
                wtxn.commit()?;

                self.process_batch(Batch::IndexUpdate {
                    index_uid,
                    primary_key,
                    task,
                })
            }
            Batch::IndexUpdate {
                index_uid,
                primary_key,
                mut task,
            } => {
                let rtxn = self.env.read_txn()?;
                let index = self.index_mapper.index(&rtxn, &index_uid)?;

                if let Some(primary_key) = primary_key.clone() {
                    let mut index_wtxn = index.write_txn()?;
                    let mut builder = MilliSettings::new(
                        &mut index_wtxn,
                        &index,
                        self.index_mapper.indexer_config(),
                    );
                    builder.set_primary_key(primary_key);
                    builder.execute(|_| ())?;
                    index_wtxn.commit()?;
                }

                task.status = Status::Succeeded;
                task.details = Some(Details::IndexInfo { primary_key });

                Ok(vec![task])
            }
            Batch::IndexDeletion {
                index_uid,
                mut tasks,
            } => {
                let wtxn = self.env.write_txn()?;

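                // Grab the number of documents before the index is deleted, so the task
                // details can report how many documents were removed.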
                let number_of_documents = {
                    let index = self.index_mapper.index(&wtxn, &index_uid)?;
                    let index_rtxn = index.read_txn()?;
                    index.number_of_documents(&index_rtxn)?
                };

                // The write transaction is directly owned and committed inside.
                self.index_mapper.delete_index(wtxn, &index_uid)?;

                // We set all the tasks details to the default value.
                for task in &mut tasks {
                    task.status = Status::Succeeded;
                    task.details = match &task.kind {
                        KindWithContent::IndexDeletion { .. } => Some(Details::ClearAll {
                            deleted_documents: Some(number_of_documents),
                        }),
                        otherwise => otherwise.default_details(),
                    };
                }

                Ok(tasks)
            }
        }
    }

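    /// Process the index operation(s) on the given index using the provided write
    /// transaction.
    ///
    /// The transaction is *not* committed here; the caller (see `process_batch`) is
    /// responsible for committing it once the operation succeeds.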
    fn apply_index_operation<'txn, 'i>(
        &self,
        index_wtxn: &'txn mut RwTxn<'i, '_>,
        index: &'i Index,
        operation: IndexOperation,
    ) -> Result<Vec<Task>> {
        match operation {
            IndexOperation::DocumentClear { mut tasks, .. } => {
                let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?;

                let mut first_clear_found = false;
                for task in &mut tasks {
                    task.status = Status::Succeeded;
                    // The first document clear will effectively delete every document
                    // in the database but the next ones will clear 0 documents.
                    task.details = match &task.kind {
                        KindWithContent::DocumentClear { .. } => {
                            let count = if first_clear_found { 0 } else { count };
                            first_clear_found = true;
                            Some(Details::ClearAll {
                                deleted_documents: Some(count),
                            })
                        }
                        otherwise => otherwise.default_details(),
                    };
                }

                Ok(tasks)
            }
            IndexOperation::DocumentImport {
                index_uid: _,
                primary_key,
                method,
                allow_index_creation: _,
                documents_counts,
                content_files,
                mut tasks,
            } => {
                let indexer_config = self.index_mapper.indexer_config();
                // TODO use the code from the IndexCreate operation
                if let Some(primary_key) = primary_key {
                    if index.primary_key(index_wtxn)?.is_none() {
                        let mut builder =
                            milli::update::Settings::new(index_wtxn, index, indexer_config);
                        builder.set_primary_key(primary_key);
                        builder.execute(|_| ())?;
                    }
                }

                let config = IndexDocumentsConfig {
                    update_method: method,
                    ..Default::default()
                };

                let mut builder = milli::update::IndexDocuments::new(
                    index_wtxn,
                    index,
                    indexer_config,
                    config,
                    |indexing_step| debug!("update: {:?}", indexing_step),
                )?;

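                // Feed every content file to the indexing builder, keeping one result per
                // file so that each corresponding task can be updated individually below.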
                let mut results = Vec::new();
                for content_uuid in content_files.into_iter() {
                    let content_file = self.file_store.get_update(content_uuid)?;
                    let reader = DocumentsBatchReader::from_reader(content_file)
                        .map_err(milli::Error::from)?;
                    let (new_builder, user_result) = builder.add_documents(reader)?;
                    builder = new_builder;

                    let user_result = match user_result {
                        Ok(count) => Ok(DocumentAdditionResult {
                            indexed_documents: count,
                            number_of_documents: count, // TODO: this is wrong, we should use the value stored in the Details.
                        }),
                        Err(e) => Err(milli::Error::from(e)),
                    };

                    results.push(user_result);
                }

                if results.iter().any(|res| res.is_ok()) {
                    let addition = builder.execute()?;
                    info!("document addition done: {:?}", addition);
                }

                for (task, (ret, count)) in tasks
                    .iter_mut()
                    .zip(results.into_iter().zip(documents_counts))
                {
                    match ret {
                        Ok(DocumentAdditionResult {
                            indexed_documents,
                            number_of_documents,
                        }) => {
                            task.status = Status::Succeeded;
                            task.details = Some(Details::DocumentAddition {
                                received_documents: number_of_documents,
                                indexed_documents: Some(indexed_documents),
                            });
                        }
                        Err(error) => {
                            task.status = Status::Failed;
                            task.details = Some(Details::DocumentAddition {
                                received_documents: count,
                                indexed_documents: Some(count),
                            });
                            task.error = Some(error.into())
                        }
                    }
                }

                Ok(tasks)
            }
            IndexOperation::DocumentDeletion {
                index_uid: _,
                documents,
                mut tasks,
            } => {
                let mut builder = milli::update::DeleteDocuments::new(index_wtxn, index)?;
                documents.iter().for_each(|id| {
                    builder.delete_external_id(id);
                });

                let DocumentDeletionResult {
                    deleted_documents, ..
                } = builder.execute()?;

                for (task, documents) in tasks.iter_mut().zip(documents) {
                    task.status = Status::Succeeded;
                    task.details = Some(Details::DocumentDeletion {
                        received_document_ids: documents.len(),
                        deleted_documents: Some(deleted_documents),
                    });
                }

                Ok(tasks)
            }
            IndexOperation::Settings {
                index_uid: _,
                settings,
                allow_index_creation: _,
                mut tasks,
            } => {
                let indexer_config = self.index_mapper.indexer_config();
                // TODO merge the settings to only do *one* reindexation.
                for (task, (_, settings)) in tasks.iter_mut().zip(settings) {
                    let checked_settings = settings.clone().check();
                    task.details = Some(Details::Settings { settings });

                    let mut builder =
                        milli::update::Settings::new(index_wtxn, index, indexer_config);
                    apply_settings_to_builder(&checked_settings, &mut builder);
                    builder.execute(|indexing_step| {
                        debug!("update: {:?}", indexing_step);
                    })?;

                    task.status = Status::Succeeded;
                }

                Ok(tasks)
            }
            IndexOperation::SettingsAndDocumentImport {
                index_uid,
                primary_key,
                method,
                allow_index_creation,
                documents_counts,
                content_files,
                document_import_tasks,
                settings,
                settings_tasks,
            } => {
                let settings_tasks = self.apply_index_operation(
                    index_wtxn,
                    index,
                    IndexOperation::Settings {
                        index_uid: index_uid.clone(),
                        settings,
                        allow_index_creation,
                        tasks: settings_tasks,
                    },
                )?;

                let mut import_tasks = self.apply_index_operation(
                    index_wtxn,
                    index,
                    IndexOperation::DocumentImport {
                        index_uid,
                        primary_key,
                        method,
                        allow_index_creation,
                        documents_counts,
                        content_files,
                        tasks: document_import_tasks,
                    },
                )?;

                let mut tasks = settings_tasks;
                tasks.append(&mut import_tasks);
                Ok(tasks)
            }
            IndexOperation::DocumentClearAndSetting {
                index_uid,
                cleared_tasks,
                settings,
                allow_index_creation,
                settings_tasks,
            } => {
                let mut import_tasks = self.apply_index_operation(
                    index_wtxn,
                    index,
                    IndexOperation::DocumentClear {
                        index_uid: index_uid.clone(),
                        tasks: cleared_tasks,
                    },
                )?;

                let settings_tasks = self.apply_index_operation(
                    index_wtxn,
                    index,
                    IndexOperation::Settings {
                        index_uid,
                        settings,
                        allow_index_creation,
                        tasks: settings_tasks,
                    },
                )?;

                let mut tasks = settings_tasks;
                tasks.append(&mut import_tasks);
                Ok(tasks)
            }
        }
    }

    /// Delete each given task from all the databases (if it is deletable).
    ///
    /// Return the number of tasks that were actually deleted.
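    ///
    /// Tasks that are still enqueued or currently processing are never deleted.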
    fn delete_matched_tasks(
        &self,
        wtxn: &mut RwTxn,
        matched_tasks: &RoaringBitmap,
    ) -> Result<usize> {
        // 1. Remove from this list the tasks that we are not allowed to delete
        let enqueued_tasks = self.get_status(wtxn, Status::Enqueued)?;
        let processing_tasks = &self.processing_tasks.read().unwrap().1;

        let all_task_ids = self.all_task_ids(&wtxn)?;
        let mut to_delete_tasks = all_task_ids & matched_tasks;
        to_delete_tasks -= processing_tasks;
        to_delete_tasks -= enqueued_tasks;

        // 2. We now have a list of tasks to delete, delete them

        let mut affected_indexes = HashSet::new();
        let mut affected_statuses = HashSet::new();
        let mut affected_kinds = HashSet::new();

        for task_id in to_delete_tasks.iter() {
            // This should never fail, but there is no harm done if it does. The function
            // will still be 99% correct (the number of deleted tasks will be slightly incorrect).
            if let Some(task) = self.get_task(wtxn, task_id)? {
                if let Some(task_indexes) = task.indexes() {
                    affected_indexes.extend(task_indexes.into_iter().map(|x| x.to_owned()));
                }
                affected_statuses.insert(task.status);
                affected_kinds.insert(task.kind.as_kind());
            }
        }
        for index in affected_indexes {
            self.update_index(wtxn, &index, |bitmap| {
                *bitmap -= &to_delete_tasks;
            })?;
        }
        for status in affected_statuses {
            self.update_status(wtxn, status, |bitmap| {
                *bitmap -= &to_delete_tasks;
            })?;
        }
        for kind in affected_kinds {
            self.update_kind(wtxn, kind, |bitmap| {
                *bitmap -= &to_delete_tasks;
            })?;
        }
        for task in to_delete_tasks.iter() {
            self.all_tasks.delete(wtxn, &BEU32::new(task))?;
        }
        Ok(to_delete_tasks.len() as usize)
    }
}