2022-02-14 22:32:41 +08:00
|
|
|
use std::fmt::Write;
|
2022-05-17 22:08:23 +08:00
|
|
|
use std::str::FromStr;
|
2022-02-14 22:32:41 +08:00
|
|
|
use std::write;
|
|
|
|
|
2021-12-02 23:03:26 +08:00
|
|
|
use meilisearch_error::ResponseError;
|
|
|
|
use meilisearch_lib::index::{Settings, Unchecked};
|
|
|
|
use meilisearch_lib::milli::update::IndexDocumentsMethod;
|
2022-01-19 18:21:19 +08:00
|
|
|
use meilisearch_lib::tasks::batch::BatchId;
|
2021-12-02 23:03:26 +08:00
|
|
|
use meilisearch_lib::tasks::task::{
|
|
|
|
DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
|
|
|
|
};
|
2022-05-17 22:08:23 +08:00
|
|
|
use serde::{Deserialize, Serialize, Serializer};
|
2022-02-14 22:32:41 +08:00
|
|
|
use time::{Duration, OffsetDateTime};
|
2021-12-02 23:03:26 +08:00
|
|
|
|
2022-01-19 18:21:19 +08:00
|
|
|
use crate::AUTOBATCHING_ENABLED;
|
|
|
|
|
2022-05-17 22:08:23 +08:00
|
|
|
/// The type of a task, as exposed on the public task routes.
///
/// Serialized in camelCase (e.g. `indexCreation`); the string values must
/// stay in sync with the ones accepted by the `FromStr` implementation below.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TaskType {
    IndexCreation,
    IndexUpdate,
    IndexDeletion,
    /// Document addition using the `ReplaceDocuments` merge strategy.
    DocumentAddition,
    /// Document addition using the `UpdateDocuments` merge strategy.
    DocumentPartial,
    /// Deletion of an explicit list of document ids.
    DocumentDeletion,
    SettingsUpdate,
    /// Deletion of all the documents of an index (`DocumentDeletion::Clear`).
    ClearAll,
    DumpCreation,
}
|
|
|
|
|
|
|
|
impl From<TaskContent> for TaskType {
|
|
|
|
fn from(other: TaskContent) -> Self {
|
|
|
|
match other {
|
|
|
|
TaskContent::DocumentAddition {
|
|
|
|
merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
|
|
|
|
..
|
2021-12-15 16:49:39 +08:00
|
|
|
} => TaskType::DocumentAddition,
|
2021-12-02 23:03:26 +08:00
|
|
|
TaskContent::DocumentAddition {
|
|
|
|
merge_strategy: IndexDocumentsMethod::UpdateDocuments,
|
|
|
|
..
|
2021-12-15 16:49:39 +08:00
|
|
|
} => TaskType::DocumentPartial,
|
2021-12-02 23:03:26 +08:00
|
|
|
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
|
2021-12-15 16:49:39 +08:00
|
|
|
TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
|
2021-12-02 23:03:26 +08:00
|
|
|
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
|
|
|
|
TaskContent::IndexDeletion => TaskType::IndexDeletion,
|
|
|
|
TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
|
|
|
|
TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
|
2022-05-23 16:54:49 +08:00
|
|
|
TaskContent::Dump { .. } => TaskType::DumpCreation,
|
2021-12-02 23:03:26 +08:00
|
|
|
_ => unreachable!("unexpected task type"),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-17 22:08:23 +08:00
|
|
|
impl FromStr for TaskType {
|
2022-05-18 18:07:06 +08:00
|
|
|
type Err = String;
|
2022-05-17 22:08:23 +08:00
|
|
|
|
2022-05-18 18:07:06 +08:00
|
|
|
fn from_str(status: &str) -> Result<Self, String> {
|
2022-05-17 22:08:23 +08:00
|
|
|
match status {
|
|
|
|
"indexCreation" => Ok(TaskType::IndexCreation),
|
|
|
|
"indexUpdate" => Ok(TaskType::IndexUpdate),
|
|
|
|
"indexDeletion" => Ok(TaskType::IndexDeletion),
|
|
|
|
"documentAddition" => Ok(TaskType::DocumentAddition),
|
|
|
|
"documentPartial" => Ok(TaskType::DocumentPartial),
|
|
|
|
"documentDeletion" => Ok(TaskType::DocumentDeletion),
|
|
|
|
"settingsUpdate" => Ok(TaskType::SettingsUpdate),
|
|
|
|
"clearAll" => Ok(TaskType::ClearAll),
|
2022-05-18 18:07:06 +08:00
|
|
|
unknown => Err(format!(
|
|
|
|
"invalid task type `{}` value, expecting one of: \
|
|
|
|
indexCreation, indexUpdate, indexDeletion, documentAddition, \
|
|
|
|
documentPartial, documentDeletion, settingsUpdate, or clearAll",
|
|
|
|
unknown
|
|
|
|
)),
|
2022-05-17 22:08:23 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// The life-cycle status of a task, as exposed on the public task routes.
///
/// Serialized in camelCase; the string values must stay in sync with the
/// ones accepted by the `FromStr` implementation below.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TaskStatus {
    Enqueued,
    Processing,
    Succeeded,
    Failed,
}
|
|
|
|
|
2022-05-17 22:08:23 +08:00
|
|
|
impl FromStr for TaskStatus {
|
2022-05-18 18:07:06 +08:00
|
|
|
type Err = String;
|
2022-05-17 22:08:23 +08:00
|
|
|
|
2022-05-18 18:07:06 +08:00
|
|
|
fn from_str(status: &str) -> Result<Self, String> {
|
2022-05-17 22:08:23 +08:00
|
|
|
match status {
|
|
|
|
"enqueued" => Ok(TaskStatus::Enqueued),
|
|
|
|
"processing" => Ok(TaskStatus::Processing),
|
|
|
|
"succeeded" => Ok(TaskStatus::Succeeded),
|
|
|
|
"failed" => Ok(TaskStatus::Failed),
|
2022-05-18 18:07:06 +08:00
|
|
|
unknown => Err(format!(
|
|
|
|
"invalid task status `{}` value, expecting one of: \
|
|
|
|
enqueued, processing, succeeded, or failed",
|
|
|
|
unknown
|
|
|
|
)),
|
2022-05-17 22:08:23 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-12-02 23:03:26 +08:00
|
|
|
/// Type-specific details attached to a [`TaskView`].
///
/// Serialized untagged: only the fields of the active variant appear in the
/// JSON `details` object.
#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
enum TaskDetails {
    #[serde(rename_all = "camelCase")]
    DocumentAddition {
        /// Number of documents received in the payload.
        received_documents: usize,
        /// Filled in once the task has been processed (set to 0 on failure).
        indexed_documents: Option<u64>,
    },
    #[serde(rename_all = "camelCase")]
    Settings {
        /// Flattened so the settings fields appear directly in `details`.
        #[serde(flatten)]
        settings: Settings<Unchecked>,
    },
    #[serde(rename_all = "camelCase")]
    IndexInfo { primary_key: Option<String> },
    #[serde(rename_all = "camelCase")]
    DocumentDeletion {
        /// Number of document ids received in the deletion request.
        received_document_ids: usize,
        /// Filled in once the task has been processed (set to 0 on failure).
        deleted_documents: Option<u64>,
    },
    /// Also used for index deletion tasks (see `From<Task> for TaskView`).
    #[serde(rename_all = "camelCase")]
    ClearAll { deleted_documents: Option<u64> },
    #[serde(rename_all = "camelCase")]
    Dump { dump_uid: String },
}
|
|
|
|
|
2022-02-14 22:32:41 +08:00
|
|
|
/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
/// https://github.com/time-rs/time/issues/378.
/// This code is a port of the old code of time that was removed in 0.2.
///
/// `None` (and negative durations, which ISO 8601 cannot represent) are
/// serialized as `null`; otherwise the output looks like `P1DT2.345S`.
fn serialize_duration<S: Serializer>(
    duration: &Option<Duration>,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    match duration {
        Some(duration) => {
            // technically speaking, negative duration is not valid ISO 8601
            if duration.is_negative() {
                return serializer.serialize_none();
            }

            // Split the duration into whole days and the seconds remaining
            // within the last day.
            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
            let secs = duration.whole_seconds();
            let days = secs / SECS_PER_DAY;
            let secs = secs - days * SECS_PER_DAY;
            let hasdate = days != 0;
            let nanos = duration.subsec_nanoseconds();
            // Always emit a time component when there is no date component,
            // so the output is never a bare "P".
            let hastime = (secs != 0 || nanos != 0) || !hasdate;

            // all the following unwrap can't fail
            let mut res = String::new();
            write!(&mut res, "P").unwrap();

            if hasdate {
                write!(&mut res, "{}D", days).unwrap();
            }

            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();

            if hastime {
                // Use the shortest fractional representation that is exact:
                // whole seconds, then milli-, micro-, or nanosecond digits.
                if nanos == 0 {
                    write!(&mut res, "T{}S", secs).unwrap();
                } else if nanos % NANOS_PER_MILLI == 0 {
                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
                } else if nanos % NANOS_PER_MICRO == 0 {
                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
                } else {
                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
                }
            }

            serializer.serialize_str(&res)
        }
        None => serializer.serialize_none(),
    }
}
|
|
|
|
|
|
|
|
/// The full public representation of a task, built from an internal `Task`
/// via the `From<Task>` implementation below.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
    uid: TaskId,
    /// `None` when the task is not associated with an index.
    index_uid: Option<String>,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    /// Type-specific details; the field is omitted from the JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    details: Option<TaskDetails>,
    /// Only set when the task failed; omitted from the JSON otherwise.
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<ResponseError>,
    /// Time between the first `Processing` event and the terminal event,
    /// serialized as best-effort ISO 8601 (see `serialize_duration`).
    #[serde(serialize_with = "serialize_duration")]
    duration: Option<Duration>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    started_at: Option<OffsetDateTime>,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    finished_at: Option<OffsetDateTime>,
    /// Outer `Option`: only `Some` when autobatching is enabled (the field is
    /// absent from the JSON otherwise); inner `Option`: the batch this task
    /// was assigned to, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    batch_uid: Option<Option<BatchId>>,
}
|
|
|
|
|
|
|
|
impl From<Task> for TaskView {
    /// Builds the public task representation from an internal `Task`,
    /// deriving the type and details from the task content, and the status,
    /// error and timestamps from the task's event history.
    fn from(task: Task) -> Self {
        let Task {
            id,
            index_uid,
            content,
            events,
        } = task;

        // Map the internal content to a public type plus pre-filled details.
        // `details` is mutable because terminal events patch the result
        // counters below.
        let (task_type, mut details) = match content {
            TaskContent::DocumentAddition {
                merge_strategy,
                documents_count,
                ..
            } => {
                let details = TaskDetails::DocumentAddition {
                    received_documents: documents_count,
                    // Unknown until the task has been processed.
                    indexed_documents: None,
                };

                let task_type = match merge_strategy {
                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
                    _ => unreachable!("Unexpected document merge strategy."),
                };

                (task_type, Some(details))
            }
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::DocumentDeletion {
                    received_document_ids: ids.len(),
                    deleted_documents: None,
                }),
            ),
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
                TaskType::ClearAll,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            // Index deletion reuses the `ClearAll` details to report the
            // number of documents removed along with the index.
            TaskContent::IndexDeletion => (
                TaskType::IndexDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::SettingsUpdate { settings, .. } => (
                TaskType::SettingsUpdate,
                Some(TaskDetails::Settings { settings }),
            ),
            TaskContent::IndexCreation { primary_key } => (
                TaskType::IndexCreation,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::IndexUpdate { primary_key } => (
                TaskType::IndexUpdate,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::Dump { uid } => (
                TaskType::DumpCreation,
                Some(TaskDetails::Dump { dump_uid: uid }),
            ),
        };

        // An event always has at least one event: "Created"
        // The last event determines the current status; terminal events also
        // patch the details with the processing result.
        let (status, error, finished_at) = match events.last().unwrap() {
            TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
            TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
            TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
            // NOTE: `Succeded` is the variant's spelling in meilisearch-lib.
            TaskEvent::Succeded { timestamp, result } => {
                // Copy the result counters into the matching details variant;
                // mismatched pairs are left untouched.
                match (result, &mut details) {
                    (
                        TaskResult::DocumentAddition {
                            indexed_documents: num,
                            ..
                        },
                        Some(TaskDetails::DocumentAddition {
                            ref mut indexed_documents,
                            ..
                        }),
                    ) => {
                        indexed_documents.replace(*num);
                    }
                    (
                        TaskResult::DocumentDeletion {
                            deleted_documents: docs,
                            ..
                        },
                        Some(TaskDetails::DocumentDeletion {
                            ref mut deleted_documents,
                            ..
                        }),
                    ) => {
                        deleted_documents.replace(*docs);
                    }
                    (
                        TaskResult::ClearAll {
                            deleted_documents: docs,
                        },
                        Some(TaskDetails::ClearAll {
                            ref mut deleted_documents,
                        }),
                    ) => {
                        deleted_documents.replace(*docs);
                    }
                    _ => (),
                }
                (TaskStatus::Succeeded, None, Some(*timestamp))
            }
            TaskEvent::Failed { timestamp, error } => {
                // On failure, report zero processed documents in the details.
                match details {
                    Some(TaskDetails::DocumentDeletion {
                        ref mut deleted_documents,
                        ..
                    }) => {
                        deleted_documents.replace(0);
                    }
                    Some(TaskDetails::ClearAll {
                        ref mut deleted_documents,
                        ..
                    }) => {
                        deleted_documents.replace(0);
                    }
                    Some(TaskDetails::DocumentAddition {
                        ref mut indexed_documents,
                        ..
                    }) => {
                        indexed_documents.replace(0);
                    }
                    _ => (),
                }
                (TaskStatus::Failed, Some(error.clone()), Some(*timestamp))
            }
        };

        // The first event carries the enqueue date.
        let enqueued_at = match events.first() {
            Some(TaskEvent::Created(ts)) => *ts,
            _ => unreachable!("A task must always have a creation event."),
        };

        // The start date is the first `Processing` event, if any.
        let started_at = events.iter().find_map(|e| match e {
            TaskEvent::Processing(ts) => Some(*ts),
            _ => None,
        });

        // Only meaningful once the task both started and finished.
        let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));

        // Expose the batch id only when autobatching is enabled, so the
        // field is entirely absent from the JSON otherwise.
        let batch_uid = if AUTOBATCHING_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
            let id = events.iter().find_map(|e| match e {
                TaskEvent::Batched { batch_id, .. } => Some(*batch_id),
                _ => None,
            });
            Some(id)
        } else {
            None
        };

        Self {
            uid: id,
            index_uid: index_uid.map(|u| u.into_inner()),
            status,
            task_type,
            details,
            error,
            duration,
            enqueued_at,
            started_at,
            finished_at,
            batch_uid,
        }
    }
}
|
|
|
|
|
|
|
|
/// Envelope serializing a list of tasks under a `results` field.
#[derive(Debug, Serialize)]
pub struct TaskListView {
    results: Vec<TaskView>,
}
|
|
|
|
|
|
|
|
impl From<Vec<TaskView>> for TaskListView {
|
|
|
|
fn from(results: Vec<TaskView>) -> Self {
|
|
|
|
Self { results }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// A minimal task representation: uid, index, status, type and enqueue date
/// only. Built with `status` fixed to `Enqueued` by the `From<Task>`
/// implementation below.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    task_uid: TaskId,
    /// `None` when the task is not associated with an index.
    index_uid: Option<String>,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
}
|
|
|
|
|
|
|
|
impl From<Task> for SummarizedTaskView {
|
|
|
|
fn from(mut other: Task) -> Self {
|
|
|
|
let created_event = other
|
|
|
|
.events
|
|
|
|
.drain(..1)
|
|
|
|
.next()
|
|
|
|
.expect("Task must have an enqueued event.");
|
|
|
|
|
|
|
|
let enqueued_at = match created_event {
|
|
|
|
TaskEvent::Created(ts) => ts,
|
|
|
|
_ => unreachable!("The first event of a task must always be 'Created'"),
|
|
|
|
};
|
|
|
|
|
|
|
|
Self {
|
2022-05-17 17:17:32 +08:00
|
|
|
task_uid: other.id,
|
2022-05-17 01:50:45 +08:00
|
|
|
index_uid: other.index_uid.map(|u| u.into_inner()),
|
2021-12-02 23:03:26 +08:00
|
|
|
status: TaskStatus::Enqueued,
|
|
|
|
task_type: other.content.into(),
|
|
|
|
enqueued_at,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|