Use more precise error codes/messages for the task routes

+ Allow star operator in delete/cancel tasks
+ Rename originalQuery to originalFilters
+ Display error/canceled_by in task view even when they are = null
+ Rename task filter fields by using their plural forms
+ Prepare an error code for canceledBy filter
+ Only return global tasks if the API key action `index.*` is there
This commit is contained in:
Loïc Lecrenier 2022-11-07 12:24:39 +01:00
parent 932414bf72
commit d5638d2c27
6 changed files with 606 additions and 282 deletions

View File

@ -1,4 +1,5 @@
use meilisearch_types::error::{Code, ErrorCode}; use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::tasks::{Kind, Status};
use meilisearch_types::{heed, milli}; use meilisearch_types::{heed, milli};
use thiserror::Error; use thiserror::Error;
@ -28,9 +29,35 @@ pub enum Error {
#[error("Corrupted dump.")] #[error("Corrupted dump.")]
CorruptedDump, CorruptedDump,
#[error( #[error(
"Tasks uids must be a comma-separated list of numbers. `{task_uids}` is invalid {error_message}" "Task `{field}` `{date}` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."
)] )]
InvalidTaskUids { task_uids: String, error_message: String }, InvalidTaskDate { field: String, date: String },
#[error("Task uid `{task_uid}` is invalid. It should only contain numeric characters.")]
InvalidTaskUids { task_uid: String },
#[error(
"Task status `{status}` is invalid. Available task statuses are {}.",
enum_iterator::all::<Status>()
.map(|s| format!("`{s}`"))
.collect::<Vec<String>>()
.join(", ")
)]
InvalidTaskStatuses { status: String },
#[error(
"Task type `{type_}` is invalid. Available task types are {}",
enum_iterator::all::<Kind>()
.map(|s| format!("`{s}`"))
.collect::<Vec<String>>()
.join(", ")
)]
InvalidTaskTypes { type_: String },
#[error(
"Task canceledBy `{canceled_by}` is invalid. It should only contains numeric characters separated by `,` character."
)]
InvalidTaskCanceledBy { canceled_by: String },
#[error(
"{index_uid} is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)."
)]
InvalidIndexUid { index_uid: String },
#[error("Task `{0}` not found.")] #[error("Task `{0}` not found.")]
TaskNotFound(TaskId), TaskNotFound(TaskId),
#[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uid`, `indexUid`, `status`, `type`.")] #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uid`, `indexUid`, `status`, `type`.")]
@ -75,7 +102,12 @@ impl ErrorCode for Error {
Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists, Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
Error::SwapDuplicateIndexesFound(_) => Code::DuplicateIndexFound, Error::SwapDuplicateIndexesFound(_) => Code::DuplicateIndexFound,
Error::SwapDuplicateIndexFound(_) => Code::DuplicateIndexFound, Error::SwapDuplicateIndexFound(_) => Code::DuplicateIndexFound,
Error::InvalidTaskUids { .. } => Code::InvalidTaskUid, Error::InvalidTaskDate { .. } => Code::InvalidTaskDate,
Error::InvalidTaskUids { .. } => Code::InvalidTaskUids,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatuses,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypes,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledBy,
Error::InvalidIndexUid { .. } => Code::InvalidIndexUid,
Error::TaskNotFound(_) => Code::TaskNotFound, Error::TaskNotFound(_) => Code::TaskNotFound,
Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery, Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery,
Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery, Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery,

View File

@ -70,7 +70,7 @@ pub struct Query {
/// The minimum [task id](`meilisearch_types::tasks::Task::uid`) to be matched /// The minimum [task id](`meilisearch_types::tasks::Task::uid`) to be matched
pub from: Option<u32>, pub from: Option<u32>,
/// The allowed [statuses](`meilisearch_types::tasks::Task::status`) of the matched tasks /// The allowed [statuses](`meilisearch_types::tasks::Task::status`) of the matched tasks
pub status: Option<Vec<Status>>, pub statuses: Option<Vec<Status>>,
/// The allowed [kinds](meilisearch_types::tasks::Kind) of the matched tasks. /// The allowed [kinds](meilisearch_types::tasks::Kind) of the matched tasks.
/// ///
/// The kind of a task is given by: /// The kind of a task is given by:
@ -80,11 +80,11 @@ pub struct Query {
/// task.kind.as_kind() /// task.kind.as_kind()
/// # } /// # }
/// ``` /// ```
pub kind: Option<Vec<Kind>>, pub types: Option<Vec<Kind>>,
/// The allowed [index ids](meilisearch_types::tasks::Task::index_uid) of the matched tasks /// The allowed [index ids](meilisearch_types::tasks::Task::index_uid) of the matched tasks
pub index_uid: Option<Vec<String>>, pub index_uids: Option<Vec<String>>,
/// The [task ids](`meilisearch_types::tasks::Task::uid`) to be matched /// The [task ids](`meilisearch_types::tasks::Task::uid`) to be matched
pub uid: Option<Vec<TaskId>>, pub uids: Option<Vec<TaskId>>,
/// Exclusive upper bound of the matched tasks' [`enqueued_at`](meilisearch_types::tasks::Task::enqueued_at) field. /// Exclusive upper bound of the matched tasks' [`enqueued_at`](meilisearch_types::tasks::Task::enqueued_at) field.
pub before_enqueued_at: Option<OffsetDateTime>, pub before_enqueued_at: Option<OffsetDateTime>,
@ -109,10 +109,10 @@ impl Query {
Query { Query {
limit: None, limit: None,
from: None, from: None,
status: None, statuses: None,
kind: None, types: None,
index_uid: None, index_uids: None,
uid: None, uids: None,
before_enqueued_at: None, before_enqueued_at: None,
after_enqueued_at: None, after_enqueued_at: None,
before_started_at: None, before_started_at: None,
@ -125,9 +125,9 @@ impl Query {
/// Add an [index id](meilisearch_types::tasks::Task::index_uid) to the list of permitted indexes. /// Add an [index id](meilisearch_types::tasks::Task::index_uid) to the list of permitted indexes.
pub fn with_index(self, index_uid: String) -> Self { pub fn with_index(self, index_uid: String) -> Self {
let mut index_vec = self.index_uid.unwrap_or_default(); let mut index_vec = self.index_uids.unwrap_or_default();
index_vec.push(index_uid); index_vec.push(index_uid);
Self { index_uid: Some(index_vec), ..self } Self { index_uids: Some(index_vec), ..self }
} }
} }
@ -458,7 +458,7 @@ impl IndexScheduler {
tasks.remove_range(from.saturating_add(1)..); tasks.remove_range(from.saturating_add(1)..);
} }
if let Some(status) = &query.status { if let Some(status) = &query.statuses {
let mut status_tasks = RoaringBitmap::new(); let mut status_tasks = RoaringBitmap::new();
for status in status { for status in status {
match status { match status {
@ -475,12 +475,12 @@ impl IndexScheduler {
tasks &= status_tasks; tasks &= status_tasks;
} }
if let Some(uids) = &query.uid { if let Some(uids) = &query.uids {
let uids = RoaringBitmap::from_iter(uids); let uids = RoaringBitmap::from_iter(uids);
tasks &= &uids; tasks &= &uids;
} }
if let Some(kind) = &query.kind { if let Some(kind) = &query.types {
let mut kind_tasks = RoaringBitmap::new(); let mut kind_tasks = RoaringBitmap::new();
for kind in kind { for kind in kind {
kind_tasks |= self.get_kind(rtxn, *kind)?; kind_tasks |= self.get_kind(rtxn, *kind)?;
@ -488,7 +488,7 @@ impl IndexScheduler {
tasks &= &kind_tasks; tasks &= &kind_tasks;
} }
if let Some(index) = &query.index_uid { if let Some(index) = &query.index_uids {
let mut index_tasks = RoaringBitmap::new(); let mut index_tasks = RoaringBitmap::new();
for index in index { for index in index {
index_tasks |= self.index_tasks(rtxn, index)?; index_tasks |= self.index_tasks(rtxn, index)?;
@ -592,7 +592,7 @@ impl IndexScheduler {
// If the query contains a list of `index_uid`, then we must exclude all the kind that // If the query contains a list of `index_uid`, then we must exclude all the kind that
// aren't associated to one and only one index. // aren't associated to one and only one index.
if query.index_uid.is_some() { if query.index_uids.is_some() {
for kind in enum_iterator::all::<Kind>().filter(|kind| !kind.related_to_one_index()) { for kind in enum_iterator::all::<Kind>().filter(|kind| !kind.related_to_one_index()) {
tasks -= self.get_kind(rtxn, kind)?; tasks -= self.get_kind(rtxn, kind)?;
} }
@ -2218,18 +2218,18 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap(); let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { status: Some(vec![Status::Processing]), ..Default::default() }; let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() };
let tasks = let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick
let query = Query { status: Some(vec![Status::Enqueued]), ..Default::default() }; let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() };
let tasks = let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick
let query = Query { let query = Query {
status: Some(vec![Status::Enqueued, Status::Processing]), statuses: Some(vec![Status::Enqueued, Status::Processing]),
..Default::default() ..Default::default()
}; };
let tasks = let tasks =
@ -2237,7 +2237,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick
let query = Query { let query = Query {
status: Some(vec![Status::Enqueued, Status::Processing]), statuses: Some(vec![Status::Enqueued, Status::Processing]),
after_started_at: Some(start_time), after_started_at: Some(start_time),
..Default::default() ..Default::default()
}; };
@ -2248,7 +2248,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[0,]"); snapshot!(snapshot_bitmap(&tasks), @"[0,]");
let query = Query { let query = Query {
status: Some(vec![Status::Enqueued, Status::Processing]), statuses: Some(vec![Status::Enqueued, Status::Processing]),
before_started_at: Some(start_time), before_started_at: Some(start_time),
..Default::default() ..Default::default()
}; };
@ -2259,7 +2259,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[]"); snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query { let query = Query {
status: Some(vec![Status::Enqueued, Status::Processing]), statuses: Some(vec![Status::Enqueued, Status::Processing]),
after_started_at: Some(start_time), after_started_at: Some(start_time),
before_started_at: Some(start_time + Duration::minutes(1)), before_started_at: Some(start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2278,7 +2278,7 @@ mod tests {
let second_start_time = OffsetDateTime::now_utc(); let second_start_time = OffsetDateTime::now_utc();
let query = Query { let query = Query {
status: Some(vec![Status::Succeeded, Status::Processing]), statuses: Some(vec![Status::Succeeded, Status::Processing]),
after_started_at: Some(start_time), after_started_at: Some(start_time),
before_started_at: Some(start_time + Duration::minutes(1)), before_started_at: Some(start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2291,7 +2291,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]"); snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
let query = Query { let query = Query {
status: Some(vec![Status::Succeeded, Status::Processing]), statuses: Some(vec![Status::Succeeded, Status::Processing]),
before_started_at: Some(start_time), before_started_at: Some(start_time),
..Default::default() ..Default::default()
}; };
@ -2302,7 +2302,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[]"); snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query { let query = Query {
status: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]), statuses: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]),
after_started_at: Some(second_start_time), after_started_at: Some(second_start_time),
before_started_at: Some(second_start_time + Duration::minutes(1)), before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2325,7 +2325,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[2,]"); snapshot!(snapshot_bitmap(&tasks), @"[2,]");
let query = Query { let query = Query {
status: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]), statuses: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]),
after_started_at: Some(second_start_time), after_started_at: Some(second_start_time),
before_started_at: Some(second_start_time + Duration::minutes(1)), before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2347,7 +2347,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[]"); snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query { let query = Query {
status: Some(vec![Status::Failed]), statuses: Some(vec![Status::Failed]),
after_started_at: Some(second_start_time), after_started_at: Some(second_start_time),
before_started_at: Some(second_start_time + Duration::minutes(1)), before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2358,7 +2358,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[2,]"); snapshot!(snapshot_bitmap(&tasks), @"[2,]");
let query = Query { let query = Query {
status: Some(vec![Status::Failed]), statuses: Some(vec![Status::Failed]),
after_started_at: Some(second_start_time), after_started_at: Some(second_start_time),
before_started_at: Some(second_start_time + Duration::minutes(1)), before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2369,8 +2369,8 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[2,]"); snapshot!(snapshot_bitmap(&tasks), @"[2,]");
let query = Query { let query = Query {
status: Some(vec![Status::Failed]), statuses: Some(vec![Status::Failed]),
uid: Some(vec![1]), uids: Some(vec![1]),
after_started_at: Some(second_start_time), after_started_at: Some(second_start_time),
before_started_at: Some(second_start_time + Duration::minutes(1)), before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2381,8 +2381,8 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[]"); snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query { let query = Query {
status: Some(vec![Status::Failed]), statuses: Some(vec![Status::Failed]),
uid: Some(vec![2]), uids: Some(vec![2]),
after_started_at: Some(second_start_time), after_started_at: Some(second_start_time),
before_started_at: Some(second_start_time + Duration::minutes(1)), before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default() ..Default::default()
@ -2417,13 +2417,13 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap(); let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { index_uid: Some(vec!["catto".to_owned()]), ..Default::default() }; let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
let tasks = let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// only the first task associated with catto is returned, the indexSwap tasks are excluded! // only the first task associated with catto is returned, the indexSwap tasks are excluded!
snapshot!(snapshot_bitmap(&tasks), @"[0,]"); snapshot!(snapshot_bitmap(&tasks), @"[0,]");
let query = Query { index_uid: Some(vec!["catto".to_owned()]), ..Default::default() }; let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
let tasks = index_scheduler let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()])) .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
.unwrap(); .unwrap();

View File

@ -271,7 +271,7 @@ pub fn create_all_stats(
let mut indexes = BTreeMap::new(); let mut indexes = BTreeMap::new();
let mut database_size = 0; let mut database_size = 0;
let processing_task = index_scheduler.get_tasks_from_authorized_indexes( let processing_task = index_scheduler.get_tasks_from_authorized_indexes(
Query { status: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() }, Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() },
search_rules.authorized_indexes(), search_rules.authorized_indexes(),
)?; )?;
let processing_index = processing_task.first().and_then(|task| task.index_uid()); let processing_index = processing_task.first().and_then(|task| task.index_uid());

View File

@ -1,3 +1,5 @@
use std::str::FromStr;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse}; use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::{IndexScheduler, Query, TaskId}; use index_scheduler::{IndexScheduler, Query, TaskId};
@ -14,6 +16,8 @@ use serde_json::json;
use time::{Duration, OffsetDateTime}; use time::{Duration, OffsetDateTime};
use tokio::task; use tokio::task;
use self::date_deserializer::{deserialize_date, DeserializeDateOption};
use super::{fold_star_or, SummarizedTaskView}; use super::{fold_star_or, SummarizedTaskView};
use crate::analytics::Analytics; use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*; use crate::extractors::authentication::policies::*;
@ -41,15 +45,10 @@ pub struct TaskView {
pub status: Status, pub status: Status,
#[serde(rename = "type")] #[serde(rename = "type")]
pub kind: Kind, pub kind: Kind,
#[serde(skip_serializing_if = "Option::is_none")]
pub canceled_by: Option<TaskId>, pub canceled_by: Option<TaskId>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub details: Option<DetailsView>, pub details: Option<DetailsView>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<ResponseError>, pub error: Option<ResponseError>,
#[serde(serialize_with = "serialize_duration", default)] #[serde(serialize_with = "serialize_duration", default)]
pub duration: Option<Duration>, pub duration: Option<Duration>,
#[serde(with = "time::serde::rfc3339")] #[serde(with = "time::serde::rfc3339")]
@ -98,7 +97,7 @@ pub struct DetailsView {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub deleted_tasks: Option<Option<u64>>, pub deleted_tasks: Option<Option<u64>>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub original_query: Option<String>, pub original_filters: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub dump_uid: Option<String>, pub dump_uid: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
@ -139,14 +138,14 @@ impl From<Details> for DetailsView {
DetailsView { DetailsView {
matched_tasks: Some(matched_tasks), matched_tasks: Some(matched_tasks),
canceled_tasks: Some(canceled_tasks), canceled_tasks: Some(canceled_tasks),
original_query: Some(original_query), original_filters: Some(original_query),
..DetailsView::default() ..DetailsView::default()
} }
} }
Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => DetailsView { Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => DetailsView {
matched_tasks: Some(matched_tasks), matched_tasks: Some(matched_tasks),
deleted_tasks: Some(deleted_tasks), deleted_tasks: Some(deleted_tasks),
original_query: Some(original_query), original_filters: Some(original_query),
..DetailsView::default() ..DetailsView::default()
}, },
Details::Dump { dump_uid } => { Details::Dump { dump_uid } => {
@ -159,102 +158,276 @@ impl From<Details> for DetailsView {
} }
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TaskCommonQueryRaw {
uids: Option<CS<String>>,
types: Option<CS<StarOr<String>>>,
statuses: Option<CS<StarOr<String>>>,
index_uids: Option<CS<StarOr<String>>>,
}
impl TaskCommonQueryRaw {
fn validate(self) -> Result<TaskCommonQuery, ResponseError> {
let Self { uids, types, statuses, index_uids } = self;
let uids = if let Some(uids) = uids {
Some(
uids.into_iter()
.map(|uid_string| {
uid_string.parse::<u32>().map_err(|_e| {
index_scheduler::Error::InvalidTaskUids { task_uid: uid_string }.into()
})
})
.collect::<Result<Vec<u32>, ResponseError>>()?,
)
} else {
None
};
let types = if let Some(types) = types.and_then(fold_star_or) as Option<Vec<String>> {
Some(
types
.into_iter()
.map(|type_string| {
Kind::from_str(&type_string).map_err(|_e| {
index_scheduler::Error::InvalidTaskTypes { type_: type_string }.into()
})
})
.collect::<Result<Vec<Kind>, ResponseError>>()?,
)
} else {
None
};
let statuses = if let Some(statuses) =
statuses.and_then(fold_star_or) as Option<Vec<String>>
{
Some(
statuses
.into_iter()
.map(|status_string| {
Status::from_str(&status_string).map_err(|_e| {
index_scheduler::Error::InvalidTaskStatuses { status: status_string }
.into()
})
})
.collect::<Result<Vec<Status>, ResponseError>>()?,
)
} else {
None
};
let index_uids =
if let Some(index_uids) = index_uids.and_then(fold_star_or) as Option<Vec<String>> {
Some(
index_uids
.into_iter()
.map(|index_uid_string| {
IndexUid::from_str(&index_uid_string)
.map(|index_uid| index_uid.to_string())
.map_err(|_e| {
index_scheduler::Error::InvalidIndexUid {
index_uid: index_uid_string,
}
.into()
})
})
.collect::<Result<Vec<String>, ResponseError>>()?,
)
} else {
None
};
Ok(TaskCommonQuery { types, uids, statuses, index_uids })
}
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TaskDateQueryRaw {
after_enqueued_at: Option<String>,
before_enqueued_at: Option<String>,
after_started_at: Option<String>,
before_started_at: Option<String>,
after_finished_at: Option<String>,
before_finished_at: Option<String>,
}
impl TaskDateQueryRaw {
fn validate(self) -> Result<TaskDateQuery, ResponseError> {
let Self {
after_enqueued_at,
before_enqueued_at,
after_started_at,
before_started_at,
after_finished_at,
before_finished_at,
} = self;
let mut query = TaskDateQuery {
after_enqueued_at: None,
before_enqueued_at: None,
after_started_at: None,
before_started_at: None,
after_finished_at: None,
before_finished_at: None,
};
for (field_name, string_value, before_or_after, dest) in [
(
"afterEnqueuedAt",
after_enqueued_at,
DeserializeDateOption::After,
&mut query.after_enqueued_at,
),
(
"beforeEnqueuedAt",
before_enqueued_at,
DeserializeDateOption::Before,
&mut query.before_enqueued_at,
),
(
"afterStartedAt",
after_started_at,
DeserializeDateOption::After,
&mut query.after_started_at,
),
(
"beforeStartedAt",
before_started_at,
DeserializeDateOption::Before,
&mut query.before_started_at,
),
(
"afterFinishedAt",
after_finished_at,
DeserializeDateOption::After,
&mut query.after_finished_at,
),
(
"beforeFinishedAt",
before_finished_at,
DeserializeDateOption::Before,
&mut query.before_finished_at,
),
] {
if let Some(string_value) = string_value {
*dest = Some(deserialize_date(field_name, &string_value, before_or_after)?);
}
}
Ok(query)
}
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TasksFilterQueryRaw {
#[serde(flatten)]
common: TaskCommonQueryRaw,
#[serde(default = "DEFAULT_LIMIT")]
limit: u32,
from: Option<TaskId>,
#[serde(flatten)]
dates: TaskDateQueryRaw,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TaskDeletionOrCancelationQueryRaw {
#[serde(flatten)]
common: TaskCommonQueryRaw,
#[serde(flatten)]
dates: TaskDateQueryRaw,
}
impl TasksFilterQueryRaw {
fn validate(self) -> Result<TasksFilterQuery, ResponseError> {
let Self { common, limit, from, dates } = self;
let common = common.validate()?;
let dates = dates.validate()?;
Ok(TasksFilterQuery { common, limit, from, dates })
}
}
impl TaskDeletionOrCancelationQueryRaw {
fn validate(self) -> Result<TaskDeletionOrCancelationQuery, ResponseError> {
let Self { common, dates } = self;
let common = common.validate()?;
let dates = dates.validate()?;
Ok(TaskDeletionOrCancelationQuery { common, dates })
}
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TaskDateQuery { pub struct TaskDateQuery {
#[serde( #[serde(
default, default,
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
serialize_with = "time::serde::rfc3339::option::serialize", serialize_with = "time::serde::rfc3339::option::serialize"
deserialize_with = "date_deserializer::after::deserialize"
)] )]
after_enqueued_at: Option<OffsetDateTime>, after_enqueued_at: Option<OffsetDateTime>,
#[serde( #[serde(
default, default,
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
serialize_with = "time::serde::rfc3339::option::serialize", serialize_with = "time::serde::rfc3339::option::serialize"
deserialize_with = "date_deserializer::before::deserialize"
)] )]
before_enqueued_at: Option<OffsetDateTime>, before_enqueued_at: Option<OffsetDateTime>,
#[serde( #[serde(
default, default,
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
serialize_with = "time::serde::rfc3339::option::serialize", serialize_with = "time::serde::rfc3339::option::serialize"
deserialize_with = "date_deserializer::after::deserialize"
)] )]
after_started_at: Option<OffsetDateTime>, after_started_at: Option<OffsetDateTime>,
#[serde( #[serde(
default, default,
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
serialize_with = "time::serde::rfc3339::option::serialize", serialize_with = "time::serde::rfc3339::option::serialize"
deserialize_with = "date_deserializer::before::deserialize"
)] )]
before_started_at: Option<OffsetDateTime>, before_started_at: Option<OffsetDateTime>,
#[serde( #[serde(
default, default,
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
serialize_with = "time::serde::rfc3339::option::serialize", serialize_with = "time::serde::rfc3339::option::serialize"
deserialize_with = "date_deserializer::after::deserialize"
)] )]
after_finished_at: Option<OffsetDateTime>, after_finished_at: Option<OffsetDateTime>,
#[serde( #[serde(
default, default,
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
serialize_with = "time::serde::rfc3339::option::serialize", serialize_with = "time::serde::rfc3339::option::serialize"
deserialize_with = "date_deserializer::before::deserialize"
)] )]
before_finished_at: Option<OffsetDateTime>, before_finished_at: Option<OffsetDateTime>,
} }
#[derive(Deserialize, Debug)] #[derive(Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct TaskCommonQuery {
types: Option<Vec<Kind>>,
uids: Option<Vec<TaskId>>,
statuses: Option<Vec<Status>>,
index_uids: Option<Vec<String>>,
}
#[derive(Debug)]
pub struct TasksFilterQuery { pub struct TasksFilterQuery {
#[serde(rename = "type")]
kind: Option<CS<StarOr<Kind>>>,
uid: Option<CS<TaskId>>,
status: Option<CS<StarOr<Status>>>,
index_uid: Option<CS<StarOr<String>>>,
#[serde(default = "DEFAULT_LIMIT")]
limit: u32, limit: u32,
from: Option<TaskId>, from: Option<TaskId>,
#[serde(flatten)] common: TaskCommonQuery,
dates: TaskDateQuery, dates: TaskDateQuery,
} }
#[derive(Deserialize, Debug)] #[derive(Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct TaskDeletionOrCancelationQuery {
pub struct TaskDeletionQuery { common: TaskCommonQuery,
#[serde(rename = "type")]
kind: Option<CS<Kind>>,
uid: Option<CS<u32>>,
status: Option<CS<Status>>,
index_uid: Option<CS<IndexUid>>,
#[serde(flatten)]
dates: TaskDateQuery,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TaskCancelationQuery {
#[serde(rename = "type")]
type_: Option<CS<Kind>>,
uid: Option<CS<u32>>,
status: Option<CS<Status>>,
index_uid: Option<CS<IndexUid>>,
#[serde(flatten)]
dates: TaskDateQuery, dates: TaskDateQuery,
} }
async fn cancel_tasks( async fn cancel_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
req: HttpRequest, req: HttpRequest,
params: web::Query<TaskCancelationQuery>, params: web::Query<TaskDeletionOrCancelationQueryRaw>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let TaskCancelationQuery { let query = params.into_inner().validate()?;
type_, let TaskDeletionOrCancelationQuery {
uid, common: TaskCommonQuery { types, uids, statuses, index_uids },
status,
index_uid,
dates: dates:
TaskDateQuery { TaskDateQuery {
after_enqueued_at, after_enqueued_at,
@ -264,21 +437,15 @@ async fn cancel_tasks(
after_finished_at, after_finished_at,
before_finished_at, before_finished_at,
}, },
} = params.into_inner(); } = query;
let kind: Option<Vec<_>> = type_.map(|x| x.into_iter().collect());
let uid: Option<Vec<_>> = uid.map(|x| x.into_iter().collect());
let status: Option<Vec<_>> = status.map(|x| x.into_iter().collect());
let index_uid: Option<Vec<_>> =
index_uid.map(|x| x.into_iter().map(|x| x.to_string()).collect());
let query = Query { let query = Query {
limit: None, limit: None,
from: None, from: None,
status, statuses,
kind, types,
index_uid, index_uids,
uid, uids,
before_enqueued_at, before_enqueued_at,
after_enqueued_at, after_enqueued_at,
before_started_at, before_started_at,
@ -308,13 +475,10 @@ async fn cancel_tasks(
async fn delete_tasks( async fn delete_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
req: HttpRequest, req: HttpRequest,
params: web::Query<TaskDeletionQuery>, params: web::Query<TaskDeletionOrCancelationQueryRaw>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let TaskDeletionQuery { let TaskDeletionOrCancelationQuery {
kind: type_, common: TaskCommonQuery { types, uids, statuses, index_uids },
uid,
status,
index_uid,
dates: dates:
TaskDateQuery { TaskDateQuery {
after_enqueued_at, after_enqueued_at,
@ -324,21 +488,15 @@ async fn delete_tasks(
after_finished_at, after_finished_at,
before_finished_at, before_finished_at,
}, },
} = params.into_inner(); } = params.into_inner().validate()?;
let kind: Option<Vec<_>> = type_.map(|x| x.into_iter().collect());
let uid: Option<Vec<_>> = uid.map(|x| x.into_iter().collect());
let status: Option<Vec<_>> = status.map(|x| x.into_iter().collect());
let index_uid: Option<Vec<_>> =
index_uid.map(|x| x.into_iter().map(|x| x.to_string()).collect());
let query = Query { let query = Query {
limit: None, limit: None,
from: None, from: None,
status, statuses,
kind, types,
index_uid, index_uids,
uid, uids,
after_enqueued_at, after_enqueued_at,
before_enqueued_at, before_enqueued_at,
after_started_at, after_started_at,
@ -375,15 +533,12 @@ pub struct AllTasks {
async fn get_tasks( async fn get_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
params: web::Query<TasksFilterQuery>, params: web::Query<TasksFilterQueryRaw>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let TasksFilterQuery { let TasksFilterQuery {
kind, common: TaskCommonQuery { types, uids, statuses, index_uids },
uid,
status,
index_uid,
limit, limit,
from, from,
dates: dates:
@ -395,21 +550,14 @@ async fn get_tasks(
after_finished_at, after_finished_at,
before_finished_at, before_finished_at,
}, },
} = params.into_inner(); } = params.into_inner().validate()?;
// We first transform a potential indexUid=* into a "not specified indexUid filter"
// for every one of the filters: type, status, and indexUid.
let kind: Option<Vec<_>> = kind.and_then(fold_star_or);
let uid: Option<Vec<_>> = uid.map(|x| x.into_iter().collect());
let status: Option<Vec<_>> = status.and_then(fold_star_or);
let index_uid: Option<Vec<_>> = index_uid.and_then(fold_star_or);
analytics.publish( analytics.publish(
"Tasks Seen".to_string(), "Tasks Seen".to_string(),
json!({ json!({
"filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()), "filtered_by_index_uid": index_uids.as_ref().map_or(false, |v| !v.is_empty()),
"filtered_by_type": kind.as_ref().map_or(false, |v| !v.is_empty()), "filtered_by_type": types.as_ref().map_or(false, |v| !v.is_empty()),
"filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()), "filtered_by_status": statuses.as_ref().map_or(false, |v| !v.is_empty()),
}), }),
Some(&req), Some(&req),
); );
@ -420,10 +568,10 @@ async fn get_tasks(
let query = index_scheduler::Query { let query = index_scheduler::Query {
limit: Some(limit), limit: Some(limit),
from, from,
status, statuses,
kind, types,
index_uid, index_uids,
uid, uids,
before_enqueued_at, before_enqueued_at,
after_enqueued_at, after_enqueued_at,
before_started_at, before_started_at,
@ -462,20 +610,17 @@ async fn get_task(
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let task_uid_string = task_uid.into_inner(); let task_uid_string = task_uid.into_inner();
let task_uid: TaskId = match task_uid_string.parse() { let task_uid: TaskId = match task_uid_string.parse() {
Ok(id) => id, Ok(id) => id,
Err(e) => { Err(_e) => {
return Err(index_scheduler::Error::InvalidTaskUids { return Err(index_scheduler::Error::InvalidTaskUids { task_uid: task_uid_string }.into())
task_uids: task_uid_string,
error_message: e.to_string(),
}
.into())
} }
}; };
analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req)); analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req));
let query = index_scheduler::Query { uid: Some(vec![task_uid]), ..Query::default() }; let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() };
if let Some(task) = index_scheduler if let Some(task) = index_scheduler
.get_tasks_from_authorized_indexes( .get_tasks_from_authorized_indexes(
@ -492,19 +637,21 @@ async fn get_task(
} }
pub(crate) mod date_deserializer { pub(crate) mod date_deserializer {
use meilisearch_types::error::ResponseError;
use time::format_description::well_known::Rfc3339; use time::format_description::well_known::Rfc3339;
use time::macros::format_description; use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime, Time}; use time::{Date, Duration, OffsetDateTime, Time};
enum DeserializeDateOption { pub enum DeserializeDateOption {
Before, Before,
After, After,
} }
fn deserialize_date<E: serde::de::Error>( pub fn deserialize_date(
field_name: &str,
value: &str, value: &str,
option: DeserializeDateOption, option: DeserializeDateOption,
) -> std::result::Result<OffsetDateTime, E> { ) -> std::result::Result<OffsetDateTime, ResponseError> {
// We can't parse using time's rfc3339 format, since then we won't know what part of the // We can't parse using time's rfc3339 format, since then we won't know what part of the
// datetime was not explicitly specified, and thus we won't be able to increment it to the // datetime was not explicitly specified, and thus we won't be able to increment it to the
// next step. // next step.
@ -521,120 +668,17 @@ pub(crate) mod date_deserializer {
match option { match option {
DeserializeDateOption::Before => Ok(datetime), DeserializeDateOption::Before => Ok(datetime),
DeserializeDateOption::After => { DeserializeDateOption::After => {
let datetime = datetime let datetime =
.checked_add(Duration::days(1)) datetime.checked_add(Duration::days(1)).unwrap_or_else(|| datetime);
.ok_or_else(|| serde::de::Error::custom("date overflow"))?;
Ok(datetime) Ok(datetime)
} }
} }
} else { } else {
Err(serde::de::Error::custom( Err(index_scheduler::Error::InvalidTaskDate {
"could not parse a date with the RFC3339 or YYYY-MM-DD format", field: field_name.to_string(),
)) date: value.to_string(),
}
}
/// Deserialize an upper bound datetime with RFC3339 or YYYY-MM-DD.
pub(crate) mod before {
use serde::Deserializer;
use time::OffsetDateTime;
use super::{deserialize_date, DeserializeDateOption};
/// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
) -> Result<Option<OffsetDateTime>, D::Error> {
deserializer.deserialize_option(Visitor)
}
struct Visitor;
#[derive(Debug)]
struct DeserializeError;
impl<'a> serde::de::Visitor<'a> for Visitor {
type Value = Option<OffsetDateTime>;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
formatter.write_str(
"an optional date written as a string with the RFC3339 or YYYY-MM-DD format",
)
}
fn visit_str<E: serde::de::Error>(
self,
value: &str,
) -> Result<Option<OffsetDateTime>, E> {
deserialize_date(value, DeserializeDateOption::Before).map(Some)
}
fn visit_some<D: Deserializer<'a>>(
self,
deserializer: D,
) -> Result<Option<OffsetDateTime>, D::Error> {
deserializer.deserialize_str(Visitor)
}
fn visit_none<E: serde::de::Error>(self) -> Result<Option<OffsetDateTime>, E> {
Ok(None)
}
fn visit_unit<E: serde::de::Error>(self) -> Result<Self::Value, E> {
Ok(None)
}
}
}
/// Deserialize a lower bound datetime with RFC3339 or YYYY-MM-DD.
///
/// If YYYY-MM-DD is used, the day is incremented by one.
pub(crate) mod after {
use serde::Deserializer;
use time::OffsetDateTime;
use super::{deserialize_date, DeserializeDateOption};
/// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
) -> Result<Option<OffsetDateTime>, D::Error> {
deserializer.deserialize_option(Visitor)
}
struct Visitor;
#[derive(Debug)]
struct DeserializeError;
impl<'a> serde::de::Visitor<'a> for Visitor {
type Value = Option<OffsetDateTime>;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
formatter.write_str(
"an optional date written as a string with the RFC3339 or YYYY-MM-DD format",
)
}
fn visit_str<E: serde::de::Error>(
self,
value: &str,
) -> Result<Option<OffsetDateTime>, E> {
deserialize_date(value, DeserializeDateOption::After).map(Some)
}
fn visit_some<D: Deserializer<'a>>(
self,
deserializer: D,
) -> Result<Option<OffsetDateTime>, D::Error> {
deserializer.deserialize_str(Visitor)
}
fn visit_none<E: serde::de::Error>(self) -> Result<Option<OffsetDateTime>, E> {
Ok(None)
}
fn visit_unit<E: serde::de::Error>(self) -> Result<Self::Value, E> {
Ok(None)
} }
.into())
} }
} }
} }
@ -643,10 +687,10 @@ pub(crate) mod date_deserializer {
mod tests { mod tests {
use meili_snap::snapshot; use meili_snap::snapshot;
use crate::routes::tasks::TaskDeletionQuery; use crate::routes::tasks::{TaskDeletionOrCancelationQueryRaw, TasksFilterQueryRaw};
#[test] #[test]
fn deserialize_task_deletion_query_datetime() { fn deserialize_task_filter_dates() {
{ {
let json = r#" { let json = r#" {
"afterEnqueuedAt": "2021-12-03", "afterEnqueuedAt": "2021-12-03",
@ -656,7 +700,10 @@ mod tests {
"afterFinishedAt": "2021-12-03", "afterFinishedAt": "2021-12-03",
"beforeFinishedAt": "2021-12-03" "beforeFinishedAt": "2021-12-03"
} "#; } "#;
let query = serde_json::from_str::<TaskDeletionQuery>(json).unwrap(); let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
snapshot!(format!("{:?}", query.dates.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
@ -666,45 +713,256 @@ mod tests {
} }
{ {
let json = r#" { "afterEnqueuedAt": "2021-12-03T23:45:23Z", "beforeEnqueuedAt": "2021-12-03T23:45:23Z" } "#; let json = r#" { "afterEnqueuedAt": "2021-12-03T23:45:23Z", "beforeEnqueuedAt": "2021-12-03T23:45:23Z" } "#;
let query = serde_json::from_str::<TaskDeletionQuery>(json).unwrap(); let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
} }
{ {
let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06-06:20" } "#; let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06-06:20" } "#;
let query = serde_json::from_str::<TaskDeletionQuery>(json).unwrap(); let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00"); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00");
} }
{ {
let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06+00:00" } "#; let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06+00:00" } "#;
let query = serde_json::from_str::<TaskDeletionQuery>(json).unwrap(); let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00");
} }
{ {
let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06.200000300Z" } "#; let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06.200000300Z" } "#;
let query = serde_json::from_str::<TaskDeletionQuery>(json).unwrap(); let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00"); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00");
} }
{ {
let json = r#" { "afterEnqueuedAt": "2021" } "#; let json = r#" { "afterFinishedAt": "2021" } "#;
let err = serde_json::from_str::<TaskDeletionQuery>(json).unwrap_err(); let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 30"); .unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task `afterFinishedAt` `2021` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
}
{
let json = r#" { "beforeFinishedAt": "2021" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task `beforeFinishedAt` `2021` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
} }
{ {
let json = r#" { "afterEnqueuedAt": "2021-12" } "#; let json = r#" { "afterEnqueuedAt": "2021-12" } "#;
let err = serde_json::from_str::<TaskDeletionQuery>(json).unwrap_err(); let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 33"); .unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task `afterEnqueuedAt` `2021-12` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
} }
{ {
let json = r#" { "afterEnqueuedAt": "2021-12-03T23" } "#; let json = r#" { "beforeEnqueuedAt": "2021-12-03T23" } "#;
let err = serde_json::from_str::<TaskDeletionQuery>(json).unwrap_err(); let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 39"); .unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task `beforeEnqueuedAt` `2021-12-03T23` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
} }
{ {
let json = r#" { "afterEnqueuedAt": "2021-12-03T23:45" } "#; let json = r#" { "afterStartedAt": "2021-12-03T23:45" } "#;
let err = serde_json::from_str::<TaskDeletionQuery>(json).unwrap_err(); let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 42"); .unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task `afterStartedAt` `2021-12-03T23:45` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
let json = r#" { "beforeStartedAt": "2021-12-03T23:45" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task `beforeStartedAt` `2021-12-03T23:45` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
}
}
#[test]
fn deserialize_task_filter_uids() {
{
let json = r#" { "uids": "78,1,12,73" } "#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.uids.unwrap()), @"[78, 1, 12, 73]");
}
{
let json = r#" { "uids": "1" } "#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.uids.unwrap()), @"[1]");
}
{
let json = r#" { "uids": "78,hello,world" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task uid `hello` is invalid. It should only contain numeric characters.");
}
{
let json = r#" { "uids": "cat" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task uid `cat` is invalid. It should only contain numeric characters.");
}
}
#[test]
fn deserialize_task_filter_status() {
{
let json = r#" { "statuses": "succeeded,failed,enqueued,processing,canceled" } "#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.statuses.unwrap()), @"[Succeeded, Failed, Enqueued, Processing, Canceled]");
}
{
let json = r#" { "statuses": "enqueued" } "#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.statuses.unwrap()), @"[Enqueued]");
}
{
let json = r#" { "statuses": "finished" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task status `finished` is invalid. Available task statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`");
}
}
#[test]
fn deserialize_task_filter_types() {
{
let json = r#" { "types": "documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation" }"#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.types.unwrap()), @"[DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation]");
}
{
let json = r#" { "types": "settingsUpdate" } "#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.types.unwrap()), @"[SettingsUpdate]");
}
{
let json = r#" { "types": "createIndex" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"Task type `createIndex` is invalid. Available task types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`");
}
}
#[test]
fn deserialize_task_filter_index_uids() {
{
let json = r#" { "indexUids": "toto,tata-78" }"#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.index_uids.unwrap()), @r###"["toto", "tata-78"]"###);
}
{
let json = r#" { "indexUids": "index_a" } "#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query.common.index_uids.unwrap()), @r###"["index_a"]"###);
}
{
let json = r#" { "indexUids": "1,hé" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"hé is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).");
}
{
let json = r#" { "indexUids": "hé" } "#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap_err();
snapshot!(format!("{err}"), @"hé is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).");
}
}
#[test]
fn deserialize_task_filter_general() {
{
let json = r#" { "from": 12, "limit": 15, "indexUids": "toto,tata-78", "statuses": "succeeded,enqueued", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#;
let query =
serde_json::from_str::<TasksFilterQueryRaw>(json).unwrap().validate().unwrap();
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: 15, from: Some(12), common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), statuses: Some([Succeeded, Enqueued]), index_uids: Some(["toto", "tata-78"]) }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }"###);
}
{
// Stars should translate to `None` in the query
// Verify value of the default limit
let json = r#" { "indexUids": "*", "statuses": "succeeded,*", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#;
let query =
serde_json::from_str::<TasksFilterQueryRaw>(json).unwrap().validate().unwrap();
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: 20, from: None, common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), statuses: None, index_uids: None }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }");
}
{
// Stars should also translate to `None` in task deletion/cancelation queries
let json = r#" { "indexUids": "*", "statuses": "succeeded,*", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#;
let query = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json)
.unwrap()
.validate()
.unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), statuses: None, index_uids: None }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }");
}
{
// Stars in uids not allowed
let json = r#" { "uids": "*" }"#;
let err =
serde_json::from_str::<TasksFilterQueryRaw>(json).unwrap().validate().unwrap_err();
snapshot!(format!("{err}"), @"Task uid `*` is invalid. It should only contain numeric characters.");
}
{
// From not allowed in task deletion/cancelation queries
let json = r#" { "from": 12 }"#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json).unwrap_err();
snapshot!(format!("{err}"), @"unknown field `from` at line 1 column 15");
}
{
// Limit not allowed in task deletion/cancelation queries
let json = r#" { "limit": 12 }"#;
let err = serde_json::from_str::<TaskDeletionOrCancelationQueryRaw>(json).unwrap_err();
snapshot!(format!("{err}"), @"unknown field `limit` at line 1 column 16");
} }
} }
} }

View File

@ -147,7 +147,11 @@ pub enum Code {
MissingMasterKey, MissingMasterKey,
NoSpaceLeftOnDevice, NoSpaceLeftOnDevice,
DumpNotFound, DumpNotFound,
InvalidTaskUid, InvalidTaskDate,
InvalidTaskStatuses,
InvalidTaskTypes,
InvalidTaskCanceledBy,
InvalidTaskUids,
TaskNotFound, TaskNotFound,
TaskDeletionWithEmptyQuery, TaskDeletionWithEmptyQuery,
TaskCancelationWithEmptyQuery, TaskCancelationWithEmptyQuery,
@ -239,7 +243,21 @@ impl Code {
MissingMasterKey => { MissingMasterKey => {
ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED) ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED)
} }
InvalidTaskUid => ErrCode::invalid("invalid_task_uid", StatusCode::BAD_REQUEST), InvalidTaskDate => {
ErrCode::invalid("invalid_task_date_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskUids => {
ErrCode::invalid("invalid_task_uids_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskStatuses => {
ErrCode::invalid("invalid_task_statuses_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskTypes => {
ErrCode::invalid("invalid_task_types_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskCanceledBy => {
ErrCode::invalid("invalid_task_canceled_by_filter", StatusCode::BAD_REQUEST)
}
TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND), TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
TaskDeletionWithEmptyQuery => { TaskDeletionWithEmptyQuery => {
ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST) ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)

View File

@ -398,7 +398,23 @@ impl Kind {
} }
} }
} }
impl Display for Kind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Kind::DocumentAdditionOrUpdate => write!(f, "documentAdditionOrUpdate"),
Kind::DocumentDeletion => write!(f, "documentDeletion"),
Kind::SettingsUpdate => write!(f, "settingsUpdate"),
Kind::IndexCreation => write!(f, "indexCreation"),
Kind::IndexDeletion => write!(f, "indexDeletion"),
Kind::IndexUpdate => write!(f, "indexUpdate"),
Kind::IndexSwap => write!(f, "indexSwap"),
Kind::TaskCancelation => write!(f, "taskCancelation"),
Kind::TaskDeletion => write!(f, "taskDeletion"),
Kind::DumpCreation => write!(f, "dumpCreation"),
Kind::SnapshotCreation => write!(f, "snapshotCreation"),
}
}
}
impl FromStr for Kind { impl FromStr for Kind {
type Err = ResponseError; type Err = ResponseError;