2399: Update the tasks endpoints r=MarinPostma a=Kerollmops

This PR wraps up all the changes related to the `tasks` endpoints. It is related to https://github.com/meilisearch/meilisearch/issues/2377 but doesn't close it; I will create a new PR to work on [the seek-based pagination](https://github.com/meilisearch/specifications/pull/115).

I wanted to do something cool with GitHub: merge multiple PRs into this one, to help review the changes one by one. Unfortunately, GitHub doesn't allow creating empty PRs, and I also struggled with git itself when it came to merging things in the right order, so I decided to add all of the changes to this single PR. I will list the changes, with references to the specs, here.

 - [x] Task statuses and types must be case-insensitive
 - [x] Task statuses, types, and `indexUid` must accept the `*` selector
 - [ ] Rename the `TaskDetails` struct fields
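For illustration, here is a minimal, self-contained sketch of the parsing rule behind the first two items: values are matched with `eq_ignore_ascii_case`, and a `StarOr` wrapper catches the `*` selector before delegating to the normal parser. It mirrors the `StarOr`/`FromStr` code in the diff below; the two-variant `Status` enum is trimmed down purely for illustration.

```rust
use std::str::FromStr;

/// Either the `*` selector or a concrete, case-insensitively parsed value.
#[derive(Debug, PartialEq)]
enum StarOr<T> {
    Star,
    Other(T),
}

#[derive(Debug, PartialEq)]
enum Status {
    Enqueued,
    Processing,
}

impl FromStr for Status {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Case-insensitive comparison: `enqueued`, `ENQUEUED`, `EnQueued`, ...
        // are all accepted.
        if s.eq_ignore_ascii_case("enqueued") {
            Ok(Status::Enqueued)
        } else if s.eq_ignore_ascii_case("processing") {
            Ok(Status::Processing)
        } else {
            Err(format!("invalid status `{}`", s))
        }
    }
}

impl<T: FromStr> FromStr for StarOr<T> {
    type Err = T::Err;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // `*` short-circuits; anything else goes through the inner parser.
        if s.trim() == "*" {
            Ok(StarOr::Star)
        } else {
            T::from_str(s).map(StarOr::Other)
        }
    }
}

fn main() {
    assert_eq!("EnQueued".parse::<StarOr<Status>>(), Ok(StarOr::Other(Status::Enqueued)));
    assert_eq!("*".parse::<StarOr<Status>>(), Ok(StarOr::Star));
}
```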

## Changes

- [ ] Add seek-based pagination following [the spec](https://github.com/meilisearch/specifications/pull/115) 
- [x] Add filtering on the `/tasks` endpoint following [this spec](https://github.com/meilisearch/specifications/pull/116)
  - [x] Add filtering capabilities on `type`, `status`, and `indexUid` for the `GET /tasks` endpoint.
  - [x] Several values can be specified for one filter using the `,` character, e.g. `?status=enqueued,processing`
  - [x] An AND operation is applied between two different filters, e.g. `?status=enqueued&type=indexCreation` is equivalent to `status = enqueued AND type = indexCreation` (see the sketch after this list)
- [x] Remove `GET /indexes/:indexUid/tasks`. It can be replaced by `GET /tasks?indexUid=:indexUid`
- [x] Remove `GET /indexes/:indexUid/tasks/:taskUid`.
- [x] Rename `uid` to `taskUid` in the `202 Accepted` task response returned by every asynchronous task (e.g. index creation, document addition...)
- [x] Rename some task `type` values
  - [x] `documentPartial` -> `documentAdditionOrUpdate`
  - [x] `documentAddition` -> `documentAdditionOrUpdate`
  - [x] `clearAll` -> `documentDeletion`
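
To make the filter semantics concrete, here is a hedged client-side sketch. It assumes the `reqwest` crate (with the `blocking` and `json` features), `serde_json`, and a Meilisearch instance listening on `localhost:7700`; none of that is part of this PR.

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `,` separates several values for one filter (OR semantics),
    // `&` combines different filters (AND semantics).
    let url = "http://localhost:7700/tasks?status=enqueued,processing&type=indexCreation";
    let body: serde_json::Value = reqwest::blocking::get(url)?.json()?;
    // Every returned task is an `indexCreation` that is enqueued OR processing.
    println!("{}", serde_json::to_string_pretty(&body)?);
    Ok(())
}
```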

Co-authored-by: Kerollmops <clement@meilisearch.com>
Commit ab39df9693 by bors[bot], 2022-05-31 09:40:40 +00:00, committed by GitHub.
20 changed files with 402 additions and 198 deletions.

Cargo.lock (generated)

@@ -2046,6 +2046,7 @@ dependencies = [
  "rustls-pemfile",
  "segment",
  "serde",
+ "serde-cs",
  "serde_json",
  "serde_url_params",
  "sha-1",
@@ -3085,6 +3086,15 @@ dependencies = [
  "serde_derive",
 ]
 
+[[package]]
+name = "serde-cs"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18d5b0435c9139761fbe5abeb1283234bcfbde88fadc2ae432579648fbce72ad"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "serde_derive"
 version = "1.0.136"

@@ -62,6 +62,7 @@ rustls = "0.20.4"
 rustls-pemfile = "0.3.0"
 segment = { version = "0.2.0", optional = true }
 serde = { version = "1.0.136", features = ["derive"] }
+serde-cs = "0.2.2"
 serde_json = { version = "1.0.79", features = ["preserve_order"] }
 sha2 = "0.10.2"
 siphasher = "0.3.10"

@@ -2,7 +2,7 @@
 #[macro_use]
 pub mod error;
 pub mod analytics;
-mod task;
+pub mod task;
 #[macro_use]
 pub mod extractors;
 pub mod helpers;

@@ -15,7 +15,6 @@ use crate::task::SummarizedTaskView;
 pub mod documents;
 pub mod search;
 pub mod settings;
-pub mod tasks;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
@@ -34,7 +33,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
         .service(web::scope("/documents").configure(documents::configure))
         .service(web::scope("/search").configure(search::configure))
-        .service(web::scope("/tasks").configure(tasks::configure))
         .service(web::scope("/settings").configure(settings::configure)),
     );
 }

@@ -1,80 +0,0 @@
-use actix_web::{web, HttpRequest, HttpResponse};
-use log::debug;
-use meilisearch_error::ResponseError;
-use meilisearch_lib::MeiliSearch;
-use serde::{Deserialize, Serialize};
-use serde_json::json;
-use time::OffsetDateTime;
-
-use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::{TaskListView, TaskView};
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
-        .service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
-}
-
-#[derive(Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct UpdateIndexResponse {
-    name: String,
-    uid: String,
-    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
-    created_at: OffsetDateTime,
-    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
-    updated_at: OffsetDateTime,
-    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
-    primary_key: OffsetDateTime,
-}
-
-#[derive(Deserialize)]
-pub struct UpdateParam {
-    index_uid: String,
-    task_id: u64,
-}
-
-pub async fn get_task_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
-    index_uid: web::Path<UpdateParam>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Index Tasks Seen".to_string(),
-        json!({ "per_task_uid": true }),
-        Some(&req),
-    );
-
-    let UpdateParam { index_uid, task_id } = index_uid.into_inner();
-
-    let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
-
-    debug!("returns: {:?}", task);
-
-    Ok(HttpResponse::Ok().json(task))
-}
-
-pub async fn get_all_tasks_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
-    index_uid: web::Path<String>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Index Tasks Seen".to_string(),
-        json!({ "per_task_uid": false }),
-        Some(&req),
-    );
-
-    let tasks: TaskListView = meilisearch
-        .list_index_task(index_uid.into_inner(), None, None)
-        .await?
-        .into_iter()
-        .map(TaskView::from)
-        .collect::<Vec<_>>()
-        .into();
-
-    debug!("returns: {:?}", tasks);
-
-    Ok(HttpResponse::Ok().json(tasks))
-}

@@ -1,22 +1,93 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use meilisearch_error::ResponseError;
-use meilisearch_lib::tasks::task::TaskId;
+use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
 use meilisearch_lib::tasks::TaskFilter;
-use meilisearch_lib::MeiliSearch;
+use meilisearch_lib::{IndexUid, MeiliSearch};
+use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::json;
+use std::str::FromStr;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::task::{TaskListView, TaskView};
+use crate::task::{TaskListView, TaskStatus, TaskType, TaskView};
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
         .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
 }
 
+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct TaskFilterQuery {
+    #[serde(rename = "type")]
+    type_: Option<CS<StarOr<TaskType>>>,
+    status: Option<CS<StarOr<TaskStatus>>>,
+    index_uid: Option<CS<StarOr<IndexUid>>>,
+}
+
+/// A type that tries to match either a star (*) or
+/// any other thing that implements `FromStr`.
+#[derive(Debug)]
+enum StarOr<T> {
+    Star,
+    Other(T),
+}
+
+impl<T: FromStr> FromStr for StarOr<T> {
+    type Err = T::Err;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if s.trim() == "*" {
+            Ok(StarOr::Star)
+        } else {
+            T::from_str(s).map(StarOr::Other)
+        }
+    }
+}
+
+/// Extracts the raw values from the `StarOr` types and
+/// return None if a `StarOr::Star` is encountered.
+fn fold_star_or<T>(content: Vec<StarOr<T>>) -> Option<Vec<T>> {
+    content
+        .into_iter()
+        .fold(Some(Vec::new()), |acc, val| match (acc, val) {
+            (None, _) | (_, StarOr::Star) => None,
+            (Some(mut acc), StarOr::Other(uid)) => {
+                acc.push(uid);
+                Some(acc)
+            }
+        })
+}
+
+#[rustfmt::skip]
+fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
+    matches!((type_, content),
+          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
+        | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
+        | (TaskType::IndexDeletion, TaskContent::IndexDeletion)
+        | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
+        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion(_))
+        | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
+    )
+}
+
+#[rustfmt::skip]
+fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
+    events.last().map_or(false, |event| {
+        matches!((status, event),
+              (TaskStatus::Enqueued, TaskEvent::Created(_))
+            | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
+            | (TaskStatus::Succeeded, TaskEvent::Succeded { .. })
+            | (TaskStatus::Failed, TaskEvent::Failed { .. }),
+        )
+    })
+}
+
 async fn get_tasks(
     meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
+    params: web::Query<TaskFilterQuery>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -26,8 +97,34 @@ async fn get_tasks(
         Some(&req),
     );
 
+    let TaskFilterQuery {
+        type_,
+        status,
+        index_uid,
+    } = params.into_inner();
+
     let search_rules = &meilisearch.filters().search_rules;
-    let filters = if search_rules.is_index_authorized("*") {
+
+    // We first tranform a potential indexUid=* into a "not specified indexUid filter"
+    // for every one of the filters: type, status, and indexUid.
+    let type_ = type_.map(CS::into_inner).and_then(fold_star_or);
+    let status = status.map(CS::into_inner).and_then(fold_star_or);
+    let index_uid = index_uid.map(CS::into_inner).and_then(fold_star_or);
+
+    // Then we filter on potential indexes and make sure that the search filter
+    // restrictions are also applied.
+    let indexes_filters = match index_uid {
+        Some(indexes) => {
+            let mut filters = TaskFilter::default();
+            for name in indexes {
+                if search_rules.is_index_authorized(&name) {
+                    filters.filter_index(name.to_string());
+                }
+            }
+            Some(filters)
+        }
+        None => {
+            if search_rules.is_index_authorized("*") {
                 None
             } else {
                 let mut filters = TaskFilter::default();
@@ -35,6 +132,33 @@ async fn get_tasks(
                 filters.filter_index(index);
             }
             Some(filters)
+            }
+        }
+    };
+
+    // Then we complete the task filter with other potential status and types filters.
+    let filters = if type_.is_some() || status.is_some() {
+        let mut filters = indexes_filters.unwrap_or_default();
+        filters.filter_fn(move |task| {
+            let matches_type = match &type_ {
+                Some(types) => types
+                    .iter()
+                    .any(|t| task_type_matches_content(t, &task.content)),
+                None => true,
+            };
+
+            let matches_status = match &status {
+                Some(statuses) => statuses
+                    .iter()
+                    .any(|t| task_status_matches_events(t, &task.events)),
+                None => true,
+            };
+
+            matches_type && matches_status
+        });
+        Some(filters)
+    } else {
+        indexes_filters
     };
 
     let tasks: TaskListView = meilisearch
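
To make the `*` handling in `fold_star_or` above concrete, here is a small self-contained sketch; the types are re-declared locally and trimmed down to what the example needs.

```rust
#[derive(Debug)]
enum StarOr<T> {
    Star,
    Other(T),
}

/// Same folding rule as in the route above: one `*` anywhere in the
/// list means "do not filter on this dimension at all" (`None`).
fn fold_star_or<T>(content: Vec<StarOr<T>>) -> Option<Vec<T>> {
    content
        .into_iter()
        .fold(Some(Vec::new()), |acc, val| match (acc, val) {
            (None, _) | (_, StarOr::Star) => None,
            (Some(mut acc), StarOr::Other(v)) => {
                acc.push(v);
                Some(acc)
            }
        })
}

fn main() {
    // `?indexUid=movies,books` keeps both concrete values...
    let concrete = vec![StarOr::Other("movies"), StarOr::Other("books")];
    assert_eq!(fold_star_or(concrete), Some(vec!["movies", "books"]));

    // ...while `?indexUid=movies,*` degenerates into "no indexUid filter".
    let with_star = vec![StarOr::Other("movies"), StarOr::Star];
    assert_eq!(fold_star_or(with_star), None::<Vec<&str>>);
}
```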

@@ -1,64 +1,104 @@
 use std::fmt::Write;
+use std::str::FromStr;
 use std::write;
 
 use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
-use meilisearch_lib::milli::update::IndexDocumentsMethod;
 use meilisearch_lib::tasks::batch::BatchId;
 use meilisearch_lib::tasks::task::{
     DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
 };
-use serde::{Serialize, Serializer};
+use serde::{Deserialize, Serialize, Serializer};
 use time::{Duration, OffsetDateTime};
 
 use crate::AUTOBATCHING_ENABLED;
 
-#[derive(Debug, Serialize)]
+#[derive(Debug, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
-enum TaskType {
+pub enum TaskType {
     IndexCreation,
     IndexUpdate,
     IndexDeletion,
-    DocumentAddition,
-    DocumentPartial,
+    DocumentAdditionOrUpdate,
     DocumentDeletion,
     SettingsUpdate,
-    ClearAll,
     DumpCreation,
 }
 
 impl From<TaskContent> for TaskType {
     fn from(other: TaskContent) -> Self {
         match other {
-            TaskContent::DocumentAddition {
-                merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
-                ..
-            } => TaskType::DocumentAddition,
-            TaskContent::DocumentAddition {
-                merge_strategy: IndexDocumentsMethod::UpdateDocuments,
-                ..
-            } => TaskType::DocumentPartial,
-            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
-            TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
-            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
-            TaskContent::IndexDeletion => TaskType::IndexDeletion,
             TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
             TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
+            TaskContent::IndexDeletion => TaskType::IndexDeletion,
+            TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate,
+            TaskContent::DocumentDeletion(_) => TaskType::DocumentDeletion,
+            TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
             TaskContent::Dump { .. } => TaskType::DumpCreation,
-            _ => unreachable!("unexpected task type"),
         }
     }
 }
 
-#[derive(Debug, Serialize)]
+impl FromStr for TaskType {
+    type Err = String;
+
+    fn from_str(status: &str) -> Result<Self, String> {
+        if status.eq_ignore_ascii_case("indexCreation") {
+            Ok(TaskType::IndexCreation)
+        } else if status.eq_ignore_ascii_case("indexUpdate") {
+            Ok(TaskType::IndexUpdate)
+        } else if status.eq_ignore_ascii_case("indexDeletion") {
+            Ok(TaskType::IndexDeletion)
+        } else if status.eq_ignore_ascii_case("documentAdditionOrUpdate") {
+            Ok(TaskType::DocumentAdditionOrUpdate)
+        } else if status.eq_ignore_ascii_case("documentDeletion") {
+            Ok(TaskType::DocumentDeletion)
+        } else if status.eq_ignore_ascii_case("settingsUpdate") {
+            Ok(TaskType::SettingsUpdate)
+        } else if status.eq_ignore_ascii_case("dumpCreation") {
+            Ok(TaskType::DumpCreation)
+        } else {
+            Err(format!(
+                "invalid task type `{}`, expecting one of: \
+                 indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \
+                 documentDeletion, settingsUpdate, dumpCreation",
+                status
+            ))
+        }
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
-enum TaskStatus {
+pub enum TaskStatus {
     Enqueued,
     Processing,
     Succeeded,
     Failed,
 }
 
+impl FromStr for TaskStatus {
+    type Err = String;
+
+    fn from_str(status: &str) -> Result<Self, String> {
+        if status.eq_ignore_ascii_case("enqueued") {
+            Ok(TaskStatus::Enqueued)
+        } else if status.eq_ignore_ascii_case("processing") {
+            Ok(TaskStatus::Processing)
+        } else if status.eq_ignore_ascii_case("succeeded") {
+            Ok(TaskStatus::Succeeded)
+        } else if status.eq_ignore_ascii_case("failed") {
+            Ok(TaskStatus::Failed)
+        } else {
+            Err(format!(
+                "invalid task status `{}`, expecting one of: \
+                 enqueued, processing, succeeded, or failed",
+                status,
+            ))
+        }
+    }
+}
+
 #[derive(Debug, Serialize)]
 #[serde(untagged)]
 #[allow(clippy::large_enum_variant)]
@@ -172,22 +212,14 @@ impl From<Task> for TaskView {
         let (task_type, mut details) = match content {
             TaskContent::DocumentAddition {
-                merge_strategy,
-                documents_count,
-                ..
+                documents_count, ..
             } => {
                 let details = TaskDetails::DocumentAddition {
                     received_documents: documents_count,
                     indexed_documents: None,
                 };
 
-                let task_type = match merge_strategy {
-                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
-                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
-                    _ => unreachable!("Unexpected document merge strategy."),
-                };
-
-                (task_type, Some(details))
+                (TaskType::DocumentAdditionOrUpdate, Some(details))
             }
             TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
                 TaskType::DocumentDeletion,
@@ -197,7 +229,7 @@ impl From<Task> for TaskView {
                 }),
             ),
             TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
-                TaskType::ClearAll,
+                TaskType::DocumentDeletion,
                 Some(TaskDetails::ClearAll {
                     deleted_documents: None,
                 }),
@@ -349,7 +381,7 @@ impl From<Vec<TaskView>> for TaskListView {
 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SummarizedTaskView {
-    uid: TaskId,
+    task_uid: TaskId,
     index_uid: Option<String>,
     status: TaskStatus,
     #[serde(rename = "type")]
@@ -372,7 +404,7 @@ impl From<Task> for SummarizedTaskView {
         };
 
         Self {
-            uid: other.id,
+            task_uid: other.id,
            index_uid: other.index_uid.map(|u| u.into_inner()),
             status: TaskStatus::Enqueued,
             task_type: other.content.into(),
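
The `uid` -> `task_uid` rename above, combined with `rename_all = "camelCase"`, is what turns the `202 Accepted` payload key into `taskUid`. A minimal serde sketch of that effect (the struct is trimmed to two fields for illustration; it assumes `serde` with the derive feature plus `serde_json`):

```rust
use serde::Serialize;

// Trimmed-down stand-in for `SummarizedTaskView`: only the renamed
// field and one neighbour are kept.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct SummarizedTask {
    task_uid: u64,
    index_uid: Option<String>,
}

fn main() -> Result<(), serde_json::Error> {
    let task = SummarizedTask {
        task_uid: 0,
        index_uid: Some("movies".to_string()),
    };
    // `rename_all = "camelCase"` turns `task_uid` into `taskUid`,
    // which is what clients now read instead of `uid`.
    assert_eq!(
        serde_json::to_string(&task)?,
        r#"{"taskUid":0,"indexUid":"movies"}"#
    );
    Ok(())
}
```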

@@ -16,8 +16,8 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
     ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
     ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
     ("GET", "/tasks") => hashset!{"tasks.get", "*"},
-    ("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"},
-    ("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"},
+    ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "*"},
+    ("GET", "/tasks/0") => hashset!{"tasks.get", "*"},
     ("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"},
     ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
     ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
@@ -523,7 +523,7 @@ async fn error_creating_index_without_action() {
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202, "{:?}", response);
-    let task_id = response["uid"].as_u64().unwrap();
+    let task_id = response["taskUid"].as_u64().unwrap();
 
     let response = index.wait_task(task_id).await;
     assert_eq!(response["status"], "failed");
@@ -534,7 +534,7 @@ async fn error_creating_index_without_action() {
     let (response, code) = index.update_settings(settings).await;
     assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    let task_id = response["taskUid"].as_u64().unwrap();
 
     let response = index.wait_task(task_id).await;
@@ -544,7 +544,7 @@ async fn error_creating_index_without_action() {
     // try to create a index via add specialized settings route
     let (response, code) = index.update_distinct_attribute(json!("test")).await;
     assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    let task_id = response["taskUid"].as_u64().unwrap();
 
     let response = index.wait_task(task_id).await;
@@ -583,7 +583,7 @@ async fn lazy_create_index() {
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202, "{:?}", response);
-    let task_id = response["uid"].as_u64().unwrap();
+    let task_id = response["taskUid"].as_u64().unwrap();
 
     index.wait_task(task_id).await;
@@ -597,7 +597,7 @@ async fn lazy_create_index() {
     let (response, code) = index.update_settings(settings).await;
     assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    let task_id = response["taskUid"].as_u64().unwrap();
 
     index.wait_task(task_id).await;
@@ -609,7 +609,7 @@ async fn lazy_create_index() {
     let index = server.index("test2");
     let (response, code) = index.update_distinct_attribute(json!("test")).await;
     assert_eq!(code, 202);
-    let task_id = response["uid"].as_u64().unwrap();
+    let task_id = response["taskUid"].as_u64().unwrap();
 
     index.wait_task(task_id).await;

@@ -46,7 +46,7 @@ impl Index<'_> {
             .post_str(url, include_str!("../assets/test_set.json"))
             .await;
         assert_eq!(code, 202);
-        let update_id = response["uid"].as_i64().unwrap();
+        let update_id = response["taskUid"].as_i64().unwrap();
         self.wait_task(update_id as u64).await;
         update_id as u64
     }
@@ -122,12 +122,23 @@ impl Index<'_> {
     }
 
     pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
+        let url = format!("/tasks/{}", update_id);
         self.service.get(url).await
     }
 
     pub async fn list_tasks(&self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/tasks", self.uid);
+        let url = format!("/tasks?indexUid={}", self.uid);
+        self.service.get(url).await
+    }
+
+    pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) {
+        let mut url = format!("/tasks?indexUid={}", self.uid);
+        if !type_.is_empty() {
+            url += &format!("&type={}", type_.join(","));
+        }
+        if !status.is_empty() {
+            url += &format!("&status={}", status.join(","));
+        }
         self.service.get(url).await
     }

@@ -35,7 +35,7 @@ async fn add_documents_test_json_content_types() {
     let body = test::read_body(res).await;
     let response: Value = serde_json::from_slice(&body).unwrap_or_default();
     assert_eq!(status_code, 202);
-    assert_eq!(response["uid"], 0);
+    assert_eq!(response["taskUid"], 0);
 
     // put
     let req = test::TestRequest::put()
@@ -48,7 +48,7 @@ async fn add_documents_test_json_content_types() {
     let body = test::read_body(res).await;
     let response: Value = serde_json::from_slice(&body).unwrap_or_default();
     assert_eq!(status_code, 202);
-    assert_eq!(response["uid"], 1);
+    assert_eq!(response["taskUid"], 1);
 }
 
 /// any other content-type is must be refused
@@ -599,7 +599,7 @@ async fn add_documents_no_index_creation() {
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["uid"], 0);
+    assert_eq!(response["taskUid"], 0);
     /*
      * currently we dont check these field to stay ISO with meilisearch
      * assert_eq!(response["status"], "pending");
@@ -615,7 +615,7 @@ async fn add_documents_no_index_creation() {
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["uid"], 0);
-    assert_eq!(response["type"], "documentAddition");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["receivedDocuments"], 1);
     assert_eq!(response["details"]["indexedDocuments"], 1);
@@ -685,7 +685,7 @@ async fn document_addition_with_primary_key() {
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["uid"], 0);
-    assert_eq!(response["type"], "documentAddition");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["receivedDocuments"], 1);
     assert_eq!(response["details"]["indexedDocuments"], 1);
@@ -714,7 +714,7 @@ async fn document_update_with_primary_key() {
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
     assert_eq!(response["uid"], 0);
-    assert_eq!(response["type"], "documentPartial");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["indexedDocuments"], 1);
     assert_eq!(response["details"]["receivedDocuments"], 1);
@@ -818,7 +818,7 @@ async fn add_larger_dataset() {
     let (response, code) = index.get_task(update_id).await;
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
-    assert_eq!(response["type"], "documentAddition");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["indexedDocuments"], 77);
     assert_eq!(response["details"]["receivedDocuments"], 77);
     let (response, code) = index
@@ -840,7 +840,7 @@ async fn update_larger_dataset() {
     index.wait_task(0).await;
     let (response, code) = index.get_task(0).await;
     assert_eq!(code, 200);
-    assert_eq!(response["type"], "documentPartial");
+    assert_eq!(response["type"], "documentAdditionOrUpdate");
     assert_eq!(response["details"]["indexedDocuments"], 77);
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {

@@ -69,7 +69,7 @@ async fn import_dump_v2_movie_raw() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -134,7 +134,7 @@ async fn import_dump_v2_movie_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -199,7 +199,7 @@ async fn import_dump_v2_rubygems_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks["results"][0],
-        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
     );
     assert_eq!(
         tasks["results"][92],
@@ -268,7 +268,7 @@ async fn import_dump_v3_movie_raw() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -333,7 +333,7 @@ async fn import_dump_v3_movie_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -398,7 +398,7 @@ async fn import_dump_v3_rubygems_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks["results"][0],
-        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
     );
     assert_eq!(
         tasks["results"][92],
@@ -467,7 +467,7 @@ async fn import_dump_v4_movie_raw() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
+        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z"}]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -532,7 +532,7 @@ async fn import_dump_v4_movie_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks,
-        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAddition", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
+        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }]})
     );
 
     // finally we're just going to check that we can still get a few documents by id
@@ -597,7 +597,7 @@ async fn import_dump_v4_rubygems_with_settings() {
     assert_eq!(code, 200);
     assert_eq!(
         tasks["results"][0],
-        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAddition", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
+        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
     );
     assert_eq!(
         tasks["results"][92],

@@ -52,10 +52,10 @@ async fn loop_delete_add_documents() {
     let mut tasks = Vec::new();
     for _ in 0..50 {
         let (response, code) = index.add_documents(documents.clone(), None).await;
-        tasks.push(response["uid"].as_u64().unwrap());
+        tasks.push(response["taskUid"].as_u64().unwrap());
         assert_eq!(code, 202, "{}", response);
         let (response, code) = index.delete().await;
-        tasks.push(response["uid"].as_u64().unwrap());
+        tasks.push(response["taskUid"].as_u64().unwrap());
         assert_eq!(code, 202, "{}", response);
     }

@@ -35,7 +35,7 @@ async fn stats() {
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["uid"], 1);
+    assert_eq!(response["taskUid"], 1);
 
     index.wait_task(1).await;

@@ -122,7 +122,7 @@ async fn reset_all_settings() {
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["uid"], 0);
+    assert_eq!(response["taskUid"], 0);
     index.wait_task(0).await;
 
     index

@@ -54,7 +54,7 @@ async fn stats() {
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202, "{}", response);
-    assert_eq!(response["uid"], 1);
+    assert_eq!(response["taskUid"], 1);
 
     index.wait_task(1).await;

@@ -3,22 +3,6 @@ use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
-#[actix_rt::test]
-async fn error_get_task_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.service.get("/indexes/test/tasks").await;
-
-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}
-
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
     let server = Server::new().await;
@@ -58,22 +42,6 @@ async fn get_task_status() {
     // TODO check resonse format, as per #48
 }
 
-#[actix_rt::test]
-async fn error_list_tasks_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").list_tasks().await;
-
-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });
-
-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}
-
 #[actix_rt::test]
 async fn list_tasks() {
     let server = Server::new().await;
@@ -91,10 +59,140 @@ async fn list_tasks() {
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
+#[actix_rt::test]
+async fn list_tasks_with_star_filters() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.service.get("/tasks?indexUid=test").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index.service.get("/tasks?indexUid=*").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index.service.get("/tasks?type=*").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index
+        .service
+        .get("/tasks?type=*,documentAdditionOrUpdate&status=*")
+        .await;
+    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index
+        .service
+        .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test")
+        .await;
+    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+
+    let (response, code) = index
+        .service
+        .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*")
+        .await;
+    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+#[actix_rt::test]
+async fn list_tasks_status_filtered() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 1);
+
+    // We can't be sure that the update isn't already processed so we can't test this
+    // let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
+    // assert_eq!(code, 200, "{}", response);
+    // assert_eq!(response["results"].as_array().unwrap().len(), 1);
+
+    index.wait_task(1).await;
+
+    let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+#[actix_rt::test]
+async fn list_tasks_type_filtered() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 1);
+
+    let (response, code) = index
+        .filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
+        .await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
+#[actix_rt::test]
+async fn list_tasks_status_and_type_filtered() {
+    let server = Server::new().await;
+    let index = server.index("test");
+    index.create(None).await;
+    index.wait_task(0).await;
+    index
+        .add_documents(
+            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
+            None,
+        )
+        .await;
+
+    let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 0);
+
+    let (response, code) = index
+        .filtered_tasks(
+            &["indexCreation", "documentAdditionOrUpdate"],
+            &["succeeded", "processing"],
+        )
+        .await;
+    assert_eq!(code, 200, "{}", response);
+    assert_eq!(response["results"].as_array().unwrap().len(), 2);
+}
+
 macro_rules! assert_valid_summarized_task {
     ($response:expr, $task_type:literal, $index:literal) => {{
         assert_eq!($response.as_object().unwrap().len(), 5);
-        assert!($response["uid"].as_u64().is_some());
+        assert!($response["taskUid"].as_u64().is_some());
         assert_eq!($response["indexUid"], $index);
         assert_eq!($response["status"], "enqueued");
         assert_eq!($response["type"], $task_type);
@@ -119,16 +217,16 @@ async fn test_summarized_task_view() {
     assert_valid_summarized_task!(response, "settingsUpdate", "test");
 
     let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentPartial", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
 
     let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAddition", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
 
     let (response, _) = index.delete_document(1).await;
     assert_valid_summarized_task!(response, "documentDeletion", "test");
 
     let (response, _) = index.clear_all_documents().await;
-    assert_valid_summarized_task!(response, "clearAll", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", "test");
 
     let (response, _) = index.delete().await;
     assert_valid_summarized_task!(response, "indexDeletion", "test");

@@ -35,7 +35,8 @@ use error::Result;
 use self::error::IndexControllerError;
 use crate::index_resolver::index_store::{IndexStore, MapIndexStore};
 use crate::index_resolver::meta_store::{HeedMetaStore, IndexMetaStore};
-use crate::index_resolver::{create_index_resolver, IndexResolver, IndexUid};
+pub use crate::index_resolver::IndexUid;
+use crate::index_resolver::{create_index_resolver, IndexResolver};
 use crate::update_file_store::UpdateFileStore;
 
 pub mod error;

@@ -4,6 +4,7 @@ pub mod meta_store;
 
 use std::convert::{TryFrom, TryInto};
 use std::path::Path;
+use std::str::FromStr;
 use std::sync::Arc;
 
 use error::{IndexResolverError, Result};
@@ -88,6 +89,14 @@ impl TryInto<IndexUid> for String {
     }
 }
 
+impl FromStr for IndexUid {
+    type Err = IndexResolverError;
+
+    fn from_str(s: &str) -> Result<IndexUid> {
+        IndexUid::new(s.to_string())
+    }
+}
+
 pub struct IndexResolver<U, I> {
     index_uuid_store: U,
     index_store: I,

@@ -13,7 +13,7 @@ mod update_file_store;
 
 use std::path::Path;
 
-pub use index_controller::MeiliSearch;
+pub use index_controller::{IndexUid, MeiliSearch};
 pub use milli;
 pub use milli::heed;