From f1d38581e512e13edb67299e81dbf5ed547d199b Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Nov 2024 15:05:34 +0100 Subject: [PATCH] add the front end tests on the batches routes --- crates/meilisearch/tests/batches/errors.rs | 211 ++++++ crates/meilisearch/tests/batches/mod.rs | 778 ++++++++++++++++++++ crates/meilisearch/tests/batches/webhook.rs | 129 ++++ crates/meilisearch/tests/common/index.rs | 29 + crates/meilisearch/tests/common/server.rs | 9 + crates/meilisearch/tests/integration.rs | 1 + 6 files changed, 1157 insertions(+) create mode 100644 crates/meilisearch/tests/batches/errors.rs create mode 100644 crates/meilisearch/tests/batches/mod.rs create mode 100644 crates/meilisearch/tests/batches/webhook.rs diff --git a/crates/meilisearch/tests/batches/errors.rs b/crates/meilisearch/tests/batches/errors.rs new file mode 100644 index 000000000..11f9e9b3c --- /dev/null +++ b/crates/meilisearch/tests/batches/errors.rs @@ -0,0 +1,211 @@ +use meili_snap::*; + +use crate::common::Server; + +#[actix_rt::test] +async fn batch_bad_uids() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("uids=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer", + "code": "invalid_task_uids", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_uids" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_canceled_by() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("canceledBy=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer", + "code": "invalid_task_canceled_by", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_canceled_by" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_types() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("types=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", + "code": "invalid_task_types", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_types" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_statuses() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("statuses=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. 
Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", + "code": "invalid_task_statuses", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_statuses" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_index_uids() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("indexUids=the%20good%20doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn batch_bad_limit() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("limit=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer", + "code": "invalid_task_limit", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_limit" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_from() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("from=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `from`: could not parse `doggo` as a positive integer", + "code": "invalid_task_from", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_from" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_after_enqueued_at() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("afterEnqueuedAt=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_after_enqueued_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_before_enqueued_at() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("beforeEnqueuedAt=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_enqueued_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_after_started_at() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("afterStartedAt=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_after_started_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_before_started_at() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("beforeStartedAt=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_started_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_after_finished_at() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("afterFinishedAt=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_after_finished_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at" + } + "#); +} + +#[actix_rt::test] +async fn batch_bad_before_finished_at() { + let server = Server::new_shared(); + + let (response, code) = server.batches_filter("beforeFinishedAt=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r#" + { + "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_finished_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at" + } + "#); +} diff --git a/crates/meilisearch/tests/batches/mod.rs b/crates/meilisearch/tests/batches/mod.rs new file mode 100644 index 000000000..48fbc34e3 --- /dev/null +++ b/crates/meilisearch/tests/batches/mod.rs @@ -0,0 +1,778 @@ +mod errors; + +use meili_snap::insta::assert_json_snapshot; +use meili_snap::snapshot; +use time::format_description::well_known::Rfc3339; +use time::OffsetDateTime; + +use crate::common::Server; +use crate::json; + +#[actix_rt::test] +async fn error_get_unexisting_batch_status() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + let (response, code) = index.get_batch(1).await; + + let expected_response = json!({ + "message": "batch `1` not found.", + "code": "batch_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#batch_not_found" + }); + + assert_eq!(response, expected_response); + assert_eq!(code, 404); +} + +#[actix_rt::test] +async fn get_batch_status() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index + .add_documents( + json!([{ + "id": 1, + "content": "foobar", + }]), + None, + ) + .await; + index.wait_task(0).await; + let (_response, code) = index.get_batch(1).await; + assert_eq!(code, 200); + // TODO check response format, as per #48 +} + +#[actix_rt::test] +async fn list_batches() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + 
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + let (response, code) = index.list_batches().await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn list_batches_with_star_filters() { + let server = Server::new().await; + let index = server.index("test"); + let (batch, _code) = index.create(None).await; + index.wait_task(batch.uid()).await; + index + .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + let (response, code) = index.service.get("/batches?indexUids=test").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index.service.get("/batches?indexUids=*").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index.service.get("/batches?indexUids=*,pasteque").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index.service.get("/batches?types=*").await; + assert_eq!(code, 200); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = + index.service.get("/batches?types=*,documentAdditionOrUpdate&statuses=*").await; + assert_eq!(code, 200, "{:?}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index + .service + .get("/batches?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test") + .await; + assert_eq!(code, 200, "{:?}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); + + let (response, code) = index + .service + .get("/batches?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*") + .await; + assert_eq!(code, 200, "{:?}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn list_batches_status_filtered() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + + let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 1); + + // We can't be sure that the update isn't already processed so we can't test this + // let (response, code) = index.filtered_batches(&[], &["processing"]).await; + // assert_eq!(code, 200, "{}", response); + // assert_eq!(response["results"].as_array().unwrap().len(), 1); + + index.wait_task(1).await; + + let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn list_batches_type_filtered() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + + let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 1); + + let (response, code) = + index.filtered_batches(&["indexCreation", "documentAdditionOrUpdate"], 
&[], &[]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn list_batches_invalid_canceled_by_filter() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + + let (response, code) = index.filtered_batches(&[], &[], &["0"]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 0); +} + +#[actix_rt::test] +async fn list_batches_status_and_type_filtered() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + + let (response, code) = index.filtered_batches(&["indexCreation"], &["failed"], &[]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 0); + + let (response, code) = index + .filtered_batches( + &["indexCreation", "documentAdditionOrUpdate"], + &["succeeded", "processing", "enqueued"], + &[], + ) + .await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 2); +} + +#[actix_rt::test] +async fn get_batch_filter_error() { + let server = Server::new().await; + + let (response, code) = server.batches_filter("lol=pied").await; + assert_eq!(code, 400, "{}", response); + meili_snap::snapshot!(meili_snap::json_string!(response), @r###" + { + "message": "Unknown parameter `lol`: expected one of `limit`, `from`, `batchUids`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); + + let (response, code) = server.batches_filter("uids=pied").await; + assert_eq!(code, 400, "{}", response); + meili_snap::snapshot!(meili_snap::json_string!(response), @r#" + { + "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer", + "code": "invalid_task_uids", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_uids" + } + "#); + + let (response, code) = server.batches_filter("from=pied").await; + assert_eq!(code, 400, "{}", response); + meili_snap::snapshot!(meili_snap::json_string!(response), @r#" + { + "message": "Invalid value in parameter `from`: could not parse `pied` as a positive integer", + "code": "invalid_task_from", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_from" + } + "#); + + let (response, code) = server.batches_filter("beforeStartedAt=pied").await; + assert_eq!(code, 400, "{}", response); + meili_snap::snapshot!(meili_snap::json_string!(response), @r#" + { + "message": "Invalid value in parameter `beforeStartedAt`: `pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_started_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at" + } + "#); +} + +macro_rules! 
assert_valid_summarized_batch { + ($response:expr, $batch_type:literal, $index:literal) => {{ + assert_eq!($response.as_object().unwrap().len(), 5); + assert!($response["batchUid"].as_u64().is_some()); + assert_eq!($response["indexUid"], $index); + assert_eq!($response["status"], "enqueued"); + assert_eq!($response["type"], $batch_type); + let date = $response["enqueuedAt"].as_str().expect("missing date"); + + OffsetDateTime::parse(date, &Rfc3339).unwrap(); + }}; +} + +#[actix_web::test] +async fn test_summarized_batch_view() { + let server = Server::new().await; + let index = server.index("test"); + + let (response, _) = index.create(None).await; + assert_valid_summarized_batch!(response, "indexCreation", "test"); + + let (response, _) = index.update(None).await; + assert_valid_summarized_batch!(response, "indexUpdate", "test"); + + let (response, _) = index.update_settings(json!({})).await; + assert_valid_summarized_batch!(response, "settingsUpdate", "test"); + + let (response, _) = index.update_documents(json!([{"id": 1}]), None).await; + assert_valid_summarized_batch!(response, "documentAdditionOrUpdate", "test"); + + let (response, _) = index.add_documents(json!([{"id": 1}]), None).await; + assert_valid_summarized_batch!(response, "documentAdditionOrUpdate", "test"); + + let (response, _) = index.delete_document(1).await; + assert_valid_summarized_batch!(response, "documentDeletion", "test"); + + let (response, _) = index.clear_all_documents().await; + assert_valid_summarized_batch!(response, "documentDeletion", "test"); + + let (response, _) = index.delete().await; + assert_valid_summarized_batch!(response, "indexDeletion", "test"); +} + +#[actix_web::test] +async fn test_summarized_document_addition_or_update() { + let server = Server::new().await; + let index = server.index("test"); + index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await; + index.wait_task(1).await; + let (batch, _) = index.get_batch(1).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 1, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_delete_documents_by_batch() { + let server = Server::new().await; + let index = server.index("test"); + index.delete_batch(vec![1, 2, 3]).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.create(None).await; + index.delete_batch(vec![42]).await; + index.wait_task(2).await; + let (batch, _) = index.get_batch(2).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 2, + "duration": "[duration]", + "startedAt": "[date]", + 
"finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_delete_documents_by_filter() { + let server = Server::new().await; + let index = server.index("test"); + + index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.create(None).await; + index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; + index.wait_task(2).await; + let (batch, _) = index.get_batch(2).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 2, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await; + index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await; + index.wait_task(4).await; + let (batch, _) = index.get_batch(4).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 4, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_delete_document_by_id() { + let server = Server::new().await; + let index = server.index("test"); + index.delete_document(1).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.create(None).await; + index.delete_document(42).await; + index.wait_task(2).await; + let (batch, _) = index.get_batch(2).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 2, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_settings_update() { + let server = Server::new().await; + let index = server.index("test"); + // here we should find my payload even in the failed batch. + let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await; + meili_snap::snapshot!(code, @"400 Bad Request"); + meili_snap::snapshot!(meili_snap::json_string!(response), @r###" + { + "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. 
Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.", + "code": "invalid_settings_ranking_rules", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_settings_ranking_rules" + } + "###); + + index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_index_creation() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.create(Some("doggos")).await; + index.wait_task(1).await; + let (batch, _) = index.get_batch(1).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 1, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_index_deletion() { + let server = Server::new().await; + let index = server.index("test"); + let (ret, _code) = index.delete().await; + let batch = index.wait_task(ret.uid()).await; + snapshot!(batch, + @r###" + { + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "test", + "status": "failed", + "type": "indexDeletion", + "canceledBy": null, + "details": { + "deletedDocuments": 0 + }, + "error": { + "message": "Index `test` not found.", + "code": "index_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#index_not_found" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // is the details correctly set when documents are actually deleted. + // /!\ We need to wait for the document addition to be processed otherwise, if the test runs too slow, + // both batches may get autobatched and the deleted documents count will be wrong. 
+ let (ret, _code) = + index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await; + let batch = index.wait_task(ret.uid()).await; + snapshot!(batch, + @r###" + { + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + let (ret, _code) = index.delete().await; + let batch = index.wait_task(ret.uid()).await; + snapshot!(batch, + @r###" + { + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "test", + "status": "succeeded", + "type": "indexDeletion", + "canceledBy": null, + "details": { + "deletedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // What happens when you delete an index that doesn't exists. + let (ret, _code) = index.delete().await; + let batch = index.wait_task(ret.uid()).await; + snapshot!(batch, + @r###" + { + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "test", + "status": "failed", + "type": "indexDeletion", + "canceledBy": null, + "details": { + "deletedDocuments": 0 + }, + "error": { + "message": "Index `test` not found.", + "code": "index_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#index_not_found" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); +} + +#[actix_web::test] +async fn test_summarized_index_update() { + let server = Server::new().await; + let index = server.index("test"); + // If the index doesn't exist yet, we should get errors with or without the primary key. + index.update(None).await; + index.wait_task(0).await; + let (batch, _) = index.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.update(Some("bones")).await; + index.wait_task(1).await; + let (batch, _) = index.get_batch(1).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 1, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + // And run the same two tests once the index do exists. 
+ index.create(None).await; + + index.update(None).await; + index.wait_task(3).await; + let (batch, _) = index.get_batch(3).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 3, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + index.update(Some("bones")).await; + index.wait_task(4).await; + let (batch, _) = index.get_batch(4).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 4, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_index_swap() { + let server = Server::new().await; + server + .index_swap(json!([ + { "indexes": ["doggos", "cattos"] } + ])) + .await; + server.wait_task(0).await; + let (batch, _) = server.get_batch(0).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); + + server.index("doggos").create(None).await; + server.index("cattos").create(None).await; + server + .index_swap(json!([ + { "indexes": ["doggos", "cattos"] } + ])) + .await; + server.wait_task(3).await; + let (batch, _) = server.get_batch(3).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 3, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_batch_cancelation() { + let server = Server::new().await; + let index = server.index("doggos"); + // to avoid being flaky we're only going to cancel an already finished batch :( + index.create(None).await; + index.wait_task(0).await; + server.cancel_tasks("uids=0").await; + index.wait_task(1).await; + let (batch, _) = index.get_batch(1).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 1, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_batch_deletion() { + let server = Server::new().await; + let index = server.index("doggos"); + // to avoid being flaky we're only going to delete an already finished batch :( + index.create(None).await; + index.wait_task(0).await; + server.delete_tasks("uids=0").await; + index.wait_task(1).await; + let (batch, _) = index.get_batch(1).await; + assert_json_snapshot!(batch, + { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 1, + "duration": "[duration]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "#); +} + +#[actix_web::test] +async fn test_summarized_dump_creation() { + let server = Server::new().await; + server.create_dump().await; + server.wait_task(0).await; + let (batch, _) = server.get_batch(0).await; + assert_json_snapshot!(batch, + { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + @r#" + { + "uid": 0, + "duration": "[duration]", + "startedAt": 
"[date]", + "finishedAt": "[date]" + } + "#); +} diff --git a/crates/meilisearch/tests/batches/webhook.rs b/crates/meilisearch/tests/batches/webhook.rs new file mode 100644 index 000000000..b18002eb7 --- /dev/null +++ b/crates/meilisearch/tests/batches/webhook.rs @@ -0,0 +1,129 @@ +//! To test the webhook, we need to spawn a new server with a URL listening for +//! post requests. The webhook handle starts a server and forwards all the +//! received requests into a channel for you to handle. + +use std::sync::Arc; + +use actix_http::body::MessageBody; +use actix_web::dev::{ServiceFactory, ServiceResponse}; +use actix_web::web::{Bytes, Data}; +use actix_web::{post, App, HttpRequest, HttpResponse, HttpServer}; +use meili_snap::snapshot; +use meilisearch::Opt; +use tokio::sync::mpsc; +use url::Url; + +use crate::common::{self, default_settings, Server}; +use crate::json; + +#[post("/")] +async fn forward_body( + req: HttpRequest, + sender: Data>>, + body: Bytes, +) -> HttpResponse { + let headers = req.headers(); + assert_eq!(headers.get("content-type").unwrap(), "application/x-ndjson"); + assert_eq!(headers.get("transfer-encoding").unwrap(), "chunked"); + assert_eq!(headers.get("accept-encoding").unwrap(), "gzip"); + assert_eq!(headers.get("content-encoding").unwrap(), "gzip"); + + let body = body.to_vec(); + sender.send(body).unwrap(); + HttpResponse::Ok().into() +} + +fn create_app( + sender: Arc>>, +) -> actix_web::App< + impl ServiceFactory< + actix_web::dev::ServiceRequest, + Config = (), + Response = ServiceResponse, + Error = actix_web::Error, + InitError = (), + >, +> { + App::new().service(forward_body).app_data(Data::from(sender)) +} + +struct WebhookHandle { + pub server_handle: tokio::task::JoinHandle>, + pub url: String, + pub receiver: mpsc::UnboundedReceiver>, +} + +async fn create_webhook_server() -> WebhookHandle { + let (sender, receiver) = mpsc::unbounded_channel(); + let sender = Arc::new(sender); + + // By listening on the port 0, the system will give us any available port. + let server = + HttpServer::new(move || create_app(sender.clone())).bind(("127.0.0.1", 0)).unwrap(); + let (ip, scheme) = server.addrs_with_scheme()[0]; + let url = format!("{scheme}://{ip}/"); + + let server_handle = tokio::spawn(server.run()); + WebhookHandle { server_handle, url, receiver } +} + +#[actix_web::test] +async fn test_basic_webhook() { + let WebhookHandle { server_handle, url, mut receiver } = create_webhook_server().await; + + let db_path = tempfile::tempdir().unwrap(); + let server = Server::new_with_options(Opt { + task_webhook_url: Some(Url::parse(&url).unwrap()), + ..default_settings(db_path.path()) + }) + .await + .unwrap(); + + let index = server.index("tamo"); + // May be flaky: we're relying on the fact that while the first document addition is processed, the other + // operations will be received and will be batched together. If it doesn't happen it's not a problem + // the rest of the test won't assume anything about the number of tasks per batch. 
+ for i in 0..5 { + let (_, _status) = index.add_documents(json!({ "id": i, "doggo": "bone" }), None).await; + } + + let mut nb_tasks = 0; + while let Some(payload) = receiver.recv().await { + let payload = String::from_utf8(payload).unwrap(); + let jsonl = payload.split('\n'); + for json in jsonl { + if json.is_empty() { + break; // we reached EOF + } + nb_tasks += 1; + let json: serde_json::Value = serde_json::from_str(json).unwrap(); + snapshot!(common::Value(json), + @r###" + { + "uid": "[uid]", + "batchUid": "[batch_uid]", + "indexUid": "tamo", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + } + if nb_tasks == 5 { + break; + } + } + + assert!(nb_tasks == 5, "We should have received the 5 tasks but only received {nb_tasks}"); + + server_handle.abort(); +} diff --git a/crates/meilisearch/tests/common/index.rs b/crates/meilisearch/tests/common/index.rs index 221333fd7..b8c1d2824 100644 --- a/crates/meilisearch/tests/common/index.rs +++ b/crates/meilisearch/tests/common/index.rs @@ -136,6 +136,11 @@ impl<'a> Index<'a, Owned> { self.service.get(url).await } + pub async fn list_batches(&self) -> (Value, StatusCode) { + let url = format!("/batches?indexUids={}", self.uid); + self.service.get(url).await + } + pub async fn delete_document(&self, id: u64) -> (Value, StatusCode) { let url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id); self.service.delete(url).await @@ -374,6 +379,30 @@ impl Index<'_, State> { self.service.get(url).await } + pub async fn get_batch(&self, batch_id: u32) -> (Value, StatusCode) { + let url = format!("/batches/{}", batch_id); + self.service.get(url).await + } + + pub async fn filtered_batches( + &self, + types: &[&str], + statuses: &[&str], + canceled_by: &[&str], + ) -> (Value, StatusCode) { + let mut url = format!("/batches?indexUids={}", self.uid); + if !types.is_empty() { + let _ = write!(url, "&types={}", types.join(",")); + } + if !statuses.is_empty() { + let _ = write!(url, "&statuses={}", statuses.join(",")); + } + if !canceled_by.is_empty() { + let _ = write!(url, "&canceledBy={}", canceled_by.join(",")); + } + self.service.get(url).await + } + pub async fn get_document(&self, id: u64, options: Option) -> (Value, StatusCode) { let mut url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id); if let Some(options) = options { diff --git a/crates/meilisearch/tests/common/server.rs b/crates/meilisearch/tests/common/server.rs index 5069c9ea6..49214d646 100644 --- a/crates/meilisearch/tests/common/server.rs +++ b/crates/meilisearch/tests/common/server.rs @@ -330,6 +330,10 @@ impl Server { self.service.get(format!("/tasks?{}", filter)).await } + pub async fn batches_filter(&self, filter: &str) -> (Value, StatusCode) { + self.service.get(format!("/batches?{}", filter)).await + } + pub async fn version(&self) -> (Value, StatusCode) { self.service.get("/version").await } @@ -376,6 +380,11 @@ impl Server { self.service.get(url).await } + pub async fn get_batch(&self, batch_id: u32) -> (Value, StatusCode) { + let url = format!("/batches/{}", batch_id); + self.service.get(url).await + } + pub async fn get_features(&self) -> (Value, StatusCode) { self.service.get("/experimental-features").await } diff --git a/crates/meilisearch/tests/integration.rs b/crates/meilisearch/tests/integration.rs 
b/crates/meilisearch/tests/integration.rs
index 78da9825a..85deb9cdf 100644
--- a/crates/meilisearch/tests/integration.rs
+++ b/crates/meilisearch/tests/integration.rs
@@ -1,4 +1,5 @@
 mod auth;
+mod batches;
 mod common;
 mod dashboard;
 mod documents;
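
As a reading aid only, not part of the diff above: a minimal sketch of how the new test helpers introduced in common/index.rs and common/server.rs (batches_filter, list_batches, filtered_batches, get_batch) are meant to compose in a test. It assumes the same harness the suite already uses (Server::new, server.index, wait_task); the index name, document payload, and batch uid assertions are illustrative only.

// Illustrative sketch, not part of the patch: exercises the new batch helpers
// using the same patterns as the tests in batches/mod.rs.
use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn batches_helpers_smoke() {
    let server = Server::new().await;
    let index = server.index("doggos");

    // Enqueue two operations so at least one batch exists once they are processed.
    index.create(None).await;
    let (task, _code) = index.add_documents(json!([{ "id": 1, "doggo": "bone" }]), None).await;
    index.wait_task(task.uid()).await;

    // Server-wide filtering through the raw query-string helper.
    let (response, code) = server.batches_filter("statuses=succeeded").await;
    assert_eq!(code, 200, "{}", response);

    // Index-scoped listing and structured filtering.
    let (response, code) = index.list_batches().await;
    assert_eq!(code, 200, "{}", response);
    let (response, code) = index
        .filtered_batches(&["indexCreation", "documentAdditionOrUpdate"], &["succeeded"], &[])
        .await;
    assert_eq!(code, 200, "{}", response);
    assert!(!response["results"].as_array().unwrap().is_empty());

    // Fetching a single batch by uid, both from the server and from the index.
    let (batch, code) = server.get_batch(0).await;
    assert_eq!(code, 200, "{}", batch);
    let (batch, code) = index.get_batch(0).await;
    assert_eq!(code, 200, "{}", batch);
}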