Fix tests
parent b6d80293f7
commit fcbd47281b
Cargo.lock (generated)
@@ -2304,6 +2304,7 @@ dependencies = [
  "http",
  "index-scheduler",
  "indexmap",
+ "insta",
  "itertools",
  "jsonwebtoken",
  "lazy_static",
@@ -419,7 +419,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -201,7 +201,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -279,7 +279,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"491e244a80a19fe2a900b809d310c24a");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"c2445ddd1785528b80f2ba534d3bd00c");
         assert_eq!(update_files.len(), 10);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -356,7 +356,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"7cacce2e21702be696b866808c726946");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"cd12efd308fe3ed226356a727ab42ed3");
         assert_eq!(update_files.len(), 10);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -449,7 +449,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6cabec4e252b74c8f3a2c8517622e85f");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"bc616290adfe7d09a624cf6065ca9069");
         assert_eq!(update_files.len(), 9);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
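Note on the five hunks above: `meili_snap::snapshot_hash!` pins a 32-character digest of `meili_snap::json_string!(tasks)` rather than the JSON itself, so a behavioral change in task serialization surfaces as a one-line hash swap. A minimal sketch of that hash-pinned-snapshot idea, assuming the `md5` and `serde_json` crates rather than meili-snap's actual internals:

    // Hash-pinned snapshot: compare a digest of the serialized value against
    // an inline literal so large payloads stay a single line in the test.
    // Sketch only; meili-snap's real macro may hash or format differently.
    fn snapshot_hash(value: &serde_json::Value, expected: &str) {
        let serialized = serde_json::to_string_pretty(value).unwrap();
        let digest = format!("{:x}", md5::compute(serialized.as_bytes()));
        assert_eq!(digest, expected, "snapshot hash changed for: {serialized}");
    }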
@@ -76,6 +76,7 @@ yaup = "0.2.0"
 actix-rt = "2.7.0"
 assert-json-diff = "2.0.2"
 brotli = "3.3.4"
+insta = "1.19.1"
 manifest-dir-macros = "0.1.16"
 maplit = "1.0.2"
 meili-snap = {path = "../meili-snap"}
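The lockfile and manifest hunks above pull in insta, which the rewritten test below relies on: `insta::assert_json_snapshot!` serializes the value to JSON, applies the redactions listed in braces to volatile fields, and diffs the result against the inline snapshot after `@`. A small self-contained example of the same pattern (the struct and values are illustrative, and the redaction syntax assumes insta's `json` and `redactions` features):

    use serde::Serialize;

    #[derive(Serialize)]
    struct Task {
        uid: u32,
        status: &'static str,
        enqueued_at: String, // wall-clock value that would break a verbatim snapshot
    }

    #[test]
    fn redacted_inline_snapshot() {
        let task = Task { uid: 0, status: "failed", enqueued_at: "2023-01-11T00:00:00Z".into() };
        // The redaction rewrites the volatile field before comparison, just like
        // the ".enqueuedAt" => "[date]" entries in the hunk below.
        insta::assert_json_snapshot!(task, { ".enqueued_at" => "[date]" }, @r###"
        {
          "uid": 0,
          "status": "failed",
          "enqueued_at": "[date]"
        }
        "###);
    }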
@@ -896,16 +896,104 @@ async fn error_primary_key_inference() {
     index.wait_task(0).await;
     let (response, code) = index.get_task(0).await;
     assert_eq!(code, 200);
-    assert_eq!(response["status"], "failed");
 
-    let expected_error = json!({
-        "message": r#"The primary key inference process failed because the engine did not find any fields containing `id` substring in their name. If your document identifier does not contain any `id` substring, you can set the primary key of the index."#,
-        "code": "primary_key_inference_failed",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#primary_key_inference_failed"
-    });
-
-    assert_eq!(response["error"], expected_error);
+    insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+        @r###"
+    {
+      "uid": 0,
+      "indexUid": "test",
+      "status": "failed",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": {
+        "message": "The primary key inference process failed because the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.",
+        "code": "index_primary_key_no_candidate_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found"
+      },
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
+
+    let documents = json!([
+        {
+            "primary_id": "12",
+            "object_id": "42",
+            "id": "124",
+            "title": "11",
+            "desc": "foobar"
+        }
+    ]);
+
+    index.add_documents(documents, None).await;
+    index.wait_task(1).await;
+    let (response, code) = index.get_task(1).await;
+    assert_eq!(code, 200);
+
+    insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+        @r###"
+    {
+      "uid": 1,
+      "indexUid": "test",
+      "status": "failed",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": {
+        "message": "The primary key inference process failed because the engine found 3 fields ending with `id` in their name, such as 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.",
+        "code": "index_primary_key_multiple_candidates_found",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#index_primary_key_multiple_candidates_found"
+      },
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
+
+    let documents = json!([
+        {
+            "primary_id": "12",
+            "title": "11",
+            "desc": "foobar"
+        }
+    ]);
+
+    index.add_documents(documents, None).await;
+    index.wait_task(2).await;
+    let (response, code) = index.get_task(2).await;
+    assert_eq!(code, 200);
+
+    insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+        @r###"
+    {
+      "uid": 2,
+      "indexUid": "test",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
 }
 
 #[actix_rt::test]
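The rewritten test now exercises all three inference outcomes the new error messages describe: a document with no field ending in `id` fails with `index_primary_key_no_candidate_found` (task 0), one with three such fields fails with `index_primary_key_multiple_candidates_found` (task 1), and one whose only candidate is `primary_id` succeeds (task 2). A hedged sketch of the candidate rule those messages imply, not milli's actual implementation:

    // A field qualifies as a primary-key candidate when its name ends with "id";
    // zero candidates and multiple candidates are the two failure modes above.
    // Illustrative only, inferred from the error messages in this diff.
    fn primary_key_candidates<'a>(fields: &[&'a str]) -> Vec<&'a str> {
        fields.iter().copied().filter(|f| f.to_lowercase().ends_with("id")).collect()
    }

    // From the test's documents:
    //   ["primary_id", "object_id", "id", "title", "desc"] -> 3 candidates -> error
    //   ["primary_id", "title", "desc"]                    -> 1 candidate  -> inferred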