mirror of https://github.com/meilisearch/meilisearch.git
synced 2024-11-23 02:27:40 +08:00

commit a015e232ab
Merge remote-tracking branch 'origin/release-v1.0.0' into bring-v1-changes

.github/workflows/publish-docker-images.yml (vendored) | 3 +++
@@ -52,6 +52,9 @@ jobs:
       - name: Set build-args for Docker buildx
         id: build-metadata
         run: |
+          # Define ownership
+          git config --global --add safe.directory /home/meili/actions-runner/_work/meilisearch/meilisearch
+
           # Extract commit date
           commit_date=$(git show -s --format=%cd --date=iso-strict ${{ github.sha }})

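Note: the added git config --global --add safe.directory line works around git's dubious-ownership check (since git 2.35.2, git refuses to read a repository owned by a different user, a common situation on self-hosted runners), so that the git show call below it can read the commit date of ${{ github.sha }} in ISO 8601 strict form.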
Cargo.lock (generated) | 419
@@ -77,8 +77,8 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6"
 dependencies = [
- "quote 1.0.23",
- "syn 1.0.107",
+ "quote",
+ "syn",
 ]

 [[package]]
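Most of the Cargo.lock churn below follows one Cargo convention: a dependency entry only carries a version suffix (for example "quote 1.0.23") while the lock file holds more than one version of that package. Dropping proptest-derive later in this diff removes the old proc-macro2 0.4.30, quote 0.6.13 and syn 0.15.44 tree, so the suffixes on the surviving 1.x entries disappear; conversely, pinning a second milli (0.41.1 next to 1.0.0) makes the milli entries versioned.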
@@ -211,9 +211,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1fa9362663c8643d67b2d5eafba49e4cb2c8a053a29ed00a0bea121f17c76b13"
 dependencies = [
  "actix-router",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -330,9 +330,9 @@ version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -341,9 +341,9 @@ version = "0.1.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -415,7 +415,7 @@ dependencies = [
  "criterion",
  "csv",
  "flate2",
- "milli",
+ "milli 1.0.0",
  "mimalloc",
  "rand",
  "rand_chacha",
@@ -448,15 +448,6 @@ dependencies = [
  "serde",
 ]

-[[package]]
-name = "bit-set"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
-dependencies = [
- "bit-vec",
-]
-
 [[package]]
 name = "bit-vec"
 version = "0.6.3"
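bit-set drops out here because it was only pulled in by proptest, which is removed from a dependency list further down in this diff; bit-vec itself stays in the lock file.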
@@ -566,9 +557,9 @@ version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5fe233b960f12f8007e3db2d136e3cb1c291bfd7396e384ee76025fc1a3932b4"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -761,9 +752,9 @@ checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65"
 dependencies = [
  "heck",
  "proc-macro-error",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -774,9 +765,9 @@ checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
 dependencies = [
  "heck",
  "proc-macro-error",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -803,9 +794,9 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -1056,10 +1047,10 @@ checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f"
 dependencies = [
  "fnv",
  "ident_case",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "strsim",
- "syn 1.0.107",
+ "syn",
 ]

 [[package]]
@@ -1069,8 +1060,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e"
 dependencies = [
  "darling_core",
- "quote 1.0.23",
- "syn 1.0.107",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -1089,9 +1080,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f91d4cfa921f1c05904dc3c57b4a32c38aed3340cce209f3a6fd1478babafc4"
 dependencies = [
  "darling",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -1101,7 +1092,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68"
 dependencies = [
  "derive_builder_core",
- "syn 1.0.107",
+ "syn",
 ]

 [[package]]
@@ -1111,10 +1102,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
 dependencies = [
  "convert_case 0.4.0",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "rustc_version",
- "syn 1.0.107",
+ "syn",
 ]

 [[package]]
@@ -1123,7 +1114,18 @@ version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "86290491a2b5c21a1a5083da8dae831006761258fabd5617309c3eebc5f89468"
 dependencies = [
- "deserr-internal",
+ "deserr-internal 0.1.4",
+ "serde-cs",
+ "serde_json",
+]
+
+[[package]]
+name = "deserr"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28380303ca15ec07e1d5b079baf19cf849b09edad5cab219c1c51b2bd07523de"
+dependencies = [
+ "deserr-internal 0.3.0",
  "serde-cs",
  "serde_json",
 ]
@@ -1135,9 +1137,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7131de1c27581bc376a22166c9f570be91b76cb096be2f6aecf224c27bf7c49a"
 dependencies = [
  "convert_case 0.5.0",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "deserr-internal"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "860928cd8af78d223a3d70dd581f21d7c3de8aa2eecd938e0c0a399ded7c1451"
+dependencies = [
+ "convert_case 0.5.0",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
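Two deserr releases now coexist in the lock file: 0.1.4, still used by the workspace milli 1.0.0, and 0.3.0, used by the meilisearch crates and by the git-pinned milli 0.41.1, each with its matching deserr-internal.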
@@ -1315,9 +1329,9 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "828de45d0ca18782232dfb8f3ea9cc428e8ced380eb26a520baaacfc70de39ce"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -1380,9 +1394,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "35c9bb4a2c13ffb3a93a39902aaf4e7190a1706a4779b6db0449aee433d26c4a"
 dependencies = [
  "darling",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "uuid 0.8.2",
 ]

@@ -1408,6 +1422,15 @@ dependencies = [
  "windows-sys",
 ]

+[[package]]
+name = "filter-parser"
+version = "0.41.1"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
+dependencies = [
+ "nom",
+ "nom_locate",
+]
+
 [[package]]
 name = "filter-parser"
 version = "1.0.0"
@@ -1428,6 +1451,14 @@ dependencies = [
  "miniz_oxide",
 ]

+[[package]]
+name = "flatten-serde-json"
+version = "0.41.1"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
+dependencies = [
+ "serde_json",
+]
+
 [[package]]
 name = "flatten-serde-json"
 version = "1.0.0"
@@ -1511,9 +1542,9 @@ version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -1585,9 +1616,9 @@ version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "30ce01e8bbb3e7e0758dcf907fe799f5998a54368963f766ae94b84624ba60c8"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -1642,9 +1673,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e45727250e75cc04ff2846a66397da8ef2b3db8e40e0cef4df67950a07621eb9"
 dependencies = [
  "proc-macro-error",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -2052,6 +2083,14 @@ dependencies = [
  "wasm-bindgen",
 ]

+[[package]]
+name = "json-depth-checker"
+version = "0.41.1"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
+dependencies = [
+ "serde_json",
+]
+
 [[package]]
 name = "json-depth-checker"
 version = "1.0.0"
@@ -2393,9 +2432,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81"
 dependencies = [
  "log",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -2415,9 +2454,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f08150cf2bab1fc47c2196f4f41173a27fcd0f684165e5458c0046b53a472e2f"
 dependencies = [
  "once_cell",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -2455,6 +2494,7 @@ dependencies = [
  "assert-json-diff",
  "async-stream",
  "async-trait",
+ "atty",
  "brotli",
  "bstr 1.1.0",
  "byte-unit",
@@ -2462,7 +2502,7 @@ dependencies = [
  "cargo_toml",
  "clap 4.0.32",
  "crossbeam-channel",
- "deserr",
+ "deserr 0.3.0",
  "dump",
  "either",
  "env_logger",
@@ -2503,7 +2543,6 @@ dependencies = [
  "rustls-pemfile",
  "segment",
  "serde",
- "serde-cs",
  "serde_json",
  "serde_urlencoded",
  "sha-1",
@@ -2515,6 +2554,7 @@ dependencies = [
  "tar",
  "temp-env",
  "tempfile",
+ "termcolor",
  "thiserror",
  "time",
  "tokio",
@@ -2554,7 +2594,7 @@ dependencies = [
  "anyhow",
  "convert_case 0.6.0",
  "csv",
- "deserr",
+ "deserr 0.3.0",
  "either",
  "enum-iterator",
  "file-store",
@@ -2563,11 +2603,10 @@ dependencies = [
  "insta",
  "meili-snap",
  "memmap2",
- "milli",
- "proptest",
- "proptest-derive",
+ "milli 0.41.1",
  "roaring",
  "serde",
+ "serde-cs",
  "serde_json",
  "tar",
  "tempfile",
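Note the removal of proptest and proptest-derive from this dependency list: that is what later lets the whole property-testing tree (bit-set, quick-error, rand_xorshift, rusty-fork, wait-timeout and the pre-1.0 proc-macro2/quote/syn stack) drop out of the lock file.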
@@ -2601,6 +2640,52 @@ dependencies = [
  "autocfg",
 ]

+[[package]]
+name = "milli"
+version = "0.41.1"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
+dependencies = [
+ "bimap",
+ "bincode",
+ "bstr 1.1.0",
+ "byteorder",
+ "charabia",
+ "concat-arrays",
+ "crossbeam-channel",
+ "csv",
+ "deserr 0.3.0",
+ "either",
+ "filter-parser 0.41.1",
+ "flatten-serde-json 0.41.1",
+ "fst",
+ "fxhash",
+ "geoutils",
+ "grenad",
+ "heed",
+ "itertools",
+ "json-depth-checker 0.41.1",
+ "levenshtein_automata",
+ "log",
+ "logging_timer",
+ "memmap2",
+ "obkv",
+ "once_cell",
+ "ordered-float",
+ "rayon",
+ "roaring",
+ "rstar",
+ "serde",
+ "serde_json",
+ "slice-group-by",
+ "smallstr",
+ "smallvec",
+ "smartstring",
+ "tempfile",
+ "thiserror",
+ "time",
+ "uuid 1.2.2",
+]
+
 [[package]]
 name = "milli"
 version = "1.0.0"
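During the transition this branch carries two milli versions side by side: the workspace milli 1.0.0 and milli 0.41.1 pinned from git (tag v0.41.1), together with the 0.41.1 copies of its filter-parser, flatten-serde-json and json-depth-checker subcrates added earlier in this diff.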
@@ -2614,10 +2699,10 @@ dependencies = [
  "concat-arrays",
  "crossbeam-channel",
  "csv",
- "deserr",
+ "deserr 0.1.4",
  "either",
- "filter-parser",
- "flatten-serde-json",
+ "filter-parser 1.0.0",
+ "flatten-serde-json 1.0.0",
  "fst",
  "fuzzcheck",
  "fxhash",
@@ -2626,7 +2711,7 @@ dependencies = [
  "heed",
  "insta",
  "itertools",
- "json-depth-checker",
+ "json-depth-checker 1.0.0",
  "levenshtein_automata",
  "log",
  "logging_timer",
@@ -2991,9 +3076,9 @@ checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c"
 dependencies = [
  "pest",
  "pest_meta",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -3119,9 +3204,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
 dependencies = [
  "proc-macro-error-attr",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "version_check",
 ]

@@ -3131,20 +3216,11 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "version_check",
 ]

-[[package]]
-name = "proc-macro2"
-version = "0.4.30"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
-dependencies = [
- "unicode-xid 0.1.0",
-]
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.49"
@@ -3184,71 +3260,19 @@ dependencies = [
  "thiserror",
 ]

-[[package]]
-name = "proptest"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5"
-dependencies = [
- "bit-set",
- "bitflags",
- "byteorder",
- "lazy_static",
- "num-traits",
- "quick-error 2.0.1",
- "rand",
- "rand_chacha",
- "rand_xorshift",
- "regex-syntax",
- "rusty-fork",
- "tempfile",
-]
-
-[[package]]
-name = "proptest-derive"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90b46295382dc76166cb7cf2bb4a97952464e4b7ed5a43e6cd34e1fec3349ddc"
-dependencies = [
- "proc-macro2 0.4.30",
- "quote 0.6.13",
- "syn 0.15.44",
-]
-
 [[package]]
 name = "protobuf"
 version = "2.28.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94"

-[[package]]
-name = "quick-error"
-version = "1.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
-
-[[package]]
-name = "quick-error"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
-
-[[package]]
-name = "quote"
-version = "0.6.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
-dependencies = [
- "proc-macro2 0.4.30",
-]
-
 [[package]]
 name = "quote"
 version = "1.0.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
 dependencies = [
- "proc-macro2 1.0.49",
+ "proc-macro2",
 ]

 [[package]]
@@ -3281,15 +3305,6 @@ dependencies = [
  "getrandom",
 ]

-[[package]]
-name = "rand_xorshift"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
-dependencies = [
- "rand_core",
-]
-
 [[package]]
 name = "rayon"
 version = "1.6.1"
@@ -3504,18 +3519,6 @@ version = "1.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70"

-[[package]]
-name = "rusty-fork"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
-dependencies = [
- "fnv",
- "quick-error 1.2.3",
- "tempfile",
- "wait-timeout",
-]
-
 [[package]]
 name = "ryu"
 version = "1.0.12"
@@ -3591,9 +3594,9 @@ version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -3788,25 +3791,14 @@ version = "2.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"

-[[package]]
-name = "syn"
-version = "0.15.44"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
-dependencies = [
- "proc-macro2 0.4.30",
- "quote 0.6.13",
- "unicode-xid 0.1.0",
-]
-
 [[package]]
 name = "syn"
 version = "1.0.107"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "unicode-ident",
 ]

@@ -3825,10 +3817,10 @@ version = "0.12.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
- "unicode-xid 0.2.4",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
 ]

 [[package]]
@@ -3910,9 +3902,9 @@ version = "1.0.38"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -3993,9 +3985,9 @@ version = "1.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]

 [[package]]
@@ -4130,12 +4122,6 @@ version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"

-[[package]]
-name = "unicode-xid"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
-
 [[package]]
 name = "unicode-xid"
 version = "0.2.4"
@@ -4218,15 +4204,6 @@ version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

-[[package]]
-name = "wait-timeout"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
-dependencies = [
- "libc",
-]
-
 [[package]]
 name = "walkdir"
 version = "2.3.2"
@@ -4284,9 +4261,9 @@ dependencies = [
  "bumpalo",
  "log",
  "once_cell",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "wasm-bindgen-shared",
 ]

@@ -4308,7 +4285,7 @@ version = "0.2.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
 dependencies = [
- "quote 1.0.23",
+ "quote",
  "wasm-bindgen-macro-support",
 ]

@@ -4318,9 +4295,9 @@ version = "0.2.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -4517,8 +4494,8 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb"
 dependencies = [
- "proc-macro2 1.0.49",
- "syn 1.0.107",
+ "proc-macro2",
+ "syn",
  "synstructure",
 ]

@@ -10,6 +10,7 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -13,13 +13,17 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
+  "sortableAttributes": [
+    "genres",
+    "id"
+  ],
   "rankingRules": [
     "typo",
     "words",
     "proximity",
     "attribute",
     "exactness",
-    "asc(release_date)"
+    "release_date:asc"
   ],
   "stopWords": [],
   "synonyms": {},
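These settings snapshots show the two user-visible effects of the compat fixes below: sortableAttributes now survives a dump import, and custom ranking rules use the name:asc / name:desc syntax instead of the older asc(name) / desc(name) form.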
@@ -10,6 +10,7 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -1,4 +1,3 @@
-use std::collections::BTreeSet;
 use std::str::FromStr;

 use super::v2_to_v3::CompatV2ToV3;
@@ -102,14 +101,15 @@ impl CompatIndexV1ToV2 {

 impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
     fn from(source: v1::settings::Settings) -> Self {
-        let displayed_attributes = source
-            .displayed_attributes
-            .map(|opt| opt.map(|displayed_attributes| displayed_attributes.into_iter().collect()));
-        let attributes_for_faceting = source.attributes_for_faceting.map(|opt| {
-            opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
-        });
-        let ranking_rules = source.ranking_rules.map(|opt| {
-            opt.map(|ranking_rules| {
+        Self {
+            displayed_attributes: option_to_setting(source.displayed_attributes)
+                .map(|displayed| displayed.into_iter().collect()),
+            searchable_attributes: option_to_setting(source.searchable_attributes),
+            filterable_attributes: option_to_setting(source.attributes_for_faceting.clone())
+                .map(|filterable| filterable.into_iter().collect()),
+            sortable_attributes: option_to_setting(source.attributes_for_faceting)
+                .map(|sortable| sortable.into_iter().collect()),
+            ranking_rules: option_to_setting(source.ranking_rules).map(|ranking_rules| {
                 ranking_rules
                     .into_iter()
                     .filter_map(|ranking_rule| {
@@ -119,26 +119,33 @@ impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
                                 ranking_rule.into();
                             criterion.as_ref().map(ToString::to_string)
                         }
-                        Err(()) => Some(ranking_rule),
+                        Err(()) => {
+                            log::warn!(
+                                "Could not import the following ranking rule: `{}`.",
+                                ranking_rule
+                            );
+                            None
+                        }
                     }
                 })
                 .collect()
-            })
-        });
-
-        Self {
-            displayed_attributes,
-            searchable_attributes: source.searchable_attributes,
-            filterable_attributes: attributes_for_faceting,
-            ranking_rules,
-            stop_words: source.stop_words,
-            synonyms: source.synonyms,
-            distinct_attribute: source.distinct_attribute,
+            }),
+            stop_words: option_to_setting(source.stop_words),
+            synonyms: option_to_setting(source.synonyms),
+            distinct_attribute: option_to_setting(source.distinct_attribute),
             _kind: std::marker::PhantomData,
         }
     }
 }

+fn option_to_setting<T>(opt: Option<Option<T>>) -> v2::Setting<T> {
+    match opt {
+        Some(Some(t)) => v2::Setting::Set(t),
+        None => v2::Setting::NotSet,
+        Some(None) => v2::Setting::Reset,
+    }
+}
+
 impl From<v1::update::UpdateStatus> for Option<v2::updates::UpdateStatus> {
     fn from(source: v1::update::UpdateStatus) -> Self {
         use v1::update::UpdateStatus as UpdateStatusV1;
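The v1 settings encode every field as Option<Option<T>>: the outer Option distinguishes "field untouched" from "field present", the inner one "set to a value" from "explicitly cleared". The new option_to_setting helper names those three states. A minimal, self-contained sketch of the mapping, with a stand-in Setting enum instead of the real v2::Setting:

    #[derive(Debug, PartialEq)]
    enum Setting<T> {
        Set(T),   // a new value was provided
        Reset,    // the field was explicitly cleared
        NotSet,   // the field was never mentioned
    }

    fn option_to_setting<T>(opt: Option<Option<T>>) -> Setting<T> {
        match opt {
            Some(Some(t)) => Setting::Set(t),
            Some(None) => Setting::Reset,
            None => Setting::NotSet,
        }
    }

    fn main() {
        assert_eq!(option_to_setting(Some(Some("title"))), Setting::Set("title"));
        assert_eq!(option_to_setting::<&str>(Some(None)), Setting::Reset);
        assert_eq!(option_to_setting::<&str>(None), Setting::NotSet);
    }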
@@ -251,38 +258,27 @@ impl From<v1::update::UpdateType> for Option<v2::updates::UpdateMeta> {

 impl From<v1::settings::SettingsUpdate> for v2::Settings<v2::Unchecked> {
     fn from(source: v1::settings::SettingsUpdate) -> Self {
-        let displayed_attributes: Option<Option<BTreeSet<String>>> =
-            source.displayed_attributes.into();
-
-        let attributes_for_faceting: Option<Option<Vec<String>>> =
-            source.attributes_for_faceting.into();
-
-        let ranking_rules: Option<Option<Vec<v1::settings::RankingRule>>> =
-            source.ranking_rules.into();
-
+        let ranking_rules = v2::Setting::from(source.ranking_rules);
         // go from the concrete types of v1 (RankingRule) to the concrete type of v2 (Criterion),
         // and then back to string as this is what the settings manipulate
-        let ranking_rules = ranking_rules.map(|opt| {
-            opt.map(|ranking_rules| {
+        let ranking_rules = ranking_rules.map(|ranking_rules| {
             ranking_rules
                 .into_iter()
                 // filter out the WordsPosition ranking rule that exists in v1 but not v2
-                .filter_map(|ranking_rule| {
-                    Option::<v2::settings::Criterion>::from(ranking_rule)
-                })
+                .filter_map(Option::<v2::settings::Criterion>::from)
                 .map(|criterion| criterion.to_string())
                 .collect()
-            })
         });

         Self {
-            displayed_attributes: displayed_attributes.map(|opt| {
-                opt.map(|displayed_attributes| displayed_attributes.into_iter().collect())
-            }),
+            displayed_attributes: v2::Setting::from(source.displayed_attributes)
+                .map(|displayed_attributes| displayed_attributes.into_iter().collect()),
             searchable_attributes: source.searchable_attributes.into(),
-            filterable_attributes: attributes_for_faceting.map(|opt| {
-                opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
-            }),
+            filterable_attributes: v2::Setting::from(source.attributes_for_faceting.clone())
+                .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()),
+            sortable_attributes: v2::Setting::from(source.attributes_for_faceting)
+                .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()),
             ranking_rules,
             stop_words: source.stop_words.into(),
             synonyms: source.synonyms.into(),
@@ -314,12 +310,12 @@ impl From<v1::settings::RankingRule> for Option<v2::settings::Criterion> {
     }
 }

-impl<T> From<v1::settings::UpdateState<T>> for Option<Option<T>> {
+impl<T> From<v1::settings::UpdateState<T>> for v2::Setting<T> {
     fn from(source: v1::settings::UpdateState<T>) -> Self {
         match source {
-            v1::settings::UpdateState::Update(new_value) => Some(Some(new_value)),
-            v1::settings::UpdateState::Clear => Some(None),
-            v1::settings::UpdateState::Nothing => None,
+            v1::settings::UpdateState::Update(new_value) => v2::Setting::Set(new_value),
+            v1::settings::UpdateState::Clear => v2::Setting::Reset,
+            v1::settings::UpdateState::Nothing => v2::Setting::NotSet,
         }
     }
 }
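v1's UpdateState<T> already distinguishes the same three cases (Update, Clear, Nothing), so it now converts straight into v2::Setting<T> rather than the lossier Option<Option<T>> encoding used before.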
@@ -352,7 +348,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"ad6245d98d1a8e30535f3339a9a8d223");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2298010973ee98cf4670787314176a3a");
         assert_eq!(update_files.len(), 9);
         assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dumps v1

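In this and the following test hunks only the snapshot hashes move: the imported tasks serialize differently now that settings carry the extra fields, while the assertions around them are untouched.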
@@ -361,28 +361,29 @@ impl From<String> for v3::Code {
     }
 }

-fn option_to_setting<T>(opt: Option<Option<T>>) -> v3::Setting<T> {
-    match opt {
-        Some(Some(t)) => v3::Setting::Set(t),
-        None => v3::Setting::NotSet,
-        Some(None) => v3::Setting::Reset,
+impl<A> From<v2::Setting<A>> for v3::Setting<A> {
+    fn from(setting: v2::Setting<A>) -> Self {
+        match setting {
+            v2::settings::Setting::Set(a) => v3::settings::Setting::Set(a),
+            v2::settings::Setting::Reset => v3::settings::Setting::Reset,
+            v2::settings::Setting::NotSet => v3::settings::Setting::NotSet,
+        }
     }
 }

 impl<T> From<v2::Settings<T>> for v3::Settings<v3::Unchecked> {
     fn from(settings: v2::Settings<T>) -> Self {
         v3::Settings {
-            displayed_attributes: option_to_setting(settings.displayed_attributes),
-            searchable_attributes: option_to_setting(settings.searchable_attributes),
-            filterable_attributes: option_to_setting(settings.filterable_attributes)
-                .map(|f| f.into_iter().collect()),
-            sortable_attributes: v3::Setting::NotSet,
-            ranking_rules: option_to_setting(settings.ranking_rules).map(|criteria| {
+            displayed_attributes: settings.displayed_attributes.into(),
+            searchable_attributes: settings.searchable_attributes.into(),
+            filterable_attributes: settings.filterable_attributes.into(),
+            sortable_attributes: settings.sortable_attributes.into(),
+            ranking_rules: v3::Setting::from(settings.ranking_rules).map(|criteria| {
                 criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect()
             }),
-            stop_words: option_to_setting(settings.stop_words),
-            synonyms: option_to_setting(settings.synonyms),
-            distinct_attribute: option_to_setting(settings.distinct_attribute),
+            stop_words: settings.stop_words.into(),
+            synonyms: settings.synonyms.into(),
+            distinct_attribute: settings.distinct_attribute.into(),
             _kind: std::marker::PhantomData,
         }
     }
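The behavioral fix in this hunk is sortable_attributes: the v2-to-v3 compat layer used to hard-code it to v3::Setting::NotSet, silently dropping any sortable attributes stored in a v2 dump, and it now forwards the imported value. The rest is mechanical: with a From impl between the two Setting enums, every field converts with a single .into(). A minimal sketch of that pattern, using stand-in SettingV2/SettingV3 types rather than the real v2::Setting and v3::Setting:

    enum SettingV2<A> { Set(A), Reset, NotSet }
    enum SettingV3<A> { Set(A), Reset, NotSet }

    impl<A> From<SettingV2<A>> for SettingV3<A> {
        fn from(s: SettingV2<A>) -> Self {
            match s {
                SettingV2::Set(a) => SettingV3::Set(a),
                SettingV2::Reset => SettingV3::Reset,
                SettingV2::NotSet => SettingV3::NotSet,
            }
        }
    }

    fn main() {
        let v2: SettingV2<Vec<String>> = SettingV2::Set(vec!["genres".to_string()]);
        // field-by-field conversions in the Settings struct become one-liners:
        let v3: SettingV3<Vec<String>> = v2.into();
        assert!(matches!(v3, SettingV3::Set(_)));
    }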
@@ -394,6 +395,7 @@ fn patch_ranking_rules(ranking_rule: &str) -> String {
         Ok(v2::settings::Criterion::Typo) => String::from("typo"),
         Ok(v2::settings::Criterion::Proximity) => String::from("proximity"),
         Ok(v2::settings::Criterion::Attribute) => String::from("attribute"),
+        Ok(v2::settings::Criterion::Sort) => String::from("sort"),
         Ok(v2::settings::Criterion::Exactness) => String::from("exactness"),
         Ok(v2::settings::Criterion::Asc(name)) => format!("{name}:asc"),
         Ok(v2::settings::Criterion::Desc(name)) => format!("{name}:desc"),
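patch_ranking_rules previously had no arm for the sort criterion, so a v2 dump whose ranking rules included sort lost it on import; the one-line addition fixes that. The Asc and Desc arms are also where the old asc(release_date) spelling becomes release_date:asc, as the movies snapshot above shows.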
@@ -260,7 +260,7 @@ impl From<v5::ResponseError> for v6::ResponseError {
             "index_already_exists" => v6::Code::IndexAlreadyExists,
             "index_not_found" => v6::Code::IndexNotFound,
             "invalid_index_uid" => v6::Code::InvalidIndexUid,
-            "invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo,
+            "invalid_min_word_length_for_typo" => v6::Code::InvalidSettingsTypoTolerance,
             "invalid_state" => v6::Code::InvalidState,
             "primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound,
             "index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists,
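The v5 error code invalid_min_word_length_for_typo is now translated to v6::Code::InvalidSettingsTypoTolerance, following the error-code renames shipped with the v1.0 release branch being merged here.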
@@ -439,7 +439,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -201,7 +201,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -279,7 +279,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"12eca43d5d1e1f334200eb4df653b0c9");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"c2445ddd1785528b80f2ba534d3bd00c");
         assert_eq!(update_files.len(), 10);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -356,7 +356,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2f51c6345fabccf47b18c82bad618ffe");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"cd12efd308fe3ed226356a727ab42ed3");
         assert_eq!(update_files.len(), 10);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -449,7 +449,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b27292d0bb86d4b4dd1b375a46b33890");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"bc616290adfe7d09a624cf6065ca9069");
         assert_eq!(update_files.len(), 9);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -530,6 +530,82 @@ pub(crate) mod test {
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }

+    #[test]
+    fn import_dump_v2_from_meilisearch_v0_22_0_issue_3435() {
+        let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap();
+        let mut dump = DumpReader::open(dump).unwrap();
+
+        // top level infos
+        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+        assert_eq!(dump.instance_uid().unwrap(), None);
+
+        // tasks
+        let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2db37756d8af1fb7623436b76e8956a6");
+        assert_eq!(update_files.len(), 8);
+        assert!(update_files[0..].iter().all(|u| u.is_none())); // everything already processed
+
+        // keys
+        let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"d751713988987e9331980363e24189ce");
+
+        // indexes
+        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        // the index are not ordered in any way by default
+        indexes.sort_by_key(|index| index.metadata().uid.to_string());
+
+        let mut products = indexes.pop().unwrap();
+        let mut movies = indexes.pop().unwrap();
+        let mut spells = indexes.pop().unwrap();
+        assert!(indexes.is_empty());
+
+        // products
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "products",
+          "primaryKey": "sku",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(products.settings().unwrap());
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
+
+        // movies
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "movies",
+          "primaryKey": "id",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(movies.settings().unwrap());
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
+
+        // spells
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "dnd_spells",
+          "primaryKey": "index",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(spells.settings().unwrap());
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
+    }
+
     #[test]
     fn import_dump_v1() {
         let dump = File::open("tests/assets/v1.dump").unwrap();
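This regression test for issue 3435 imports a dump produced by Meilisearch v0.22.0 (dump format v2) and snapshots every index's settings, pinning down that sortable attributes and the sort ranking rule now survive the import path fixed above.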
@@ -542,7 +618,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"9725ccfceea3f8d5846c44006c9e1e7b");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"8df6eab075a44b3c1af6b726f9fd9a43");
         assert_eq!(update_files.len(), 9);
         assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1

@ -10,6 +10,7 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@ -10,6 +10,7 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@ -13,6 +13,10 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
+  "sortableAttributes": [
+    "genres",
+    "id"
+  ],
   "rankingRules": [
     "typo",
     "words",
@ -0,0 +1,25 @@
+---
+source: dump/src/reader/mod.rs
+expression: spells.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@ -0,0 +1,39 @@
+---
+source: dump/src/reader/mod.rs
+expression: products.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {
+    "android": [
+      "phone",
+      "smartphone"
+    ],
+    "iphone": [
+      "phone",
+      "smartphone"
+    ],
+    "phone": [
+      "android",
+      "iphone",
+      "smartphone"
+    ]
+  },
+  "distinctAttribute": null
+}
@ -0,0 +1,30 @@
+---
+source: dump/src/reader/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [
+    "genres",
+    "id"
+  ],
+  "sortableAttributes": [
+    "release_date"
+  ],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "exactness",
+    "release_date:asc"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@ -41,6 +41,7 @@ use super::Document;
 use crate::{IndexMetadata, Result, Version};
 
 pub type Settings<T> = settings::Settings<T>;
+pub type Setting<T> = settings::Setting<T>;
 pub type Checked = settings::Checked;
 pub type Unchecked = settings::Unchecked;
 
@ -306,4 +307,81 @@ pub(crate) mod test {
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
+
+    #[test]
+    fn read_dump_v2_from_meilisearch_v0_22_0_issue_3435() {
+        let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap();
+        let dir = TempDir::new().unwrap();
+        let mut dump = BufReader::new(dump);
+        let gz = GzDecoder::new(&mut dump);
+        let mut archive = tar::Archive::new(gz);
+        archive.unpack(dir.path()).unwrap();
+
+        let mut dump = V2Reader::open(dir).unwrap();
+
+        // top level infos
+        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+
+        // tasks
+        let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
+        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"aca8ba13046272664eb3ea2da3031633");
+        assert_eq!(update_files.len(), 8);
+        assert!(update_files[0..].iter().all(|u| u.is_none())); // everything has already been processed
+
+        // indexes
+        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        // the index are not ordered in any way by default
+        indexes.sort_by_key(|index| index.metadata().uid.to_string());
+
+        let mut products = indexes.pop().unwrap();
+        let mut movies = indexes.pop().unwrap();
+        let mut spells = indexes.pop().unwrap();
+        assert!(indexes.is_empty());
+
+        // products
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "products",
+          "primaryKey": "sku",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(products.settings().unwrap());
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
+
+        // movies
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "movies",
+          "primaryKey": "id",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(movies.settings().unwrap());
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
+
+        // spells
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "dnd_spells",
+          "primaryKey": "index",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(spells.settings().unwrap());
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
+    }
 }
@ -1,35 +1,33 @@
 use std::collections::{BTreeMap, BTreeSet};
-use std::fmt::Display;
+use std::fmt;
 use std::marker::PhantomData;
 use std::str::FromStr;
 
-use once_cell::sync::Lazy;
-use regex::Regex;
 use serde::{Deserialize, Deserializer};
 
 #[cfg(test)]
 fn serialize_with_wildcard<S>(
-    field: &Option<Option<Vec<String>>>,
+    field: &Setting<Vec<String>>,
     s: S,
 ) -> std::result::Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
-    let wildcard = vec!["*".to_string()];
-    s.serialize_some(&field.as_ref().map(|o| o.as_ref().unwrap_or(&wildcard)))
-}
-
-fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error>
-where
-    T: Deserialize<'de>,
-    D: Deserializer<'de>,
-{
-    Deserialize::deserialize(deserializer).map(Some)
+    use serde::Serialize;
+
+    let wildcard = vec!["*".to_string()];
+    match field {
+        Setting::Set(value) => Some(value),
+        Setting::Reset => Some(&wildcard),
+        Setting::NotSet => None,
+    }
+    .serialize(s)
 }
 
 #[derive(Clone, Default, Debug)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct Checked;
 
 #[derive(Clone, Default, Debug, Deserialize)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct Unchecked;
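
A minimal sketch of what the rewritten serializer above produces (illustrative only, not code from this commit):

    // Setting::Set(vec!["title".to_string()])  -> serialized as ["title"]
    // Setting::Reset                           -> serialized as ["*"]  (a reset falls back to the wildcard)
    // Setting::NotSet                          -> serialized as null, though on `Settings` fields it is
    //                                             normally skipped via `skip_serializing_if = "Setting::is_not_set"`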
@ -42,75 +40,54 @@ pub struct Unchecked;
 pub struct Settings<T> {
     #[serde(
         default,
-        deserialize_with = "deserialize_some",
         serialize_with = "serialize_with_wildcard",
-        skip_serializing_if = "Option::is_none"
+        skip_serializing_if = "Setting::is_not_set"
     )]
-    pub displayed_attributes: Option<Option<Vec<String>>>,
+    pub displayed_attributes: Setting<Vec<String>>,
 
     #[serde(
         default,
-        deserialize_with = "deserialize_some",
         serialize_with = "serialize_with_wildcard",
-        skip_serializing_if = "Option::is_none"
+        skip_serializing_if = "Setting::is_not_set"
     )]
-    pub searchable_attributes: Option<Option<Vec<String>>>,
+    pub searchable_attributes: Setting<Vec<String>>,
 
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub filterable_attributes: Option<Option<BTreeSet<String>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub ranking_rules: Option<Option<Vec<String>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub stop_words: Option<Option<BTreeSet<String>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub distinct_attribute: Option<Option<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub filterable_attributes: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub sortable_attributes: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub ranking_rules: Setting<Vec<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub stop_words: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub distinct_attribute: Setting<String>,
 
     #[serde(skip)]
     pub _kind: PhantomData<T>,
 }
 
 impl Settings<Unchecked> {
-    pub fn check(mut self) -> Settings<Checked> {
-        let displayed_attributes = match self.displayed_attributes.take() {
-            Some(Some(fields)) => {
+    pub fn check(self) -> Settings<Checked> {
+        let displayed_attributes = match self.displayed_attributes {
+            Setting::Set(fields) => {
                 if fields.iter().any(|f| f == "*") {
-                    Some(None)
+                    Setting::Reset
                 } else {
-                    Some(Some(fields))
+                    Setting::Set(fields)
                 }
             }
             otherwise => otherwise,
         };
 
-        let searchable_attributes = match self.searchable_attributes.take() {
-            Some(Some(fields)) => {
+        let searchable_attributes = match self.searchable_attributes {
+            Setting::Set(fields) => {
                 if fields.iter().any(|f| f == "*") {
-                    Some(None)
+                    Setting::Reset
                 } else {
-                    Some(Some(fields))
+                    Setting::Set(fields)
                 }
             }
             otherwise => otherwise,
@ -120,6 +97,7 @@ impl Settings<Unchecked> {
             displayed_attributes,
             searchable_attributes,
             filterable_attributes: self.filterable_attributes,
+            sortable_attributes: self.sortable_attributes,
             ranking_rules: self.ranking_rules,
             stop_words: self.stop_words,
             synonyms: self.synonyms,
@ -129,10 +107,61 @@ impl Settings<Unchecked> {
         }
     }
 }
 
-static ASC_DESC_REGEX: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap());
+#[derive(Debug, Clone, PartialEq)]
+pub enum Setting<T> {
+    Set(T),
+    Reset,
+    NotSet,
+}
 
-#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
+impl<T> Default for Setting<T> {
+    fn default() -> Self {
+        Self::NotSet
+    }
+}
+
+impl<T> Setting<T> {
+    pub const fn is_not_set(&self) -> bool {
+        matches!(self, Self::NotSet)
+    }
+
+    pub fn map<A>(self, f: fn(T) -> A) -> Setting<A> {
+        match self {
+            Setting::Set(a) => Setting::Set(f(a)),
+            Setting::Reset => Setting::Reset,
+            Setting::NotSet => Setting::NotSet,
+        }
+    }
+}
+
+#[cfg(test)]
+impl<T: serde::Serialize> serde::Serialize for Setting<T> {
+    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        match self {
+            Self::Set(value) => Some(value),
+            // Usually not_set isn't serialized by setting skip_serializing_if field attribute
+            Self::NotSet | Self::Reset => None,
+        }
+        .serialize(serializer)
+    }
+}
+
+impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
+    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        Deserialize::deserialize(deserializer).map(|x| match x {
+            Some(x) => Self::Set(x),
+            None => Self::Reset, // Reset is forced by sending null value
+        })
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Criterion {
     /// Sorted by decreasing number of matched query terms.
     /// Query words at the front of an attribute is considered better than if it was at the back.
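
A minimal sketch of how the tri-state `Setting<T>` defined above behaves through serde (illustrative only, not part of the commit; assumes `serde_json` is available and the definitions above are in scope):

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Doc {
        // an absent field falls back to `Default`, i.e. `Setting::NotSet`
        #[serde(default)]
        value: Setting<Vec<String>>,
    }

    fn demo() {
        // a concrete value deserializes to `Setting::Set`
        let d: Doc = serde_json::from_str(r#"{ "value": ["title"] }"#).unwrap();
        assert!(matches!(d.value, Setting::Set(_)));

        // an explicit null forces `Setting::Reset`
        let d: Doc = serde_json::from_str(r#"{ "value": null }"#).unwrap();
        assert!(matches!(d.value, Setting::Reset));

        // a missing field is `Setting::NotSet`
        let d: Doc = serde_json::from_str(r#"{}"#).unwrap();
        assert!(matches!(d.value, Setting::NotSet));
    }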
@ -142,8 +171,11 @@ pub enum Criterion {
     /// Sorted by increasing distance between matched query terms.
     Proximity,
     /// Documents with quey words contained in more important
-    /// attributes are considred better.
+    /// attributes are considered better.
     Attribute,
+    /// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable
+    /// attributes can be used in place of this criterion at query time.
+    Sort,
     /// Sorted by the similarity of the matched words with the query words.
     Exactness,
     /// Sorted by the increasing value of the field specified.
@ -152,40 +184,86 @@ pub enum Criterion {
     Desc(String),
 }
 
+impl Criterion {
+    /// Returns the field name parameter of this criterion.
+    pub fn field_name(&self) -> Option<&str> {
+        match self {
+            Criterion::Asc(name) | Criterion::Desc(name) => Some(name),
+            _otherwise => None,
+        }
+    }
+}
+
 impl FromStr for Criterion {
+    // since we're not going to show the custom error message we can override the
+    // error type.
     type Err = ();
 
-    fn from_str(txt: &str) -> Result<Criterion, Self::Err> {
-        match txt {
+    fn from_str(text: &str) -> Result<Criterion, Self::Err> {
+        match text {
             "words" => Ok(Criterion::Words),
             "typo" => Ok(Criterion::Typo),
             "proximity" => Ok(Criterion::Proximity),
             "attribute" => Ok(Criterion::Attribute),
+            "sort" => Ok(Criterion::Sort),
             "exactness" => Ok(Criterion::Exactness),
-            text => {
-                let caps = ASC_DESC_REGEX.captures(text).ok_or(())?;
-                let order = caps.get(1).unwrap().as_str();
-                let field_name = caps.get(2).unwrap().as_str();
-                match order {
-                    "asc" => Ok(Criterion::Asc(field_name.to_string())),
-                    "desc" => Ok(Criterion::Desc(field_name.to_string())),
-                    _text => Err(()),
-                }
-            }
+            text => match AscDesc::from_str(text) {
+                Ok(AscDesc::Asc(field)) => Ok(Criterion::Asc(field)),
+                Ok(AscDesc::Desc(field)) => Ok(Criterion::Desc(field)),
+                Err(_) => Err(()),
+            },
         }
     }
 }
 
-impl Display for Criterion {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
+pub enum AscDesc {
+    Asc(String),
+    Desc(String),
+}
+
+impl FromStr for AscDesc {
+    type Err = ();
+
+    // since we don't know if this comes from the old or new syntax we need to check
+    // for both syntax.
+    // WARN: this code doesn't come from the original meilisearch v0.22.0 but was
+    // written specifically to be able to import the dump of meilisearch v0.21.0 AND
+    // meilisearch v0.22.0.
+    fn from_str(text: &str) -> Result<AscDesc, Self::Err> {
+        if let Some((field_name, asc_desc)) = text.rsplit_once(':') {
+            match asc_desc {
+                "asc" => Ok(AscDesc::Asc(field_name.to_string())),
+                "desc" => Ok(AscDesc::Desc(field_name.to_string())),
+                _ => Err(()),
+            }
+        } else if text.starts_with("asc(") && text.ends_with(')') {
+            Ok(AscDesc::Asc(
+                text.strip_prefix("asc(").unwrap().strip_suffix(')').unwrap().to_string(),
+            ))
+        } else if text.starts_with("desc(") && text.ends_with(')') {
+            Ok(AscDesc::Desc(
+                text.strip_prefix("desc(").unwrap().strip_suffix(')').unwrap().to_string(),
+            ))
+        } else {
+            Err(())
+        }
+    }
+}
+
+impl fmt::Display for Criterion {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        use Criterion::*;
+
         match self {
-            Criterion::Words => write!(f, "words"),
-            Criterion::Typo => write!(f, "typo"),
-            Criterion::Proximity => write!(f, "proximity"),
-            Criterion::Attribute => write!(f, "attribute"),
-            Criterion::Exactness => write!(f, "exactness"),
-            Criterion::Asc(field_name) => write!(f, "asc({})", field_name),
-            Criterion::Desc(field_name) => write!(f, "desc({})", field_name),
+            Words => f.write_str("words"),
+            Typo => f.write_str("typo"),
+            Proximity => f.write_str("proximity"),
+            Attribute => f.write_str("attribute"),
+            Sort => f.write_str("sort"),
+            Exactness => f.write_str("exactness"),
+            Asc(attr) => write!(f, "{}:asc", attr),
+            Desc(attr) => write!(f, "{}:desc", attr),
         }
     }
 }
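
A minimal sketch of the dual-syntax parsing above (illustrative only, not part of the commit):

    use std::str::FromStr;

    fn demo() {
        // v0.21.0 syntax
        assert_eq!(AscDesc::from_str("asc(release_date)"), Ok(AscDesc::Asc("release_date".to_string())));
        // v0.22.0 syntax
        assert_eq!(AscDesc::from_str("release_date:desc"), Ok(AscDesc::Desc("release_date".to_string())));
        // anything else is rejected
        assert_eq!(AscDesc::from_str("release_date"), Err(()));
    }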
@ -0,0 +1,25 @@
+---
+source: dump/src/reader/v2/mod.rs
+expression: spells.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@ -0,0 +1,39 @@
+---
+source: dump/src/reader/v2/mod.rs
+expression: products.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {
+    "android": [
+      "phone",
+      "smartphone"
+    ],
+    "iphone": [
+      "phone",
+      "smartphone"
+    ],
+    "phone": [
+      "android",
+      "iphone",
+      "smartphone"
+    ]
+  },
+  "distinctAttribute": null
+}
@ -0,0 +1,30 @@
+---
+source: dump/src/reader/v2/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [
+    "genres",
+    "id"
+  ],
+  "sortableAttributes": [
+    "release_date"
+  ],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "exactness",
+    "release_date:asc"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@ -5,10 +5,8 @@ use serde::{Deserialize, Serialize};
 
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
-#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 pub struct ResponseError {
     #[serde(skip)]
-    #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
     pub code: StatusCode,
     pub message: String,
     #[serde(rename = "code")]
@ -5,7 +5,6 @@ use serde::Deserialize;
 
 #[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
-#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct ResponseError {
     #[serde(skip)]
BIN  dump/tests/assets/v2-v0.22.0.dump  (new file)
Binary file not shown.
@ -1,4 +1,3 @@
-use std::collections::BTreeSet;
 use std::fs::File as StdFile;
 use std::ops::{Deref, DerefMut};
 use std::path::{Path, PathBuf};
@ -11,10 +10,14 @@ const UPDATE_FILES_PATH: &str = "updates/updates_files";
 
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
+    #[error("Could not parse file name as utf-8")]
+    CouldNotParseFileNameAsUtf8,
     #[error(transparent)]
     IoError(#[from] std::io::Error),
     #[error(transparent)]
     PersistError(#[from] tempfile::PersistError),
+    #[error(transparent)]
+    UuidError(#[from] uuid::Error),
 }
 
 pub type Result<T> = std::result::Result<T, Error>;
@ -33,13 +36,11 @@ impl DerefMut for File {
     }
 }
 
-#[cfg_attr(test, faux::create)]
 #[derive(Clone, Debug)]
 pub struct FileStore {
     path: PathBuf,
 }
 
-#[cfg(not(test))]
 impl FileStore {
     pub fn new(path: impl AsRef<Path>) -> Result<FileStore> {
         let path = path.as_ref().to_path_buf();
@ -48,7 +49,6 @@ impl FileStore {
     }
 }
 
-#[cfg_attr(test, faux::methods)]
 impl FileStore {
     /// Creates a new temporary update file.
     /// A call to `persist` is needed to persist the file in the database.
@ -94,7 +94,17 @@ impl FileStore {
         Ok(())
     }
 
-    pub fn get_size(&self, uuid: Uuid) -> Result<u64> {
+    /// Compute the size of all the updates contained in the file store.
+    pub fn compute_total_size(&self) -> Result<u64> {
+        let mut total = 0;
+        for uuid in self.all_uuids()? {
+            total += self.compute_size(uuid?).unwrap_or_default();
+        }
+        Ok(total)
+    }
+
+    /// Compute the size of one update
+    pub fn compute_size(&self, uuid: Uuid) -> Result<u64> {
         Ok(self.get_update(uuid)?.metadata()?.len())
     }
 
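
A minimal usage sketch of the two methods added above (illustrative only, not part of the commit; the path is hypothetical):

    fn demo() -> Result<()> {
        let store = FileStore::new("/tmp/meili-updates")?; // hypothetical location
        // sums `compute_size` over every uuid yielded by `all_uuids`
        let total_bytes = store.compute_total_size()?;
        println!("update files take {total_bytes} bytes on disk");
        Ok(())
    }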
@ -105,17 +115,12 @@ impl FileStore {
     }
 
     /// List the Uuids of the files in the FileStore
-    ///
-    /// This function is meant to be used by tests only.
-    #[doc(hidden)]
-    pub fn __all_uuids(&self) -> BTreeSet<Uuid> {
-        let mut uuids = BTreeSet::new();
-        for entry in self.path.read_dir().unwrap() {
-            let entry = entry.unwrap();
-            let uuid = Uuid::from_str(entry.file_name().to_str().unwrap()).unwrap();
-            uuids.insert(uuid);
-        }
-        uuids
+    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
+        Ok(self.path.read_dir()?.map(|entry| {
+            Ok(Uuid::from_str(
+                entry?.file_name().to_str().ok_or(Error::CouldNotParseFileNameAsUtf8)?,
+            )?)
+        }))
     }
 }
 
@ -19,10 +19,16 @@ use crate::KindWithContent;
 ///
 /// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
 enum AutobatchKind {
-    DocumentImport { method: IndexDocumentsMethod, allow_index_creation: bool },
+    DocumentImport {
+        method: IndexDocumentsMethod,
+        allow_index_creation: bool,
+        primary_key: Option<String>,
+    },
     DocumentDeletion,
     DocumentClear,
-    Settings { allow_index_creation: bool },
+    Settings {
+        allow_index_creation: bool,
+    },
     IndexCreation,
     IndexDeletion,
     IndexUpdate,
@ -38,14 +44,24 @@ impl AutobatchKind {
             _ => None,
         }
     }
+
+    fn primary_key(&self) -> Option<Option<&str>> {
+        match self {
+            AutobatchKind::DocumentImport { primary_key, .. } => Some(primary_key.as_deref()),
+            _ => None,
+        }
+    }
 }
 
 impl From<KindWithContent> for AutobatchKind {
     fn from(kind: KindWithContent) -> Self {
         match kind {
-            KindWithContent::DocumentAdditionOrUpdate { method, allow_index_creation, .. } => {
-                AutobatchKind::DocumentImport { method, allow_index_creation }
-            }
+            KindWithContent::DocumentAdditionOrUpdate {
+                method,
+                allow_index_creation,
+                primary_key,
+                ..
+            } => AutobatchKind::DocumentImport { method, allow_index_creation, primary_key },
             KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
             KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
             KindWithContent::SettingsUpdate { allow_index_creation, is_deletion, .. } => {
@ -75,6 +91,7 @@ pub enum BatchKind {
     DocumentImport {
         method: IndexDocumentsMethod,
         allow_index_creation: bool,
+        primary_key: Option<String>,
         import_ids: Vec<TaskId>,
     },
     DocumentDeletion {
|
|||||||
settings_ids: Vec<TaskId>,
|
settings_ids: Vec<TaskId>,
|
||||||
method: IndexDocumentsMethod,
|
method: IndexDocumentsMethod,
|
||||||
allow_index_creation: bool,
|
allow_index_creation: bool,
|
||||||
|
primary_key: Option<String>,
|
||||||
import_ids: Vec<TaskId>,
|
import_ids: Vec<TaskId>,
|
||||||
},
|
},
|
||||||
Settings {
|
Settings {
|
||||||
@ -120,6 +138,16 @@ impl BatchKind {
             _ => None,
         }
     }
+
+    fn primary_key(&self) -> Option<Option<&str>> {
+        match self {
+            BatchKind::DocumentImport { primary_key, .. }
+            | BatchKind::SettingsAndDocumentImport { primary_key, .. } => {
+                Some(primary_key.as_deref())
+            }
+            _ => None,
+        }
+    }
 }
 
 impl BatchKind {
@ -131,6 +159,7 @@ impl BatchKind {
     pub fn new(
         task_id: TaskId,
         kind: KindWithContent,
+        primary_key: Option<&str>,
     ) -> (ControlFlow<BatchKind, BatchKind>, bool) {
         use AutobatchKind as K;
 
@ -140,10 +169,25 @@ impl BatchKind {
             K::IndexUpdate => (Break(BatchKind::IndexUpdate { id: task_id }), false),
             K::IndexSwap => (Break(BatchKind::IndexSwap { id: task_id }), false),
             K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
-            K::DocumentImport { method, allow_index_creation } => (
+            K::DocumentImport { method, allow_index_creation, primary_key: pk }
+                if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
+            {
+                (
                 Continue(BatchKind::DocumentImport {
                     method,
                     allow_index_creation,
+                    primary_key: pk,
+                    import_ids: vec![task_id],
+                }),
+                allow_index_creation,
+                )
+            }
+            // if the primary key set in the task was different than ours we should stop and make this batch fail asap.
+            K::DocumentImport { method, allow_index_creation, primary_key } => (
+                Break(BatchKind::DocumentImport {
+                    method,
+                    allow_index_creation,
+                    primary_key,
                     import_ids: vec![task_id],
                 }),
                 allow_index_creation,
@ -163,7 +207,7 @@ impl BatchKind {
     /// To ease the writting of the code. `true` can be returned when you don't need to create an index
     /// but false can't be returned if you needs to create an index.
     #[rustfmt::skip]
-    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool) -> ControlFlow<BatchKind, BatchKind> {
+    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool, primary_key: Option<&str>) -> ControlFlow<BatchKind, BatchKind> {
         use AutobatchKind as K;
 
         match (self, kind) {
@ -173,11 +217,39 @@ impl BatchKind {
             (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
                 Break(this)
             },
+            // NOTE: We need to negate the whole condition since we're checking if we need to break instead of continue.
+            //       I wrote it this way because it's easier to understand than the other way around.
+            (this, kind) if !(
+                // 1. If both task don't interact with primary key -> we can continue
+                (this.primary_key().is_none() && kind.primary_key().is_none()) ||
+                // 2. Else ->
+                (
+                    // 2.1 If we already have a primary-key ->
+                    (
+                        primary_key.is_some() &&
+                        // 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
+                        // 2.1.2 If the task don't have a primary-key -> we can continue
+                        kind.primary_key().map_or(true, |pk| pk == primary_key)
+                    ) ||
+                    // 2.2 If we don't have a primary-key ->
+                    (
+                        // 2.2.1 If both the batch and the task have a primary key they should be equal
+                        // 2.2.2 If the batch is set to Some(None), the task should be too
+                        // 2.2.3 If the batch is set to None -> we can continue
+                        this.primary_key().zip(kind.primary_key()).map_or(true, |(this, kind)| this == kind)
+                    )
+                )
+            ) // closing the negation
+            => {
+                Break(this)
+            },
             // The index deletion can batch with everything but must stop after
             (
                 BatchKind::DocumentClear { mut ids }
                 | BatchKind::DocumentDeletion { deletion_ids: mut ids }
-                | BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids }
+                | BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids }
                 | BatchKind::Settings { allow_index_creation: _, settings_ids: mut ids },
                 K::IndexDeletion,
             ) => {
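
A worked evaluation of the negated condition above (illustrative only, not part of the commit), for an index whose primary key is "id" and a batch that carries no primary key of its own:

    //   task pk None        -> 2.2 holds (batch and task pks are both None)   -> batch continues
    //   task pk Some("id")  -> 2.1 holds (the task pk matches the index pk)   -> batch continues
    //   task pk Some("sku") -> neither 2.1 nor 2.2 holds, the negation trips  -> Break(this)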
@ -186,7 +258,7 @@ impl BatchKind {
             }
             (
                 BatchKind::ClearAndSettings { settings_ids: mut ids, allow_index_creation: _, mut other }
-                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, settings_ids: mut other },
+                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, primary_key: _, settings_ids: mut other },
                 K::IndexDeletion,
             ) => {
                 ids.push(id);
|
|||||||
K::DocumentImport { .. } | K::Settings { .. },
|
K::DocumentImport { .. } | K::Settings { .. },
|
||||||
) => Break(this),
|
) => Break(this),
|
||||||
(
|
(
|
||||||
BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids },
|
BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids },
|
||||||
K::DocumentClear,
|
K::DocumentClear,
|
||||||
) => {
|
) => {
|
||||||
ids.push(id);
|
ids.push(id);
|
||||||
@ -215,24 +287,27 @@ impl BatchKind {
|
|||||||
|
|
||||||
// we can autobatch the same kind of document additions / updates
|
// we can autobatch the same kind of document additions / updates
|
||||||
(
|
(
|
||||||
BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, mut import_ids },
|
BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, primary_key: _, mut import_ids },
|
||||||
K::DocumentImport { method: ReplaceDocuments, .. },
|
K::DocumentImport { method: ReplaceDocuments, primary_key: pk, .. },
|
||||||
) => {
|
) => {
|
||||||
import_ids.push(id);
|
import_ids.push(id);
|
||||||
Continue(BatchKind::DocumentImport {
|
Continue(BatchKind::DocumentImport {
|
||||||
method: ReplaceDocuments,
|
method: ReplaceDocuments,
|
||||||
allow_index_creation,
|
allow_index_creation,
|
||||||
import_ids,
|
import_ids,
|
||||||
|
primary_key: pk,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
(
|
(
|
||||||
BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, mut import_ids },
|
BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
|
||||||
K::DocumentImport { method: UpdateDocuments, .. },
|
K::DocumentImport { method: UpdateDocuments, primary_key: pk, .. },
|
||||||
) => {
|
) => {
|
||||||
|
|
||||||
import_ids.push(id);
|
import_ids.push(id);
|
||||||
Continue(BatchKind::DocumentImport {
|
Continue(BatchKind::DocumentImport {
|
||||||
method: UpdateDocuments,
|
method: UpdateDocuments,
|
||||||
allow_index_creation,
|
allow_index_creation,
|
||||||
|
primary_key: pk,
|
||||||
import_ids,
|
import_ids,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -245,12 +320,13 @@ impl BatchKind {
             ) => Break(this),
 
             (
-                BatchKind::DocumentImport { method, allow_index_creation, import_ids },
+                BatchKind::DocumentImport { method, allow_index_creation, primary_key, import_ids },
                 K::Settings { .. },
             ) => Continue(BatchKind::SettingsAndDocumentImport {
                 settings_ids: vec![id],
                 method,
                 allow_index_creation,
+                primary_key,
                 import_ids,
             }),
 
@ -327,7 +403,7 @@ impl BatchKind {
                 })
             }
             (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation },
+                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation, primary_key: _ },
                 K::DocumentClear,
             ) => {
                 other.push(id);
@ -339,26 +415,28 @@ impl BatchKind {
             }
 
             (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation },
-                K::DocumentImport { method: ReplaceDocuments, .. },
+                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation, primary_key: _},
+                K::DocumentImport { method: ReplaceDocuments, primary_key: pk2, .. },
             ) => {
                 import_ids.push(id);
                 Continue(BatchKind::SettingsAndDocumentImport {
                     settings_ids,
                     method: ReplaceDocuments,
                     allow_index_creation,
+                    primary_key: pk2,
                     import_ids,
                 })
             }
             (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: UpdateDocuments, .. },
+                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                K::DocumentImport { method: UpdateDocuments, primary_key: pk2, .. },
             ) => {
                 import_ids.push(id);
                 Continue(BatchKind::SettingsAndDocumentImport {
                     settings_ids,
                     method: UpdateDocuments,
                     allow_index_creation,
+                    primary_key: pk2,
                     import_ids,
                 })
             }
@ -369,7 +447,7 @@ impl BatchKind {
                 K::DocumentDeletion | K::DocumentImport { .. },
             ) => Break(this),
             (
-                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation, import_ids },
+                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation,primary_key, import_ids },
                 K::Settings { .. },
             ) => {
                 settings_ids.push(id);
@ -377,6 +455,7 @@ impl BatchKind {
                     settings_ids,
                     method,
                     allow_index_creation,
+                    primary_key,
                     import_ids,
                 })
             }
@ -406,6 +485,7 @@ impl BatchKind {
 pub fn autobatch(
     enqueued: Vec<(TaskId, KindWithContent)>,
     index_already_exists: bool,
+    primary_key: Option<&str>,
 ) -> Option<(BatchKind, bool)> {
     let mut enqueued = enqueued.into_iter();
     let (id, kind) = enqueued.next()?;
@ -413,7 +493,7 @@ pub fn autobatch(
     // index_exist will keep track of if the index should exist at this point after the tasks we batched.
     let mut index_exist = index_already_exists;
 
-    let (mut acc, must_create_index) = match BatchKind::new(id, kind) {
+    let (mut acc, must_create_index) = match BatchKind::new(id, kind, primary_key) {
         (Continue(acc), create) => (acc, create),
         (Break(acc), create) => return Some((acc, create)),
     };
@ -422,7 +502,7 @@ pub fn autobatch(
     index_exist |= must_create_index;
 
     for (id, kind) in enqueued {
        acc = match acc.accumulate(id, kind.into(), index_exist, primary_key) {
-        acc = match acc.accumulate(id, kind.into(), index_exist) {
+        acc = match acc.accumulate(id, kind.into(), index_exist, primary_key) {
             Continue(acc) => acc,
             Break(acc) => return Some((acc, must_create_index)),
         };
@ -441,18 +521,24 @@ mod tests {
 
     fn autobatch_from(
         index_already_exists: bool,
+        primary_key: Option<&str>,
         input: impl IntoIterator<Item = KindWithContent>,
     ) -> Option<(BatchKind, bool)> {
         autobatch(
             input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind)).collect(),
             index_already_exists,
+            primary_key,
         )
     }
 
-    fn doc_imp(method: IndexDocumentsMethod, allow_index_creation: bool) -> KindWithContent {
+    fn doc_imp(
+        method: IndexDocumentsMethod,
+        allow_index_creation: bool,
+        primary_key: Option<&str>,
+    ) -> KindWithContent {
         KindWithContent::DocumentAdditionOrUpdate {
             index_uid: String::from("doggo"),
-            primary_key: None,
+            primary_key: primary_key.map(|pk| pk.to_string()),
             method,
             content_file: Uuid::new_v4(),
             documents_count: 0,
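
A sketch of how the extended test helpers read in practice (illustrative only, not one of the commit's assertions):

    // same pk on the index and the task: the import is accepted into the batch
    let _ = autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]);
    // conflicting pks: `BatchKind::new` breaks immediately with the offending task
    let _ = autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("sku"))]);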
@ -502,226 +588,268 @@ mod tests {
     fn autobatch_simple_operation_together() {
         // we can autobatch one or multiple `ReplaceDocuments` together.
        // if the index exists.
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, false ), doc_imp(ReplaceDocuments, false )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, false , None), doc_imp(ReplaceDocuments, false , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
 
         // if it doesn't exists.
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
 
         // we can autobatch one or multiple `UpdateDocuments` together.
         // if the index exists.
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
 
         // if it doesn't exists.
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
 
         // we can autobatch one or multiple DocumentDeletion together
-        debug_snapshot!(autobatch_from(true, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
 
         // we can autobatch one or multiple Settings together
-        debug_snapshot!(autobatch_from(true, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
 
-        debug_snapshot!(autobatch_from(false, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(false, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||||
}
|
}
|
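The change threaded through every call in this hunk: `autobatch_from` now takes the index's primary key as a second argument, `doc_imp` takes the task's primary key as a third, and `DocumentImport` snapshots gain a `primary_key` field. A minimal sketch of the new shape, using hypothetical simplified types rather than the real scheduler's helpers:

    // Hypothetical simplified types, only to make the parameter threading
    // visible; the real test helpers carry more state than this.
    #[derive(Debug)]
    enum Method { ReplaceDocuments, UpdateDocuments }

    #[derive(Debug)]
    struct DocImp {
        method: Method,
        allow_index_creation: bool,
        primary_key: Option<String>,
    }

    fn doc_imp(method: Method, allow_index_creation: bool, primary_key: Option<&str>) -> DocImp {
        DocImp { method, allow_index_creation, primary_key: primary_key.map(str::to_owned) }
    }

    fn main() {
        // Mirrors the updated call sites: doc_imp(UpdateDocuments, true, None).
        println!("{:?}", doc_imp(Method::UpdateDocuments, true, None));
    }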
#[test]
fn simple_document_operation_dont_autobatch_with_other() {
// additions, updates and deletions can't batch together
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");

-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");

-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");

-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
}
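These cases all pin down one rule: a batch opened by one kind of document operation stops at the first task of another kind (replace vs. update vs. delete, or any index operation). A hypothetical sketch of that stopping rule, not the actual `BatchKind` machinery:

    // Hypothetical sketch: collect task ids only while the operation kind
    // matches the one that opened the batch. Assumes a non-empty task list.
    #[derive(Clone, Copy, PartialEq)]
    enum Op { Import, Deletion }

    fn first_batch(ops: &[(u32, Op)]) -> Vec<u32> {
        let opening_kind = ops[0].1;
        ops.iter()
            .take_while(|(_, kind)| *kind == opening_kind)
            .map(|(id, _)| *id)
            .collect()
    }

    fn main() {
        // Mirrors [doc_imp(..), doc_del()]: only task 0 makes the batch.
        assert_eq!(first_batch(&[(0, Op::Import), (1, Op::Deletion)]), vec![0]);
    }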
#[test]
fn document_addition_batch_with_settings() {
// simple case
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");

// multiple settings and doc addition
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");

// addition and setting unordered
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))");

// We ensure this kind of batch doesn't batch with forbidden operations
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(UpdateDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(ReplaceDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
}
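The `SettingsAndDocumentImport` snapshots show imports and settings interleaving freely while their ids land in two separate lists. A hypothetical sketch of that id split:

    // Hypothetical sketch: one pass over the tasks, two id lists out,
    // mirroring settings_ids / import_ids in the snapshots above.
    enum Task { Import(u32), Settings(u32) }

    fn split(tasks: &[Task]) -> (Vec<u32>, Vec<u32>) {
        let (mut settings_ids, mut import_ids) = (Vec::new(), Vec::new());
        for t in tasks {
            match t {
                Task::Import(id) => import_ids.push(*id),
                Task::Settings(id) => settings_ids.push(*id),
            }
        }
        (settings_ids, import_ids)
    }

    fn main() {
        // Mirrors [doc_imp, settings, doc_imp, settings] -> ids [1, 3] / [0, 2].
        let tasks = [Task::Import(0), Task::Settings(1), Task::Import(2), Task::Settings(3)];
        assert_eq!(split(&tasks), (vec![1, 3], vec![0, 2]));
    }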
#[test]
fn clear_and_additions() {
// these two don't need to batch
-debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");

// Basic use case
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");

// This batch kind doesn't mix with other document additions
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");

// But you can batch multiple clears together
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
}
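The rule these snapshots encode: additions fold into a following clear (their output is wiped anyway), several clears chain, but an addition after a clear starts a new batch. A hypothetical sketch of that rule:

    // Hypothetical sketch of the clear rule, not the real autobatcher.
    #[derive(Clone, Copy)]
    enum Task { Addition, Clear }

    fn clear_batch(tasks: &[Task]) -> Vec<usize> {
        let mut ids = Vec::new();
        let mut seen_clear = false;
        for (id, t) in tasks.iter().enumerate() {
            match t {
                Task::Addition if seen_clear => break, // a new batch starts here
                Task::Addition => ids.push(id),
                Task::Clear => { seen_clear = true; ids.push(id); }
            }
        }
        ids
    }

    fn main() {
        use Task::*;
        // Mirrors [doc_imp, doc_imp, doc_clr, doc_imp] -> ids [0, 1, 2].
        assert_eq!(clear_batch(&[Addition, Addition, Clear, Addition]), vec![0, 1, 2]);
        // Mirrors [doc_imp, doc_imp, doc_clr, doc_clr, doc_clr] -> ids [0..=4].
        assert_eq!(clear_batch(&[Addition, Addition, Clear, Clear, Clear]), vec![0, 1, 2, 3, 4]);
    }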
#[test]
fn clear_and_additions_and_settings() {
// A clear doesn't need to autobatch the settings that happen AFTER it: there are no documents left
-debug_snapshot!(autobatch_from(true, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");

-debug_snapshot!(autobatch_from(true, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
}
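A hypothetical reading of the `ClearAndSettings` shape above: settings survive a clear (a clear wipes documents, not index configuration), so their ids keep a dedicated list while document-touching tasks collapse into `other`:

    // Hypothetical sketch of the id split in ClearAndSettings; the real
    // batcher has more states and transitions than this.
    enum Task { Settings(usize), Import(usize), Clear(usize) }

    fn clear_and_settings(tasks: &[Task]) -> (Vec<usize>, Vec<usize>) {
        let (mut settings_ids, mut other) = (Vec::new(), Vec::new());
        for t in tasks {
            match t {
                Task::Settings(id) => settings_ids.push(*id),
                Task::Import(id) | Task::Clear(id) => other.push(*id),
            }
        }
        (settings_ids, other)
    }

    fn main() {
        // Mirrors [settings, doc_clr, settings] -> settings_ids: [0, 2], other: [1].
        let tasks = [Task::Settings(0), Task::Clear(1), Task::Settings(2)];
        assert_eq!(clear_and_settings(&tasks), (vec![0, 2], vec![1]));
    }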
#[test]
fn anything_and_index_deletion() {
// The `IndexDeletion` doesn't batch with anything that happens AFTER it.
-debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(true, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");

-debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");

// The index deletion can accept almost any type of `BatchKind` and transform it to an `IndexDeletion`.
// First, the basic cases
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");

-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");

// Then the mixed cases.
// The index already exists; whatever rights the tasks have, it shouldn't change the result.
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");

// When the index doesn't exist yet, it's more complicated.
// Either the first task we encounter creates it, in which case we can create a big batch with everything.
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
// The rights of the following tasks aren't really important.
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
// Or, in the second case, the first task doesn't create the index, and thus we want to batch it only with tasks that can't create an index.
// That can be a second task that doesn't have the right to create an index, or anything else that can't create one: an index deletion, document deletion, document clear, etc.
// All these tasks are going to throw an `Index doesn't exist` error once the batch is processed.
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
// The third and final case is when the first task doesn't create an index but is directly followed by a task creating an index. In this case we can't batch with what
// follows because we first need to process the erroneous batch.
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
}
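The absorbing rule under test, sketched with a hypothetical two-variant `BatchKind` (the real enum has many more variants and carries the id ordering seen in the snapshots):

    // Hypothetical sketch: an index deletion swallows whatever batch was
    // already accumulated and turns it into one IndexDeletion batch.
    #[derive(Debug, PartialEq)]
    enum BatchKind {
        DocumentImport { import_ids: Vec<usize> },
        IndexDeletion { ids: Vec<usize> },
    }

    fn absorb_index_deletion(current: BatchKind, idx_del_id: usize) -> BatchKind {
        let mut ids = match current {
            BatchKind::DocumentImport { import_ids } => import_ids,
            BatchKind::IndexDeletion { ids } => ids,
        };
        ids.push(idx_del_id);
        BatchKind::IndexDeletion { ids }
    }

    fn main() {
        // Mirrors [doc_imp(..), idx_del()] -> IndexDeletion { ids: [0, 1] }.
        let batch = BatchKind::DocumentImport { import_ids: vec![0] };
        assert_eq!(absorb_index_deletion(batch, 1), BatchKind::IndexDeletion { ids: vec![0, 1] });
    }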
#[test]
fn allowed_and_disallowed_index_creation() {
// `DocumentImport` can't be mixed with tasks that are disallowed from creating an index, except if the index already exists.
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");

-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
}
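A hypothetical predicate for the gate these cases exercise: when the index is missing, a batch opened by a task that may not create it refuses any task that would, so the "index doesn't exist" failure stays scoped to the opening tasks:

    // Hypothetical simplification of the creation-rights gate; the real
    // decision table lives in the autobatcher itself.
    fn can_absorb(index_exists: bool, batch_allows_creation: bool, task_allows_creation: bool) -> bool {
        index_exists || batch_allows_creation || !task_allows_creation
    }

    fn main() {
        // Mirrors autobatch_from(false, .., [doc_imp(.., false, ..), doc_imp(.., true, ..)]):
        // the second task is left for a later batch.
        assert!(!can_absorb(false, false, true));
        // With an existing index the same pair batches fine.
        assert!(can_absorb(true, false, true));
    }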
+#[test]
+fn autobatch_primary_key() {
+// ==> If I have a pk
+// With a single update
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+
+// With multiple updates
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
+
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
||||||
|
// ==> If I don't have a pk
|
||||||
|
// With a single update
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
|
||||||
|
|
||||||
|
// With a multiple updates
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
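The rule these snapshots encode, as a minimal self-contained sketch (simplified types and a hypothetical helper name, not the actual autobatcher API): a further document import may only be appended to a batch on an index whose primary key is known if the import declares no primary key or the same one; a mismatched key closes the batch, which is why the mismatching cases above stop at import_ids: [0].

// Minimal sketch, assuming simplified types (the real code lives in
// index-scheduler/src/autobatcher.rs and works on BatchKind values).
fn import_can_join_batch(index_pk: Option<&str>, task_pk: Option<&str>) -> bool {
    match (index_pk, task_pk) {
        // the task declares no primary key: always compatible
        (_, None) => true,
        // both declare one: they must agree
        (Some(index_pk), Some(task_pk)) => index_pk == task_pk,
        // the index has no primary key yet: the first task decides it
        (None, Some(_)) => true,
    }
}

fn main() {
    assert!(import_can_join_batch(Some("id"), None));
    assert!(import_can_join_batch(Some("id"), Some("id")));
    assert!(!import_can_join_batch(Some("id"), Some("other")));
}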
@ -217,7 +217,8 @@ impl IndexScheduler {
         let mut documents_counts = Vec::new();
         let mut content_files = Vec::new();
-        for task in &tasks {
+        for task in tasks.iter() {
             match task.kind {
                 KindWithContent::DocumentAdditionOrUpdate {
                     content_file,
@ -325,6 +326,7 @@ impl IndexScheduler {
                 settings_ids,
                 method,
                 allow_index_creation,
+                primary_key,
                 import_ids,
             } => {
                 let settings = self.create_next_batch_index(
@ -337,7 +339,12 @@ impl IndexScheduler {
                 let document_import = self.create_next_batch_index(
                     rtxn,
                     index_uid.clone(),
-                    BatchKind::DocumentImport { method, allow_index_creation, import_ids },
+                    BatchKind::DocumentImport {
+                        method,
+                        allow_index_creation,
+                        primary_key,
+                        import_ids,
+                    },
                     must_create_index,
                 )?;

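The shape change above, sketched with stand-in types (the real BatchKind has many more variants): the DocumentImport batch now carries the primary key declared by its tasks, so the scheduler can validate it when the batch is applied.

// Sketch only; field names follow the diff, the surrounding types are stand-ins.
type TaskId = u32;

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum IndexDocumentsMethod {
    ReplaceDocuments,
    UpdateDocuments,
}

#[derive(Debug)]
enum BatchKind {
    DocumentImport {
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
        // new in this change: the primary key shared by the batched tasks
        primary_key: Option<String>,
        import_ids: Vec<TaskId>,
    },
}

fn main() {
    let batch = BatchKind::DocumentImport {
        method: IndexDocumentsMethod::ReplaceDocuments,
        allow_index_creation: true,
        primary_key: Some("id".to_string()),
        import_ids: vec![0, 1],
    };
    println!("{batch:?}");
}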
@ -467,6 +474,12 @@ impl IndexScheduler {
         };

         let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
+        let mut primary_key = None;
+        if index_already_exists {
+            let index = self.index_mapper.index(rtxn, index_name)?;
+            let rtxn = index.read_txn()?;
+            primary_key = index.primary_key(&rtxn)?.map(|pk| pk.to_string());
+        }

         let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;

@ -484,7 +497,7 @@ impl IndexScheduler {
             .collect::<Result<Vec<_>>>()?;

         if let Some((batchkind, create_index)) =
-            autobatcher::autobatch(enqueued, index_already_exists)
+            autobatcher::autobatch(enqueued, index_already_exists, primary_key.as_deref())
         {
             return self.create_next_batch_index(
                 rtxn,
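The lookup added above, as a minimal sketch with hypothetical stand-in functions (index_exists and index_primary_key replace the real index_mapper and heed transaction calls): the scheduler only reads a primary key when the index already exists, otherwise it batches with None.

fn index_exists(name: &str) -> bool {
    name == "doggos"
}

fn index_primary_key(name: &str) -> Option<String> {
    (name == "doggos").then(|| "id".to_string())
}

fn primary_key_for_batching(index_name: &str) -> Option<String> {
    // mirrors the diff: None unless the index exists and has a key set
    let mut primary_key = None;
    if index_exists(index_name) {
        primary_key = index_primary_key(index_name);
    }
    primary_key
}

fn main() {
    assert_eq!(primary_key_for_batching("doggos").as_deref(), Some("id"));
    assert_eq!(primary_key_for_batching("missing"), None);
}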
@ -949,7 +962,7 @@ impl IndexScheduler {
     /// The list of processed tasks.
     fn apply_index_operation<'i>(
         &self,
-        index_wtxn: &'_ mut RwTxn<'i, '_>,
+        index_wtxn: &mut RwTxn<'i, '_>,
         index: &'i Index,
         operation: IndexOperation,
     ) -> Result<Vec<Task>> {
@ -985,9 +998,22 @@ impl IndexScheduler {
                 let mut primary_key_has_been_set = false;
                 let must_stop_processing = self.must_stop_processing.clone();
                 let indexer_config = self.index_mapper.indexer_config();
-                // TODO use the code from the IndexCreate operation
                 if let Some(primary_key) = primary_key {
-                    if index.primary_key(index_wtxn)?.is_none() {
+                    match index.primary_key(index_wtxn)? {
+                        // if a primary key was set AND had already been defined in the index
+                        // but to a different value, we can make the whole batch fail.
+                        Some(pk) => {
+                            if primary_key != pk {
+                                return Err(milli::Error::from(
+                                    milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()),
+                                )
+                                .into());
+                            }
+                        }
+                        // if the primary key was set and there was no primary key set for this index
+                        // we set it to the received value before starting the indexing process.
+                        None => {
                             let mut builder =
                                 milli::update::Settings::new(index_wtxn, index, indexer_config);
                             builder.set_primary_key(primary_key);
@ -998,6 +1024,7 @@ impl IndexScheduler {
                             primary_key_has_been_set = true;
                         }
                     }
+                }

                 let config = IndexDocumentsConfig { update_method: method, ..Default::default() };

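The check introduced above, as a sketch with a simplified error type standing in for milli::UserError::PrimaryKeyCannotBeChanged: if the task declares a primary key and the index already has a different one, the whole batch fails; if the index has none yet, the declared key is adopted before indexing starts.

#[derive(Debug, PartialEq)]
enum PkError {
    PrimaryKeyCannotBeChanged(String),
}

fn apply_primary_key(
    existing: &mut Option<String>,
    declared: Option<&str>,
) -> Result<(), PkError> {
    if let Some(declared) = declared {
        match existing.as_deref() {
            // already set to a different value: fail the batch
            Some(current) if current != declared => {
                return Err(PkError::PrimaryKeyCannotBeChanged(current.to_string()));
            }
            // already set to the same value: nothing to do
            Some(_) => {}
            // not set yet: adopt the declared key
            None => *existing = Some(declared.to_string()),
        }
    }
    Ok(())
}

fn main() {
    let mut pk = None;
    apply_primary_key(&mut pk, Some("id")).unwrap();
    assert_eq!(pk.as_deref(), Some("id"));
    assert_eq!(
        apply_primary_key(&mut pk, Some("bork")),
        Err(PkError::PrimaryKeyCannotBeChanged("id".to_string()))
    );
}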
@ -1059,7 +1086,8 @@ impl IndexScheduler {
                             task.status = Status::Failed;
                             task.details = Some(Details::DocumentAdditionOrUpdate {
                                 received_documents: count,
-                                indexed_documents: Some(count),
+                                // if there was an error we indexed 0 documents.
+                                indexed_documents: Some(0),
                             });
                             task.error = Some(error.into())
                         }
@ -100,9 +100,9 @@ pub enum Error {
     InvalidIndexUid { index_uid: String },
     #[error("Task `{0}` not found.")]
     TaskNotFound(TaskId),
-    #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
+    #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
     TaskDeletionWithEmptyQuery,
-    #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
+    #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
     TaskCancelationWithEmptyQuery,

     #[error(transparent)]
@ -141,8 +141,8 @@ impl ErrorCode for Error {
             Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
             Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
             Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
-            Error::SwapIndexNotFound(_) => Code::InvalidSwapIndexes,
+            Error::SwapIndexNotFound(_) => Code::IndexNotFound,
-            Error::SwapIndexesNotFound(_) => Code::InvalidSwapIndexes,
+            Error::SwapIndexesNotFound(_) => Code::IndexNotFound,
             Error::InvalidTaskDate { field, .. } => (*field).into(),
             Error::InvalidTaskUids { .. } => Code::InvalidTaskUids,
             Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatuses,
@ -1,3 +1,4 @@
+use std::collections::BTreeSet;
 use std::fmt::Write;

 use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
@ -92,7 +93,9 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {

 pub fn snapshot_file_store(file_store: &file_store::FileStore) -> String {
     let mut snap = String::new();
-    for uuid in file_store.__all_uuids() {
+    // we store the uuid in a `BTreeSet` to keep them ordered.
+    let all_uuids = file_store.all_uuids().unwrap().collect::<Result<BTreeSet<_>, _>>().unwrap();
+    for uuid in all_uuids {
         snap.push_str(&format!("{uuid}\n"));
     }
     snap
@ -452,6 +452,10 @@ impl IndexScheduler {
         &self.index_mapper.indexer_config
     }

+    pub fn size(&self) -> Result<u64> {
+        Ok(self.env.real_disk_size()?)
+    }
+
     /// Return the index corresponding to the name.
     ///
     /// * If the index wasn't opened before, the index will be opened.
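Why the BTreeSet above: the file store yields uuids in storage order, which is not stable across runs, so collecting them into a BTreeSet sorts them and makes the snapshot text deterministic. A minimal sketch of the same idea with plain u32 ids standing in for real uuids:

use std::collections::BTreeSet;
use std::fmt::Write;

fn snapshot_ids(ids: impl IntoIterator<Item = u32>) -> String {
    // BTreeSet iterates in sorted order, regardless of insertion order
    let ordered: BTreeSet<u32> = ids.into_iter().collect();
    let mut snap = String::new();
    for id in ordered {
        writeln!(snap, "{id}").unwrap();
    }
    snap
}

fn main() {
    // insertion order differs, snapshot output does not
    assert_eq!(snapshot_ids([3, 1, 2]), snapshot_ids([2, 3, 1]));
}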
@ -502,13 +506,22 @@ impl IndexScheduler {
         }

         if let Some(canceled_by) = &query.canceled_by {
+            let mut all_canceled_tasks = RoaringBitmap::new();
             for cancel_task_uid in canceled_by {
                 if let Some(canceled_by_uid) =
                     self.canceled_by.get(rtxn, &BEU32::new(*cancel_task_uid))?
                 {
-                    tasks &= canceled_by_uid;
+                    all_canceled_tasks |= canceled_by_uid;
                 }
             }
+
+            // if the canceled_by has been specified but no task
+            // matches then we prefer matching zero than all tasks.
+            if all_canceled_tasks.is_empty() {
+                return Ok(RoaringBitmap::new());
+            } else {
+                tasks &= all_canceled_tasks;
+            }
         }

         if let Some(kind) = &query.types {
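The filter fix above, sketched with the roaring crate (the bitmap type the real code uses, assumed as a dependency here): the per-uid bitmaps are unioned first, and an empty union now matches zero tasks instead of leaving `tasks` untouched, which previously meant a canceledBy filter with no hits matched everything.

use roaring::RoaringBitmap;

fn filter_canceled_by(
    mut tasks: RoaringBitmap,
    canceled_by: &[RoaringBitmap],
) -> RoaringBitmap {
    let mut all_canceled_tasks = RoaringBitmap::new();
    for canceled_by_uid in canceled_by {
        all_canceled_tasks |= canceled_by_uid;
    }
    // a specified but empty filter should match zero tasks, not all of them
    if all_canceled_tasks.is_empty() {
        RoaringBitmap::new()
    } else {
        tasks &= all_canceled_tasks;
        tasks
    }
}

fn main() {
    let tasks: RoaringBitmap = (0..10).collect();
    assert!(filter_canceled_by(tasks.clone(), &[]).is_empty());
    let canceled: RoaringBitmap = [2u32, 5].into_iter().collect();
    let filtered = filter_canceled_by(tasks, &[canceled]);
    assert_eq!(filtered.iter().collect::<Vec<_>>(), vec![2, 5]);
}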
@ -889,6 +902,11 @@ impl IndexScheduler {
         Ok(self.file_store.new_update_with_uuid(uuid)?)
     }

+    /// The size on disk taken by all the updates files contained in the `IndexScheduler`, in bytes.
+    pub fn compute_update_file_size(&self) -> Result<u64> {
+        Ok(self.file_store.compute_total_size()?)
+    }
+
     /// Delete a file from the index scheduler.
     ///
     /// Counterpart to the [`create_update_file`](IndexScheduler::create_update_file) method.
@ -1991,7 +2009,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
@ -2038,7 +2056,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
@ -2090,7 +2108,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
@ -2141,7 +2159,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
@ -2192,7 +2210,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[macro_export]
@ -2831,7 +2849,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
@ -2894,7 +2912,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
@ -2954,7 +2972,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }

     #[test]
|
|||||||
.unwrap()
|
.unwrap()
|
||||||
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
|
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_document_addition_with_multiple_primary_key() {
|
||||||
|
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||||
|
|
||||||
|
for (id, primary_key) in ["id", "bork", "bloup"].iter().enumerate() {
|
||||||
|
let content = format!(
|
||||||
|
r#"{{
|
||||||
|
"id": {id},
|
||||||
|
"doggo": "jean bob"
|
||||||
|
}}"#,
|
||||||
|
);
|
||||||
|
let (uuid, mut file) =
|
||||||
|
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||||
|
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||||
|
assert_eq!(documents_count, 1);
|
||||||
|
file.persist().unwrap();
|
||||||
|
|
||||||
|
index_scheduler
|
||||||
|
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||||
|
index_uid: S("doggos"),
|
||||||
|
primary_key: Some(S(primary_key)),
|
||||||
|
method: ReplaceDocuments,
|
||||||
|
content_file: uuid,
|
||||||
|
documents_count,
|
||||||
|
allow_index_creation: true,
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
index_scheduler.assert_internally_consistent();
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
|
||||||
|
|
||||||
|
// A first batch should be processed with only the first documentAddition.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
|
||||||
|
|
||||||
|
// The second batch should fail.
|
||||||
|
handle.advance_one_failed_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
|
||||||
|
|
||||||
|
// The second batch should fail.
|
||||||
|
handle.advance_one_failed_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_fails");
|
||||||
|
|
||||||
|
// Is the primary key still what we expect?
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"id");
|
||||||
|
|
||||||
|
// Is the document still the one we expect?.
|
||||||
|
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||||
|
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||||
|
let documents = index
|
||||||
|
.all_documents(&rtxn)
|
||||||
|
.unwrap()
|
||||||
|
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_document_addition_with_multiple_primary_key_batch_wrong_key() {
|
||||||
|
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||||
|
|
||||||
|
for (id, primary_key) in ["id", "bork", "bork"].iter().enumerate() {
|
||||||
|
let content = format!(
|
||||||
|
r#"{{
|
||||||
|
"id": {id},
|
||||||
|
"doggo": "jean bob"
|
||||||
|
}}"#,
|
||||||
|
);
|
||||||
|
let (uuid, mut file) =
|
||||||
|
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||||
|
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||||
|
assert_eq!(documents_count, 1);
|
||||||
|
file.persist().unwrap();
|
||||||
|
|
||||||
|
index_scheduler
|
||||||
|
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||||
|
index_uid: S("doggos"),
|
||||||
|
primary_key: Some(S(primary_key)),
|
||||||
|
method: ReplaceDocuments,
|
||||||
|
content_file: uuid,
|
||||||
|
documents_count,
|
||||||
|
allow_index_creation: true,
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
index_scheduler.assert_internally_consistent();
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
|
||||||
|
|
||||||
|
// A first batch should be processed with only the first documentAddition.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
|
||||||
|
|
||||||
|
// The second batch should fail and contains two tasks.
|
||||||
|
handle.advance_one_failed_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_and_third_tasks_fails");
|
||||||
|
|
||||||
|
// Is the primary key still what we expect?
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"id");
|
||||||
|
|
||||||
|
// Is the document still the one we expect?.
|
||||||
|
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||||
|
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||||
|
let documents = index
|
||||||
|
.all_documents(&rtxn)
|
||||||
|
.unwrap()
|
||||||
|
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_document_addition_with_bad_primary_key() {
|
||||||
|
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||||
|
|
||||||
|
for (id, primary_key) in ["bork", "bork", "id", "bork", "id"].iter().enumerate() {
|
||||||
|
let content = format!(
|
||||||
|
r#"{{
|
||||||
|
"id": {id},
|
||||||
|
"doggo": "jean bob"
|
||||||
|
}}"#,
|
||||||
|
);
|
||||||
|
let (uuid, mut file) =
|
||||||
|
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||||
|
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||||
|
assert_eq!(documents_count, 1);
|
||||||
|
file.persist().unwrap();
|
||||||
|
|
||||||
|
index_scheduler
|
||||||
|
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||||
|
index_uid: S("doggos"),
|
||||||
|
primary_key: Some(S(primary_key)),
|
||||||
|
method: ReplaceDocuments,
|
||||||
|
content_file: uuid,
|
||||||
|
documents_count,
|
||||||
|
allow_index_creation: true,
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
index_scheduler.assert_internally_consistent();
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_5_tasks");
|
||||||
|
|
||||||
|
// A first batch should be processed with only the first two documentAddition.
|
||||||
|
// it should fails because the documents don't contains any `bork` field.
|
||||||
|
// NOTE: it's marked as successful because the batch didn't fails, it's the individual tasks that failed.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_and_second_task_fails");
|
||||||
|
|
||||||
|
// The primary key should be set to none since we failed the batch.
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap();
|
||||||
|
snapshot!(primary_key.is_none(), @"true");
|
||||||
|
|
||||||
|
// The second batch should succeed and only contains one task.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
|
||||||
|
|
||||||
|
// The primary key should be set to `id` since this batch succeeded.
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"id");
|
||||||
|
|
||||||
|
// We're trying to `bork` again, but now there is already a primary key set for this index.
|
||||||
|
handle.advance_one_failed_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_task_fails");
|
||||||
|
|
||||||
|
// Finally the last task should succeed since its primary key is the same as the valid one.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fifth_task_succeeds");
|
||||||
|
|
||||||
|
// Is the primary key still what we expect?
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"id");
|
||||||
|
|
||||||
|
// Is the document still the one we expect?.
|
||||||
|
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||||
|
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||||
|
let documents = index
|
||||||
|
.all_documents(&rtxn)
|
||||||
|
.unwrap()
|
||||||
|
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_document_addition_with_set_and_null_primary_key() {
|
||||||
|
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||||
|
|
||||||
|
for (id, primary_key) in
|
||||||
|
[None, Some("bork"), Some("paw"), None, None, Some("paw")].into_iter().enumerate()
|
||||||
|
{
|
||||||
|
let content = format!(
|
||||||
|
r#"{{
|
||||||
|
"paw": {id},
|
||||||
|
"doggo": "jean bob"
|
||||||
|
}}"#,
|
||||||
|
);
|
||||||
|
let (uuid, mut file) =
|
||||||
|
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||||
|
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||||
|
assert_eq!(documents_count, 1);
|
||||||
|
file.persist().unwrap();
|
||||||
|
|
||||||
|
index_scheduler
|
||||||
|
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||||
|
index_uid: S("doggos"),
|
||||||
|
primary_key: primary_key.map(|pk| pk.to_string()),
|
||||||
|
method: ReplaceDocuments,
|
||||||
|
content_file: uuid,
|
||||||
|
documents_count,
|
||||||
|
allow_index_creation: true,
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
index_scheduler.assert_internally_consistent();
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");
|
||||||
|
|
||||||
|
// A first batch should contains only one task that fails because we can't infer the primary key.
|
||||||
|
// NOTE: it's marked as successful because the batch didn't fails, it's the individual tasks that failed.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_fails");
|
||||||
|
|
||||||
|
// The second batch should contains only one task that fails because we bork is not a valid primary key.
|
||||||
|
// NOTE: it's marked as successful because the batch didn't fails, it's the individual tasks that failed.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
|
||||||
|
|
||||||
|
// No primary key should be set at this point.
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap();
|
||||||
|
snapshot!(primary_key.is_none(), @"true");
|
||||||
|
|
||||||
|
// The third batch should succeed and only contains one task.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
|
||||||
|
|
||||||
|
// The primary key should be set to `id` since this batch succeeded.
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"paw");
|
||||||
|
|
||||||
|
// We should be able to batch together the next two tasks that don't specify any primary key
|
||||||
|
// + the last task that matches the current primary-key. Everything should succeed.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds");
|
||||||
|
|
||||||
|
// Is the primary key still what we expect?
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"paw");
|
||||||
|
|
||||||
|
// Is the document still the one we expect?.
|
||||||
|
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||||
|
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||||
|
let documents = index
|
||||||
|
.all_documents(&rtxn)
|
||||||
|
.unwrap()
|
||||||
|
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_document_addition_with_set_and_null_primary_key_inference_works() {
|
||||||
|
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||||
|
|
||||||
|
for (id, primary_key) in [None, Some("bork"), Some("doggoid"), None, None, Some("doggoid")]
|
||||||
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
{
|
||||||
|
let content = format!(
|
||||||
|
r#"{{
|
||||||
|
"doggoid": {id},
|
||||||
|
"doggo": "jean bob"
|
||||||
|
}}"#,
|
||||||
|
);
|
||||||
|
let (uuid, mut file) =
|
||||||
|
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||||
|
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||||
|
assert_eq!(documents_count, 1);
|
||||||
|
file.persist().unwrap();
|
||||||
|
|
||||||
|
index_scheduler
|
||||||
|
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||||
|
index_uid: S("doggos"),
|
||||||
|
primary_key: primary_key.map(|pk| pk.to_string()),
|
||||||
|
method: ReplaceDocuments,
|
||||||
|
content_file: uuid,
|
||||||
|
documents_count,
|
||||||
|
allow_index_creation: true,
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
index_scheduler.assert_internally_consistent();
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");
|
||||||
|
|
||||||
|
// A first batch should contains only one task that succeed and sets the primary key to `doggoid`.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_succeed");
|
||||||
|
|
||||||
|
// Checking the primary key.
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap();
|
||||||
|
snapshot!(primary_key.is_none(), @"false");
|
||||||
|
|
||||||
|
// The second batch should contains only one task that fails because it tries to update the primary key to `bork`.
|
||||||
|
handle.advance_one_failed_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
|
||||||
|
|
||||||
|
// The third batch should succeed and only contains one task.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
|
||||||
|
|
||||||
|
// We should be able to batch together the next two tasks that don't specify any primary key
|
||||||
|
// + the last task that matches the current primary-key. Everything should succeed.
|
||||||
|
handle.advance_one_successful_batch();
|
||||||
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds");
|
||||||
|
|
||||||
|
// Is the primary key still what we expect?
|
||||||
|
let index = index_scheduler.index("doggos").unwrap();
|
||||||
|
let rtxn = index.read_txn().unwrap();
|
||||||
|
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||||
|
snapshot!(primary_key, @"doggoid");
|
||||||
|
|
||||||
|
// Is the document still the one we expect?.
|
||||||
|
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||||
|
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||||
|
let documents = index
|
||||||
|
.all_documents(&rtxn)
|
||||||
|
.unwrap()
|
||||||
|
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -10,7 +10,7 @@ source: index-scheduler/src/lib.rs
 1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
 2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
 3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "invalid_swap_indexes", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-swap-indexes" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
-1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
-2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
+2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
-3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
-5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
+5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
-6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
+6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
-7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
+7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
-8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
+8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
-9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
+9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
-1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
-2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
+2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
-3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
-5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
+5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
-6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
+6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
-7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
+7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
-8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
+8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
-9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
+9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@ -6,11 +6,11 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
-1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
-2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
+2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
-3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
@ -0,0 +1,49 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------
@ -0,0 +1,13 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 2,
    "doggo": "jean bob"
  },
  {
    "id": 4,
    "doggo": "jean bob"
  }
]
@ -0,0 +1,54 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [2,4,]
failed [0,1,3,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@ -0,0 +1,50 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,3,4,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------
@ -0,0 +1,53 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [4,]
succeeded [2,]
failed [0,1,3,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------
@ -0,0 +1,52 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [3,4,]
succeeded [2,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------
@ -0,0 +1,43 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,9 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 0,
    "doggo": "jean bob"
  }
]
@ -0,0 +1,45 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,47 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,48 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,]
failed [1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@ -0,0 +1,43 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,9 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 0,
    "doggo": "jean bob"
  }
]
@ -0,0 +1,45 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,47 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -0,0 +1,52 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,56 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [2,3,4,5,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------

@@ -0,0 +1,21 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "paw": 2,
    "doggo": "jean bob"
  },
  {
    "paw": 3,
    "doggo": "jean bob"
  },
  {
    "paw": 4,
    "doggo": "jean bob"
  },
  {
    "paw": 5,
    "doggo": "jean bob"
  }
]

@@ -0,0 +1,54 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,3,4,5,]
failed [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,55 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,3,4,5,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,57 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [3,4,5,]
succeeded [2,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,52 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,56 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,2,3,4,5,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------

@@ -0,0 +1,25 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "doggoid": 0,
    "doggo": "jean bob"
  },
  {
    "doggoid": 2,
    "doggo": "jean bob"
  },
  {
    "doggoid": 3,
    "doggo": "jean bob"
  },
  {
    "doggoid": 4,
    "doggo": "jean bob"
  },
  {
    "doggoid": 5,
    "doggo": "jean bob"
  }
]

@@ -0,0 +1,54 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,3,4,5,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,56 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,3,4,5,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -0,0 +1,57 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [3,4,5,]
succeeded [0,2,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -404,15 +404,19 @@ impl IndexScheduler {
             Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => {
                 assert_eq!(kind.as_kind(), Kind::DocumentAdditionOrUpdate);
                 match indexed_documents {
-                    Some(0) => assert_ne!(status, Status::Enqueued),
                     Some(indexed_documents) => {
-                        assert_eq!(status, Status::Succeeded);
-                        assert!(indexed_documents <= received_documents);
+                        assert!(matches!(
+                            status,
+                            Status::Succeeded | Status::Failed | Status::Canceled
+                        ));
+                        match status {
+                            Status::Succeeded => assert!(indexed_documents <= received_documents),
+                            Status::Failed | Status::Canceled => assert_eq!(indexed_documents, 0),
+                            status => panic!("DocumentAddition can't have an indexed_document set if it's {}", status),
+                        }
                     }
                     None => {
-                        assert_ne!(status, Status::Succeeded);
-                        assert_ne!(status, Status::Canceled);
-                        assert_ne!(status, Status::Failed);
+                        assert!(matches!(status, Status::Enqueued | Status::Processing))
                     }
                 }
             }
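
The hunk above tightens the scheduler's invariant for document-addition tasks: indexed_documents stays None only while a task is enqueued or processing, and once it is Some, a succeeded task may report at most received_documents while a failed or canceled task must report exactly 0. A minimal self-contained sketch of that rule, using simplified stand-in types rather than the scheduler's real ones:

// Stand-in status enum, for illustration only.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Status { Enqueued, Processing, Succeeded, Failed, Canceled }

/// Returns true when (status, indexed, received) satisfies the invariant
/// asserted in the hunk above.
fn details_are_consistent(status: Status, indexed: Option<u64>, received: u64) -> bool {
    match indexed {
        // No count yet: the task must not have finished.
        None => matches!(status, Status::Enqueued | Status::Processing),
        // A count exists: the task must have finished, and failed or
        // canceled tasks index nothing.
        Some(indexed) => match status {
            Status::Succeeded => indexed <= received,
            Status::Failed | Status::Canceled => indexed == 0,
            _ => false,
        },
    }
}

fn main() {
    assert!(details_are_consistent(Status::Enqueued, None, 1));
    assert!(details_are_consistent(Status::Failed, Some(0), 1));
    assert!(!details_are_consistent(Status::Failed, Some(1), 1)); // failed tasks index nothing
}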

@@ -504,10 +508,21 @@ impl IndexScheduler {
                 if let KindWithContent::DocumentAdditionOrUpdate { content_file, .. } = kind {
                     match status {
                         Status::Enqueued | Status::Processing => {
-                            assert!(self.file_store.__all_uuids().contains(&content_file));
+                            assert!(self
+                                .file_store
+                                .all_uuids()
+                                .unwrap()
+                                .any(|uuid| uuid.as_ref().unwrap() == &content_file),
+                                "Could not find uuid `{content_file}` in the file_store. Available uuids are {:?}.",
+                                self.file_store.all_uuids().unwrap().collect::<std::result::Result<Vec<_>, file_store::Error>>().unwrap(),
+                            );
                         }
                         Status::Succeeded | Status::Failed | Status::Canceled => {
-                            assert!(!self.file_store.__all_uuids().contains(&content_file));
+                            assert!(self
+                                .file_store
+                                .all_uuids()
+                                .unwrap()
+                                .all(|uuid| uuid.as_ref().unwrap() != &content_file));
                         }
                     }
                 }
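
The rewritten assertions above imply that all_uuids() no longer returns a collection with a contains() helper but a fallible iterator over fallible items (roughly Result<impl Iterator<Item = Result<Uuid, Error>>>), hence the double unwrap in the test. A hedged sketch of consuming such an API, with a hypothetical FileStore, since the real signature lives in the file-store crate and is only inferred here:

use uuid::Uuid;

// Hypothetical error and store types, inferred from the assertions above.
#[derive(Debug)]
struct Error;

struct FileStore {
    uuids: Vec<Uuid>,
}

impl FileStore {
    /// Mirrors the inferred shape: a fallible call yielding fallible items.
    fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid, Error>> + '_, Error> {
        Ok(self.uuids.iter().copied().map(Ok))
    }
}

fn main() -> Result<(), Error> {
    let store = FileStore { uuids: vec![Uuid::nil()] };
    // Collecting into Result<Vec<_>, _> short-circuits on the first error.
    let uuids: Vec<Uuid> = store.all_uuids()?.collect::<Result<_, _>>()?;
    assert!(uuids.contains(&Uuid::nil()));
    Ok(())
}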

@@ -3,12 +3,12 @@ pub mod error;
 mod store;
 
 use std::collections::{HashMap, HashSet};
-use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;
 
 use error::{AuthControllerError, Result};
 use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
+use meilisearch_types::milli::update::Setting;
 use meilisearch_types::star_or::StarOr;
 use serde::{Deserialize, Serialize};
 pub use store::open_auth_store_env;

@@ -33,6 +33,11 @@ impl AuthController {
         Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
     }
 
+    /// Return the size of the `AuthController` database in bytes.
+    pub fn size(&self) -> Result<u64> {
+        self.store.size()
+    }
+
     pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
         match self.store.get_api_key(create_key.uid)? {
             Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),

@@ -42,8 +47,14 @@ impl AuthController {
 
     pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result<Key> {
         let mut key = self.get_key(uid)?;
-        key.description = patch.description;
-        key.name = patch.name;
+        match patch.description {
+            Setting::NotSet => (),
+            description => key.description = description.set(),
+        };
+        match patch.name {
+            Setting::NotSet => (),
+            name => key.name = name.set(),
+        };
         key.updated_at = OffsetDateTime::now_utc();
         self.store.put_api_key(key)
     }
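
The update_key change above alters PATCH semantics: with plain assignment, a field missing from the payload wiped the stored value, whereas matching on Setting lets the handler distinguish "absent, keep the old value" (NotSet) from "explicitly cleared" (Reset) and "replaced" (Set). A minimal sketch of that three-state behavior, using a local stand-in rather than milli's actual Setting type:

// Local stand-in for milli's `Setting`, for illustration only.
enum Setting<T> {
    Set(T), // field present with a value
    Reset,  // field present but null: clear it
    NotSet, // field absent from the payload
}

impl<T> Setting<T> {
    fn set(self) -> Option<T> {
        match self {
            Setting::Set(v) => Some(v),
            _ => None,
        }
    }
}

fn apply(current: &mut Option<String>, patch: Setting<String>) {
    match patch {
        Setting::NotSet => (),           // keep the old value
        patch => *current = patch.set(), // Set(v) -> Some(v), Reset -> None
    }
}

fn main() {
    let mut description = Some("old".to_string());
    apply(&mut description, Setting::NotSet);
    assert_eq!(description.as_deref(), Some("old")); // untouched
    apply(&mut description, Setting::Reset);
    assert_eq!(description, None); // cleared
}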

@@ -86,15 +97,13 @@ impl AuthController {
                 key.indexes
                     .into_iter()
                     .filter_map(|index| {
-                        search_rules.get_index_search_rules(index.deref()).map(
-                            |index_search_rules| {
-                                (String::from(index), Some(index_search_rules))
-                            },
+                        search_rules.get_index_search_rules(&format!("{index}")).map(
+                            |index_search_rules| (index.to_string(), Some(index_search_rules)),
                         )
                     })
                     .collect(),
             ),
-            None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
+            None => SearchRules::Set(key.indexes.into_iter().map(|x| x.to_string()).collect()),
         };
     } else if let Some(search_rules) = search_rules {
         filters.search_rules = search_rules;

@@ -3,7 +3,6 @@ use std::cmp::Reverse;
 use std::collections::HashSet;
 use std::convert::{TryFrom, TryInto};
 use std::fs::create_dir_all;
-use std::ops::Deref;
 use std::path::Path;
 use std::str;
 use std::sync::Arc;

@@ -61,6 +60,11 @@ impl HeedAuthStore {
         Ok(Self { env, keys, action_keyid_index_expiration, should_close_on_drop: true })
     }
 
+    /// Return the size in bytes of database
+    pub fn size(&self) -> Result<u64> {
+        Ok(self.env.real_disk_size()?)
+    }
+
     pub fn set_drop_on_close(&mut self, v: bool) {
         self.should_close_on_drop = v;
     }

@@ -135,7 +139,7 @@ impl HeedAuthStore {
         for index in key.indexes.iter() {
             db.put(
                 &mut wtxn,
-                &(&uid, &action, Some(index.deref().as_bytes())),
+                &(&uid, &action, Some(index.to_string().as_bytes())),
                 &key.expires_at,
             )?;
         }

@@ -9,18 +9,17 @@ actix-web = { version = "4.2.1", default-features = false }
 anyhow = "1.0.65"
 convert_case = "0.6.0"
 csv = "1.1.6"
-deserr = "0.1.4"
+deserr = "0.3.0"
 either = { version = "1.6.1", features = ["serde"] }
 enum-iterator = "1.1.3"
 file-store = { path = "../file-store" }
 flate2 = "1.0.24"
 fst = "0.4.7"
 memmap2 = "0.5.7"
-milli = { path = "../milli", default-features = false }
-proptest = { version = "1.0.0", optional = true }
-proptest-derive = { version = "0.3.0", optional = true }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.41.1", default-features = false }
 roaring = { version = "0.10.0", features = ["serde"] }
 serde = { version = "1.0.145", features = ["derive"] }
+serde-cs = "0.2.4"
 serde_json = "1.0.85"
 tar = "0.4.38"
 tempfile = "3.3.0"

@@ -32,8 +31,6 @@ uuid = { version = "1.1.2", features = ["serde", "v4"] }
 [dev-dependencies]
 insta = "1.19.1"
 meili-snap = { path = "../meili-snap" }
-proptest = "1.0.0"
-proptest-derive = "0.3.0"
 
 [features]
 # all specialized tokenizations

@@ -47,4 +44,3 @@ hebrew = ["milli/hebrew"]
 japanese = ["milli/japanese"]
 # thai specialized tokenization
 thai = ["milli/thai"]
-test-traits = ["proptest", "proptest-derive"]

328
meilisearch-types/src/deserr/error_messages.rs
Normal file
@ -0,0 +1,328 @@
|
|||||||
|
/*!
|
||||||
|
This module implements the error messages of deserialization errors.
|
||||||
|
|
||||||
|
We try to:
|
||||||
|
1. Give a human-readable description of where the error originated.
|
||||||
|
2. Use the correct terms depending on the format of the request (json/query param)
|
||||||
|
3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.)
|
||||||
|
*/
|
||||||
|
use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};
|
||||||
|
|
||||||
|
use super::{DeserrJsonError, DeserrQueryParamError};
|
||||||
|
use crate::error::{Code, ErrorCode};
|
||||||
|
|
||||||
|
/// Return a description of the given location in a Json, preceded by the given article.
|
||||||
|
/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
|
||||||
|
/// included in the description.
|
||||||
|
pub fn location_json_description(location: ValuePointerRef, article: &str) -> String {
|
||||||
|
fn rec(location: ValuePointerRef) -> String {
|
||||||
|
match location {
|
||||||
|
ValuePointerRef::Origin => String::new(),
|
||||||
|
ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key,
|
||||||
|
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
match location {
|
||||||
|
ValuePointerRef::Origin => String::new(),
|
||||||
|
_ => {
|
||||||
|
format!("{article} `{}`", rec(location))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
/// Return a description of the list of value kinds for a Json payload.
fn value_kinds_description_json(kinds: &[ValueKind]) -> String {
    // Rank each value kind so that they can be sorted (and deduplicated)
    // Having a predictable order helps with pattern matching
    fn order(kind: &ValueKind) -> u8 {
        match kind {
            ValueKind::Null => 0,
            ValueKind::Boolean => 1,
            ValueKind::Integer => 2,
            ValueKind::NegativeInteger => 3,
            ValueKind::Float => 4,
            ValueKind::String => 5,
            ValueKind::Sequence => 6,
            ValueKind::Map => 7,
        }
    }
    // Return a description of a single value kind, preceded by an article
    fn single_description(kind: &ValueKind) -> &'static str {
        match kind {
            ValueKind::Null => "null",
            ValueKind::Boolean => "a boolean",
            ValueKind::Integer => "a positive integer",
            ValueKind::NegativeInteger => "a negative integer",
            ValueKind::Float => "a number",
            ValueKind::String => "a string",
            ValueKind::Sequence => "an array",
            ValueKind::Map => "an object",
        }
    }

    fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) {
        let (msg_part, rest): (_, &[ValueKind]) = match kinds {
            [] => (String::new(), &[]),
            [ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
                ("a number".to_owned(), rest)
            }
            [ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
                ("a number".to_owned(), rest)
            }
            [ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => {
                ("an integer".to_owned(), rest)
            }
            [a] => (single_description(a).to_owned(), &[]),
            [a, rest @ ..] => (single_description(a).to_owned(), rest),
        };

        if rest.is_empty() {
            if *count_items == 0 {
                message.push_str(&msg_part);
            } else if *count_items == 1 {
                message.push_str(&format!(" or {msg_part}"));
            } else {
                message.push_str(&format!(", or {msg_part}"));
            }
        } else {
            if *count_items == 0 {
                message.push_str(&msg_part);
            } else {
                message.push_str(&format!(", {msg_part}"));
            }

            *count_items += 1;
            description_rec(rest, count_items, message);
        }
    }

    let mut kinds = kinds.to_owned();
    kinds.sort_by_key(order);
    kinds.dedup();

    if kinds.is_empty() {
        // Should not happen ideally
        "a different value".to_owned()
    } else {
        let mut message = String::new();
        description_rec(kinds.as_slice(), &mut 0, &mut message);
        message
    }
}

/// Return the JSON string of the value preceded by a description of its kind
fn value_description_with_kind_json(v: &serde_json::Value) -> String {
    match v.kind() {
        ValueKind::Null => "null".to_owned(),
        kind => {
            format!(
                "{}: `{}`",
                value_kinds_description_json(&[kind]),
                serde_json::to_string(v).unwrap()
            )
        }
    }
}

impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrJsonError<C> {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let mut message = String::new();

        message.push_str(&match error {
            ErrorKind::IncorrectValueKind { actual, accepted } => {
                let expected = value_kinds_description_json(accepted);
                let received = value_description_with_kind_json(&serde_json::Value::from(actual));

                let location = location_json_description(location, " at");

                format!("Invalid value type{location}: expected {expected}, but found {received}")
            }
            ErrorKind::MissingField { field } => {
                let location = location_json_description(location, " inside");
                format!("Missing field `{field}`{location}")
            }
            ErrorKind::UnknownKey { key, accepted } => {
                let location = location_json_description(location, " inside");
                format!(
                    "Unknown field `{}`{location}: expected one of {}",
                    key,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            }
            ErrorKind::UnknownValue { value, accepted } => {
                let location = location_json_description(location, " at");
                format!(
                    "Unknown value `{}`{location}: expected one of {}",
                    value,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", "),
                )
            }
            ErrorKind::Unexpected { msg } => {
                let location = location_json_description(location, " at");
                format!("Invalid value{location}: {msg}")
            }
        });

        Err(DeserrJsonError::new(message, C::default().error_code()))
    }
}
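To see the kind of message this yields, one can trigger an `Unexpected` error by hand. A minimal sketch, assuming `ErrorKind::Unexpected` carries an owned `msg` as used above, and that `deserr` provides an `IntoValue` impl for `Infallible` (the convenience builders below rely on the same):

    use std::convert::Infallible;

    use deserr::DeserializeError;

    use crate::error::deserr_codes::BadRequest;

    let origin = ValuePointerRef::Origin;
    let location = ValuePointerRef::Key { key: "offset", prev: &origin };
    let err = DeserrJsonError::<BadRequest>::error::<Infallible>(
        None,
        ErrorKind::Unexpected { msg: "could not parse `abc` as a positive integer".to_string() },
        location,
    )
    .unwrap_err();
    // `error` always returns `Err`; the message embeds the location description.
    assert_eq!(
        err.to_string(),
        "Invalid value at `.offset`: could not parse `abc` as a positive integer"
    );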
pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
    let msg = format!(
        "Immutable field `{field}`: expected one of {}",
        accepted
            .iter()
            .map(|accepted| format!("`{}`", accepted))
            .collect::<Vec<String>>()
            .join(", ")
    );

    DeserrJsonError::new(msg, code)
}
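For instance (sketch):

    let err = immutable_field_error("uid", &["description", "name"], Code::ImmutableApiKeyUid);
    assert_eq!(err.to_string(), "Immutable field `uid`: expected one of `description`, `name`");
    assert_eq!(err.error_code(), Code::ImmutableApiKeyUid);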
/// Return a description of the given location in query parameters, preceded by the
/// given article. e.g. `at key5[2]`. If the location is the origin, the given article
/// will not be included in the description.
pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String {
    fn rec(location: ValuePointerRef) -> String {
        match location {
            ValuePointerRef::Origin => String::new(),
            ValuePointerRef::Key { key, prev } => {
                if matches!(prev, ValuePointerRef::Origin) {
                    key.to_owned()
                } else {
                    rec(*prev) + "." + key
                }
            }
            ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
        }
    }
    match location {
        ValuePointerRef::Origin => String::new(),
        _ => {
            format!("{article} `{}`", rec(location))
        }
    }
}
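The one behavioural difference from the Json variant: a top-level query parameter is not prefixed with a dot. A sketch, building the pointer chain by hand as above:

    let origin = ValuePointerRef::Origin;
    let key5 = ValuePointerRef::Key { key: "key5", prev: &origin };
    let item2 = ValuePointerRef::Index { index: 2, prev: &key5 };
    // `key5`, not `.key5`, because its parent is the origin.
    assert_eq!(location_query_param_description(item2, " at"), " at `key5[2]`");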
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrQueryParamError<C> {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let mut message = String::new();

        message.push_str(&match error {
            ErrorKind::IncorrectValueKind { actual, accepted } => {
                let expected = value_kinds_description_query_param(accepted);
                let received = value_description_with_kind_query_param(actual);

                let location = location_query_param_description(location, " for parameter");

                format!("Invalid value type{location}: expected {expected}, but found {received}")
            }
            ErrorKind::MissingField { field } => {
                let location = location_query_param_description(location, " inside");
                format!("Missing parameter `{field}`{location}")
            }
            ErrorKind::UnknownKey { key, accepted } => {
                let location = location_query_param_description(location, " inside");
                format!(
                    "Unknown parameter `{}`{location}: expected one of {}",
                    key,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            }
            ErrorKind::UnknownValue { value, accepted } => {
                let location = location_query_param_description(location, " for parameter");
                format!(
                    "Unknown value `{}`{location}: expected one of {}",
                    value,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", "),
                )
            }
            ErrorKind::Unexpected { msg } => {
                let location = location_query_param_description(location, " in parameter");
                format!("Invalid value{location}: {msg}")
            }
        });

        Err(DeserrQueryParamError::new(message, C::default().error_code()))
    }
}

/// Return a description of the list of value kinds for query parameters
/// Since query parameters are always treated as strings, we always return
/// "a string" for now.
fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String {
    "a string".to_owned()
}

fn value_description_with_kind_query_param<V: IntoValue>(actual: deserr::Value<V>) -> String {
    match actual {
        deserr::Value::Null => "null".to_owned(),
        deserr::Value::Boolean(x) => format!("a boolean: `{x}`"),
        deserr::Value::Integer(x) => format!("an integer: `{x}`"),
        deserr::Value::NegativeInteger(x) => {
            format!("an integer: `{x}`")
        }
        deserr::Value::Float(x) => {
            format!("a number: `{x}`")
        }
        deserr::Value::String(x) => {
            format!("a string: `{x}`")
        }
        deserr::Value::Sequence(_) => "multiple values".to_owned(),
        deserr::Value::Map(_) => "multiple parameters".to_owned(),
    }
}

#[cfg(test)]
mod tests {
    use deserr::ValueKind;

    use crate::deserr::error_messages::value_kinds_description_json;

    #[test]
    fn test_value_kinds_description_json() {
        insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value");

        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"a negative integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object");

        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"a negative integer or an array");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
    }
}
134 meilisearch-types/src/deserr/mod.rs Normal file
@@ -0,0 +1,134 @@
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;

use deserr::{DeserializeError, MergeWithError, ValuePointerRef};

use crate::error::deserr_codes::{self, *};
use crate::error::{
    unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
    ParseOffsetDateTimeError,
};
use crate::index_uid::IndexUidFormatError;
use crate::tasks::{ParseTaskKindError, ParseTaskStatusError};

pub mod error_messages;
pub mod query_params;

/// Marker type for the Json format
pub struct DeserrJson;
/// Marker type for the Query Parameter format
pub struct DeserrQueryParam;

pub type DeserrJsonError<C = deserr_codes::BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = deserr_codes::BadRequest> = DeserrError<DeserrQueryParam, C>;

/// A request deserialization error.
///
/// The first generic parameter is a marker type describing the format of the request:
/// either JSON (e.g. [`DeserrJson`]) or query parameters (e.g. [`DeserrQueryParam`]).
/// The second generic parameter is the default error code for the deserialization error,
/// in case it is not given.
pub struct DeserrError<Format, C: Default + ErrorCode> {
    pub msg: String,
    pub code: Code,
    _phantom: PhantomData<(Format, C)>,
}
impl<Format, C: Default + ErrorCode> DeserrError<Format, C> {
    pub fn new(msg: String, code: Code) -> Self {
        Self { msg, code, _phantom: PhantomData }
    }
}
impl<Format, C: Default + ErrorCode> std::fmt::Debug for DeserrError<Format, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
    }
}

impl<Format, C: Default + ErrorCode> std::fmt::Display for DeserrError<Format, C> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.msg)
    }
}

impl<Format, C: Default + ErrorCode> std::error::Error for DeserrError<Format, C> {}
impl<Format, C: Default + ErrorCode> ErrorCode for DeserrError<Format, C> {
    fn error_code(&self) -> Code {
        self.code
    }
}

// For now, we don't accumulate errors. Only one deserialisation error is ever returned at a time.
impl<Format, C1: Default + ErrorCode, C2: Default + ErrorCode>
    MergeWithError<DeserrError<Format, C2>> for DeserrError<Format, C1>
{
    fn merge(
        _self_: Option<Self>,
        other: DeserrError<Format, C2>,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
    }
}

impl<Format, C: Default + ErrorCode> MergeWithError<Infallible> for DeserrError<Format, C> {
    fn merge(
        _self_: Option<Self>,
        _other: Infallible,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        unreachable!()
    }
}

// Implement a convenience function to build a `missing_field` error
macro_rules! make_missing_field_convenience_builder {
    ($err_code:ident, $fn_name:ident) => {
        impl DeserrJsonError<$err_code> {
            pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self {
                let x = unwrap_any(Self::error::<Infallible>(
                    None,
                    deserr::ErrorKind::MissingField { field },
                    location,
                ));
                Self { msg: x.msg, code: $err_code.error_code(), _phantom: PhantomData }
            }
        }
    };
}
make_missing_field_convenience_builder!(MissingIndexUid, missing_index_uid);
make_missing_field_convenience_builder!(MissingApiKeyActions, missing_api_key_actions);
make_missing_field_convenience_builder!(MissingApiKeyExpiresAt, missing_api_key_expires_at);
make_missing_field_convenience_builder!(MissingApiKeyIndexes, missing_api_key_indexes);
make_missing_field_convenience_builder!(MissingSwapIndexes, missing_swap_indexes);
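Each invocation generates one constructor; the first one can then be used like this (sketch):

    // Build the canonical "missing `uid` field" error at the payload root.
    let err = DeserrJsonError::<MissingIndexUid>::missing_index_uid("uid", ValuePointerRef::Origin);
    assert_eq!(err.code, Code::MissingIndexUid);
    // At the origin, no location suffix is appended to the message.
    assert_eq!(err.msg, "Missing field `uid`");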
// Integrate a sub-error into a [`DeserrError`] by taking its error message but using
// the default error code (C) from `Self`
macro_rules! merge_with_error_impl_take_error_message {
    ($err_type:ty) => {
        impl<Format, C: Default + ErrorCode> MergeWithError<$err_type> for DeserrError<Format, C>
        where
            DeserrError<Format, C>: deserr::DeserializeError,
        {
            fn merge(
                _self_: Option<Self>,
                other: $err_type,
                merge_location: ValuePointerRef,
            ) -> Result<Self, Self> {
                DeserrError::<Format, C>::error::<Infallible>(
                    None,
                    deserr::ErrorKind::Unexpected { msg: other.to_string() },
                    merge_location,
                )
            }
        }
    };
}

// All these errors can be merged into a `DeserrError`
merge_with_error_impl_take_error_message!(DeserrParseIntError);
merge_with_error_impl_take_error_message!(DeserrParseBoolError);
merge_with_error_impl_take_error_message!(uuid::Error);
merge_with_error_impl_take_error_message!(InvalidTaskDateError);
merge_with_error_impl_take_error_message!(ParseOffsetDateTimeError);
merge_with_error_impl_take_error_message!(ParseTaskKindError);
merge_with_error_impl_take_error_message!(ParseTaskStatusError);
merge_with_error_impl_take_error_message!(IndexUidFormatError);
115 meilisearch-types/src/deserr/query_params.rs Normal file
@@ -0,0 +1,115 @@
/*!
This module provides helper traits, types, and functions to deserialize query parameters.

The source of the problem is that query parameters only give us a string to work with.
This means `deserr` is never given a sequence or numbers, and thus the default deserialization
code for common types such as `usize` or `Vec<T>` does not work. To work around it, we create a
wrapper type called `Param<T>`, which is deserialised using the `from_query_param` method of the trait
`FromQueryParameter`.

We also use other helper types such as `CS` (i.e. comma-separated) from `serde_cs` as well as
`StarOr`, `OptionStarOr`, and `OptionStarOrList`.
*/

use std::convert::Infallible;
use std::ops::Deref;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};

use super::{DeserrParseBoolError, DeserrParseIntError};
use crate::error::unwrap_any;
use crate::index_uid::IndexUid;
use crate::tasks::{Kind, Status};

/// A wrapper type indicating that the inner value should be
/// deserialised from a query parameter string.
///
/// Note that if the field is optional, it is better to use
/// `Option<Param<T>>` instead of `Param<Option<T>>`.
#[derive(Default, Debug, Clone, Copy)]
pub struct Param<T>(pub T);

impl<T> Deref for Param<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T, E> DeserializeFromValue<E> for Param<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        match value {
            deserr::Value::String(s) => match T::from_query_param(&s) {
                Ok(x) => Ok(Param(x)),
                Err(e) => Err(unwrap_any(E::merge(None, e, location))),
            },
            _ => Err(unwrap_any(E::error(
                None,
                deserr::ErrorKind::IncorrectValueKind {
                    actual: value,
                    accepted: &[ValueKind::String],
                },
                location,
            ))),
        }
    }
}
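A sketch of how `Param<T>` is meant to be used in a route's query struct; the struct and field names here are illustrative, not taken from an actual route:

    use super::DeserrQueryParamError;
    use crate::error::deserr_codes::{InvalidTaskFrom, InvalidTaskLimit};

    #[derive(Debug, DeserializeFromValue)]
    #[deserr(error = DeserrQueryParamError)]
    pub struct ListTasksQuery {
        // Required-with-default parameter: falls back to Param(20) when absent.
        #[deserr(default = Param(20), error = DeserrQueryParamError<InvalidTaskLimit>)]
        pub limit: Param<u32>,
        // Optional parameter: `Option<Param<T>>` as the doc comment above recommends.
        #[deserr(default, error = DeserrQueryParamError<InvalidTaskFrom>)]
        pub from: Option<Param<u32>>,
    }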
/// Parse a value from a query parameter string.
///
/// This trait is functionally equivalent to `FromStr`.
/// Having a separate trait allows us to return better
/// deserialization error messages.
pub trait FromQueryParameter: Sized {
    type Err;
    fn from_query_param(p: &str) -> Result<Self, Self::Err>;
}

/// Implement `FromQueryParameter` for the given type using its `FromStr`
/// trait implementation.
macro_rules! impl_from_query_param_from_str {
    ($type:ty) => {
        impl FromQueryParameter for $type {
            type Err = <$type as FromStr>::Err;
            fn from_query_param(p: &str) -> Result<Self, Self::Err> {
                p.parse()
            }
        }
    };
}
impl_from_query_param_from_str!(Kind);
impl_from_query_param_from_str!(Status);
impl_from_query_param_from_str!(IndexUid);

/// Implement `FromQueryParameter` for the given type using its `FromStr`
/// trait implementation, replacing the returned error with a struct
/// that wraps the original query parameter.
macro_rules! impl_from_query_param_wrap_original_value_in_error {
    ($type:ty, $err_type:path) => {
        impl FromQueryParameter for $type {
            type Err = $err_type;
            fn from_query_param(p: &str) -> Result<Self, Self::Err> {
                p.parse().map_err(|_| $err_type(p.to_owned()))
            }
        }
    };
}
impl_from_query_param_wrap_original_value_in_error!(usize, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(u32, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(bool, DeserrParseBoolError);

impl FromQueryParameter for String {
    type Err = Infallible;
    fn from_query_param(p: &str) -> Result<Self, Infallible> {
        Ok(p.to_owned())
    }
}
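Concretely (sketch):

    assert_eq!(usize::from_query_param("42").unwrap(), 42);
    // Failures keep the original input so the error message can echo it back.
    let err = bool::from_query_param("maybe").unwrap_err();
    assert_eq!(err.0, "maybe");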
meilisearch-types/src/error.rs
@@ -1,23 +1,16 @@
-use std::convert::Infallible;
-use std::marker::PhantomData;
 use std::{fmt, io};
 
 use actix_web::http::StatusCode;
 use actix_web::{self as aweb, HttpResponseBuilder};
 use aweb::rt::task::JoinError;
 use convert_case::Casing;
-use deserr::{DeserializeError, IntoValue, MergeWithError, ValuePointerRef};
 use milli::heed::{Error as HeedError, MdbError};
 use serde::{Deserialize, Serialize};
 
-use self::deserr_codes::MissingIndexUid;
-
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
-#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 pub struct ResponseError {
     #[serde(skip)]
-    #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
     code: StatusCode,
     message: String,
     #[serde(rename = "code")]

@@ -36,7 +29,7 @@ impl ResponseError {
         Self {
             code: code.http(),
             message,
-            error_code: code.err_code().error_name,
+            error_code: code.name(),
             error_type: code.type_(),
             error_link: code.url(),
         }

@@ -97,9 +90,9 @@ pub trait ErrorCode {
 
 #[allow(clippy::enum_variant_names)]
 enum ErrorType {
-    InternalError,
-    InvalidRequestError,
-    AuthenticationError,
+    Internal,
+    InvalidRequest,
+    Auth,
     System,
 }

@@ -108,14 +101,24 @@ impl fmt::Display for ErrorType {
         use ErrorType::*;
 
         match self {
-            InternalError => write!(f, "internal"),
-            InvalidRequestError => write!(f, "invalid_request"),
-            AuthenticationError => write!(f, "auth"),
+            Internal => write!(f, "internal"),
+            InvalidRequest => write!(f, "invalid_request"),
+            Auth => write!(f, "auth"),
             System => write!(f, "system"),
         }
     }
 }
 
+/// Implement all the error codes.
+///
+/// 1. Make an enum `Code` where each error code is a variant
+/// 2. Implement the `http`, `name`, and `type_` method on the enum
+/// 3. Make a unit type for each error code in the module `deserr_codes`.
+///
+/// The unit type's purpose is to be used as a marker type parameter, e.g.
+/// `DeserrJsonError<MyErrorCode>`. It implements `Default` and `ErrorCode`,
+/// so we can get a value of the `Code` enum with the correct variant by calling
+/// `MyErrorCode::default().error_code()`.
 macro_rules! make_error_codes {
     ($($code_ident:ident, $err_type:ident, $status:ident);*) => {
         #[derive(Debug, Clone, Copy, PartialEq, Eq)]

@@ -123,37 +126,36 @@ macro_rules! make_error_codes {
         $($code_ident),*
         }
         impl Code {
-            /// associate a `Code` variant to the actual ErrCode
-            fn err_code(&self) -> ErrCode {
-                match self {
-                    $(
-                        Code::$code_ident => {
-                            ErrCode::$err_type( stringify!($code_ident).to_case(convert_case::Case::Snake), StatusCode::$status)
-                        }
-                    )*
-                }
-            }
             /// return the HTTP status code associated with the `Code`
             fn http(&self) -> StatusCode {
-                self.err_code().status_code
+                match self {
+                    $(
+                        Code::$code_ident => StatusCode::$status
+                    ),*
+                }
             }
 
             /// return error name, used as error code
             fn name(&self) -> String {
-                self.err_code().error_name.to_string()
+                match self {
+                    $(
+                        Code::$code_ident => stringify!($code_ident).to_case(convert_case::Case::Snake)
+                    ),*
+                }
             }
 
             /// return the error type
             fn type_(&self) -> String {
-                self.err_code().error_type.to_string()
+                match self {
+                    $(
+                        Code::$code_ident => ErrorType::$err_type.to_string()
+                    ),*
+                }
             }
 
             /// return the doc url associated with the error
             fn url(&self) -> String {
-                format!(
-                    "https://docs.meilisearch.com/errors#{}",
-                    self.name().to_case(convert_case::Case::Kebab)
-                )
+                format!("https://docs.meilisearch.com/errors#{}", self.name())
             }
         }
         pub mod deserr_codes {
@@ -170,146 +172,120 @@ macro_rules! make_error_codes {
         }
     }
 }
 
+// An exhaustive list of all the error codes used by meilisearch.
 make_error_codes! {
-ApiKeyAlreadyExists , invalid , CONFLICT ;
+ApiKeyAlreadyExists , InvalidRequest , CONFLICT ;
-ApiKeyNotFound , invalid , NOT_FOUND ;
+ApiKeyNotFound , InvalidRequest , NOT_FOUND ;
-BadParameter , invalid , BAD_REQUEST;
+BadParameter , InvalidRequest , BAD_REQUEST;
-BadRequest , invalid , BAD_REQUEST;
+BadRequest , InvalidRequest , BAD_REQUEST;
-DatabaseSizeLimitReached , internal , INTERNAL_SERVER_ERROR;
+DatabaseSizeLimitReached , Internal , INTERNAL_SERVER_ERROR;
-DocumentNotFound , invalid , NOT_FOUND;
+DocumentNotFound , InvalidRequest , NOT_FOUND;
-DumpAlreadyProcessing , invalid , CONFLICT;
+DumpAlreadyProcessing , InvalidRequest , CONFLICT;
-DumpNotFound , invalid , NOT_FOUND;
+DumpNotFound , InvalidRequest , NOT_FOUND;
-DumpProcessFailed , internal , INTERNAL_SERVER_ERROR;
+DumpProcessFailed , Internal , INTERNAL_SERVER_ERROR;
-DuplicateIndexFound , invalid , BAD_REQUEST;
+DuplicateIndexFound , InvalidRequest , BAD_REQUEST;
+ImmutableApiKeyActions , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyUid , invalid , BAD_REQUEST;
+ImmutableApiKeyCreatedAt , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyKey , invalid , BAD_REQUEST;
+ImmutableApiKeyExpiresAt , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyActions , invalid , BAD_REQUEST;
+ImmutableApiKeyIndexes , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyIndexes , invalid , BAD_REQUEST;
+ImmutableApiKeyKey , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyExpiresAt , invalid , BAD_REQUEST;
+ImmutableApiKeyUid , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyCreatedAt , invalid , BAD_REQUEST;
+ImmutableApiKeyUpdatedAt , InvalidRequest , BAD_REQUEST;
-ImmutableApiKeyUpdatedAt , invalid , BAD_REQUEST;
+ImmutableIndexCreatedAt , InvalidRequest , BAD_REQUEST;
+ImmutableIndexUid , InvalidRequest , BAD_REQUEST;
-ImmutableIndexUid , invalid , BAD_REQUEST;
+ImmutableIndexUpdatedAt , InvalidRequest , BAD_REQUEST;
-ImmutableIndexCreatedAt , invalid , BAD_REQUEST;
+IndexAlreadyExists , InvalidRequest , CONFLICT ;
-ImmutableIndexUpdatedAt , invalid , BAD_REQUEST;
+IndexCreationFailed , Internal , INTERNAL_SERVER_ERROR;
+IndexNotFound , InvalidRequest , NOT_FOUND;
-IndexAlreadyExists , invalid , CONFLICT ;
+IndexPrimaryKeyAlreadyExists , InvalidRequest , BAD_REQUEST ;
-IndexCreationFailed , internal , INTERNAL_SERVER_ERROR;
+IndexPrimaryKeyMultipleCandidatesFound, InvalidRequest , BAD_REQUEST;
-IndexNotFound , invalid , NOT_FOUND;
+IndexPrimaryKeyNoCandidateFound , InvalidRequest , BAD_REQUEST ;
-IndexPrimaryKeyAlreadyExists , invalid , BAD_REQUEST ;
+Internal , Internal , INTERNAL_SERVER_ERROR ;
-IndexPrimaryKeyNoCandidateFound , invalid , BAD_REQUEST ;
+InvalidApiKey , Auth , FORBIDDEN ;
-IndexPrimaryKeyMultipleCandidatesFound, invalid , BAD_REQUEST;
+InvalidApiKeyActions , InvalidRequest , BAD_REQUEST ;
-Internal , internal , INTERNAL_SERVER_ERROR ;
+InvalidApiKeyDescription , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyActions , invalid , BAD_REQUEST ;
+InvalidApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyDescription , invalid , BAD_REQUEST ;
+InvalidApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyExpiresAt , invalid , BAD_REQUEST ;
+InvalidApiKeyLimit , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyIndexes , invalid , BAD_REQUEST ;
+InvalidApiKeyName , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyLimit , invalid , BAD_REQUEST ;
+InvalidApiKeyOffset , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyName , invalid , BAD_REQUEST ;
+InvalidApiKeyUid , InvalidRequest , BAD_REQUEST ;
-InvalidApiKeyOffset , invalid , BAD_REQUEST ;
+InvalidContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
-InvalidApiKeyUid , invalid , BAD_REQUEST ;
+InvalidDocumentFields , InvalidRequest , BAD_REQUEST ;
-InvalidApiKey , authentication, FORBIDDEN ;
+InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
-InvalidContentType , invalid , UNSUPPORTED_MEDIA_TYPE ;
+InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
-InvalidDocumentFields , invalid , BAD_REQUEST ;
+InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ;
-InvalidDocumentGeoField , invalid , BAD_REQUEST ;
+InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ;
-InvalidDocumentId , invalid , BAD_REQUEST ;
+InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
-InvalidDocumentLimit , invalid , BAD_REQUEST ;
+InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
-InvalidDocumentOffset , invalid , BAD_REQUEST ;
+InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
-InvalidIndexLimit , invalid , BAD_REQUEST ;
+InvalidIndexUid , InvalidRequest , BAD_REQUEST ;
-InvalidIndexOffset , invalid , BAD_REQUEST ;
+InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ;
-InvalidIndexPrimaryKey , invalid , BAD_REQUEST ;
+InvalidSearchAttributesToHighlight , InvalidRequest , BAD_REQUEST ;
-InvalidIndexUid , invalid , BAD_REQUEST ;
+InvalidSearchAttributesToRetrieve , InvalidRequest , BAD_REQUEST ;
-InvalidMinWordLengthForTypo , invalid , BAD_REQUEST ;
+InvalidSearchCropLength , InvalidRequest , BAD_REQUEST ;
-InvalidSearchAttributesToCrop , invalid , BAD_REQUEST ;
+InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ;
-InvalidSearchAttributesToHighlight , invalid , BAD_REQUEST ;
+InvalidSearchFacets , InvalidRequest , BAD_REQUEST ;
-InvalidSearchAttributesToRetrieve , invalid , BAD_REQUEST ;
+InvalidSearchFilter , InvalidRequest , BAD_REQUEST ;
-InvalidSearchCropLength , invalid , BAD_REQUEST ;
+InvalidSearchHighlightPostTag , InvalidRequest , BAD_REQUEST ;
-InvalidSearchCropMarker , invalid , BAD_REQUEST ;
+InvalidSearchHighlightPreTag , InvalidRequest , BAD_REQUEST ;
-InvalidSearchFacets , invalid , BAD_REQUEST ;
+InvalidSearchHitsPerPage , InvalidRequest , BAD_REQUEST ;
-InvalidSearchFilter , invalid , BAD_REQUEST ;
+InvalidSearchLimit , InvalidRequest , BAD_REQUEST ;
-InvalidSearchHighlightPostTag , invalid , BAD_REQUEST ;
+InvalidSearchMatchingStrategy , InvalidRequest , BAD_REQUEST ;
-InvalidSearchHighlightPreTag , invalid , BAD_REQUEST ;
+InvalidSearchOffset , InvalidRequest , BAD_REQUEST ;
-InvalidSearchHitsPerPage , invalid , BAD_REQUEST ;
+InvalidSearchPage , InvalidRequest , BAD_REQUEST ;
-InvalidSearchLimit , invalid , BAD_REQUEST ;
+InvalidSearchQ , InvalidRequest , BAD_REQUEST ;
-InvalidSearchMatchingStrategy , invalid , BAD_REQUEST ;
+InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
-InvalidSearchOffset , invalid , BAD_REQUEST ;
+InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
-InvalidSearchPage , invalid , BAD_REQUEST ;
+InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
-InvalidSearchQ , invalid , BAD_REQUEST ;
+InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
-InvalidSearchShowMatchesPosition , invalid , BAD_REQUEST ;
+InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
-InvalidSearchSort , invalid , BAD_REQUEST ;
+InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsDisplayedAttributes , invalid , BAD_REQUEST ;
+InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsDistinctAttribute , invalid , BAD_REQUEST ;
+InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsFaceting , invalid , BAD_REQUEST ;
+InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsFilterableAttributes , invalid , BAD_REQUEST ;
+InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsPagination , invalid , BAD_REQUEST ;
+InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsRankingRules , invalid , BAD_REQUEST ;
+InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsSearchableAttributes , invalid , BAD_REQUEST ;
+InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsSortableAttributes , invalid , BAD_REQUEST ;
+InvalidState , Internal , INTERNAL_SERVER_ERROR ;
-InvalidSettingsStopWords , invalid , BAD_REQUEST ;
+InvalidStoreFile , Internal , INTERNAL_SERVER_ERROR ;
-InvalidSettingsSynonyms , invalid , BAD_REQUEST ;
+InvalidSwapDuplicateIndexFound , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsTypoTolerance , invalid , BAD_REQUEST ;
+InvalidSwapIndexes , InvalidRequest , BAD_REQUEST ;
-InvalidState , internal , INTERNAL_SERVER_ERROR ;
+InvalidTaskAfterEnqueuedAt , InvalidRequest , BAD_REQUEST ;
-InvalidStoreFile , internal , INTERNAL_SERVER_ERROR ;
+InvalidTaskAfterFinishedAt , InvalidRequest , BAD_REQUEST ;
-InvalidSwapDuplicateIndexFound , invalid , BAD_REQUEST ;
+InvalidTaskAfterStartedAt , InvalidRequest , BAD_REQUEST ;
-InvalidSwapIndexes , invalid , BAD_REQUEST ;
+InvalidTaskBeforeEnqueuedAt , InvalidRequest , BAD_REQUEST ;
-InvalidTaskAfterEnqueuedAt , invalid , BAD_REQUEST ;
+InvalidTaskBeforeFinishedAt , InvalidRequest , BAD_REQUEST ;
-InvalidTaskAfterFinishedAt , invalid , BAD_REQUEST ;
+InvalidTaskBeforeStartedAt , InvalidRequest , BAD_REQUEST ;
-InvalidTaskAfterStartedAt , invalid , BAD_REQUEST ;
+InvalidTaskCanceledBy , InvalidRequest , BAD_REQUEST ;
-InvalidTaskBeforeEnqueuedAt , invalid , BAD_REQUEST ;
+InvalidTaskFrom , InvalidRequest , BAD_REQUEST ;
-InvalidTaskBeforeFinishedAt , invalid , BAD_REQUEST ;
+InvalidTaskLimit , InvalidRequest , BAD_REQUEST ;
-InvalidTaskBeforeStartedAt , invalid , BAD_REQUEST ;
+InvalidTaskStatuses , InvalidRequest , BAD_REQUEST ;
-InvalidTaskCanceledBy , invalid , BAD_REQUEST ;
+InvalidTaskTypes , InvalidRequest , BAD_REQUEST ;
-InvalidTaskFrom , invalid , BAD_REQUEST ;
+InvalidTaskUids , InvalidRequest , BAD_REQUEST ;
-InvalidTaskLimit , invalid , BAD_REQUEST ;
+IoError , System , UNPROCESSABLE_ENTITY;
-InvalidTaskStatuses , invalid , BAD_REQUEST ;
+MalformedPayload , InvalidRequest , BAD_REQUEST ;
-InvalidTaskTypes , invalid , BAD_REQUEST ;
+MaxFieldsLimitExceeded , InvalidRequest , BAD_REQUEST ;
-InvalidTaskUids , invalid , BAD_REQUEST ;
+MissingApiKeyActions , InvalidRequest , BAD_REQUEST ;
-IoError , system , UNPROCESSABLE_ENTITY;
+MissingApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
-MalformedPayload , invalid , BAD_REQUEST ;
+MissingApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
-MaxFieldsLimitExceeded , invalid , BAD_REQUEST ;
+MissingAuthorizationHeader , Auth , UNAUTHORIZED ;
-MissingApiKeyActions , invalid , BAD_REQUEST ;
+MissingContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
-MissingApiKeyExpiresAt , invalid , BAD_REQUEST ;
+MissingDocumentId , InvalidRequest , BAD_REQUEST ;
-MissingApiKeyIndexes , invalid , BAD_REQUEST ;
+MissingIndexUid , InvalidRequest , BAD_REQUEST ;
-MissingAuthorizationHeader , authentication, UNAUTHORIZED ;
+MissingMasterKey , Auth , UNAUTHORIZED ;
-MissingContentType , invalid , UNSUPPORTED_MEDIA_TYPE ;
+MissingPayload , InvalidRequest , BAD_REQUEST ;
-MissingDocumentId , invalid , BAD_REQUEST ;
+MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
-MissingIndexUid , invalid , BAD_REQUEST ;
+MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
-MissingMasterKey , authentication, UNAUTHORIZED ;
+NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
-MissingPayload , invalid , BAD_REQUEST ;
+PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
-MissingTaskFilters , invalid , BAD_REQUEST ;
+TaskNotFound , InvalidRequest , NOT_FOUND ;
-NoSpaceLeftOnDevice , system , UNPROCESSABLE_ENTITY;
+TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
-PayloadTooLarge , invalid , PAYLOAD_TOO_LARGE ;
+UnretrievableDocument , Internal , BAD_REQUEST ;
-TaskNotFound , invalid , NOT_FOUND ;
+UnretrievableErrorCode , InvalidRequest , BAD_REQUEST ;
-TooManyOpenFiles , system , UNPROCESSABLE_ENTITY ;
+UnsupportedMediaType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE
-UnretrievableDocument , internal , BAD_REQUEST ;
-UnretrievableErrorCode , invalid , BAD_REQUEST ;
-UnsupportedMediaType , invalid , UNSUPPORTED_MEDIA_TYPE
 }
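With the `err_code` indirection gone, each accessor is generated as a direct `match` over `Code`. Illustrative sketch (from inside the module, since the accessors are private):

    assert_eq!(Code::ApiKeyNotFound.name(), "api_key_not_found");
    assert_eq!(Code::ApiKeyNotFound.http(), StatusCode::NOT_FOUND);
    // The docs anchor now uses the snake_case name instead of kebab-case.
    assert_eq!(Code::ApiKeyNotFound.url(), "https://docs.meilisearch.com/errors#api_key_not_found");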
-
-/// Internal structure providing a convenient way to create error codes
-struct ErrCode {
-    status_code: StatusCode,
-    error_type: ErrorType,
-    error_name: String,
-}
-
-impl ErrCode {
-    fn authentication(error_name: String, status_code: StatusCode) -> ErrCode {
-        ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
-    }
-
-    fn internal(error_name: String, status_code: StatusCode) -> ErrCode {
-        ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
-    }
-
-    fn invalid(error_name: String, status_code: StatusCode) -> ErrCode {
-        ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
-    }
-
-    fn system(error_name: String, status_code: StatusCode) -> ErrCode {
-        ErrCode { status_code, error_name, error_type: ErrorType::System }
-    }
-}
 
 impl ErrorCode for JoinError {
@@ -348,13 +324,13 @@ impl ErrorCode for milli::Error {
                 }
                 UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists,
                 UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
-                UserError::InvalidFacetsDistribution { .. } => Code::BadRequest,
+                UserError::InvalidFacetsDistribution { .. } => Code::InvalidSearchFacets,
                 UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
                 UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
                 UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
                 UserError::SortError(_) => Code::InvalidSearchSort,
                 UserError::InvalidMinTypoWordLenSetting(_, _) => {
-                    Code::InvalidMinWordLengthForTypo
+                    Code::InvalidSettingsTypoTolerance
                 }
             }
         }

@@ -367,6 +343,7 @@ impl ErrorCode for file_store::Error {
         match self {
             Self::IoError(e) => e.error_code(),
             Self::PersistError(e) => e.error_code(),
+            Self::CouldNotParseFileNameAsUtf8 | Self::UuidError(_) => Code::Internal,
         }
     }
 }

@@ -404,6 +381,7 @@ impl ErrorCode for io::Error {
     }
 }
 
+/// Unwrap a result, either its Ok or Err value.
 pub fn unwrap_any<T>(any: Result<T, T>) -> T {
     match any {
         Ok(any) => any,

@@ -411,90 +389,41 @@ pub fn unwrap_any<T>(any: Result<T, T>) -> T {
     }
 }
 
-#[cfg(feature = "test-traits")]
-mod strategy {
-    use proptest::strategy::Strategy;
-
-    use super::*;
-
-    pub(super) fn status_code_strategy() -> impl Strategy<Value = StatusCode> {
-        (100..999u16).prop_map(|i| StatusCode::from_u16(i).unwrap())
-    }
-}
-
-pub struct DeserrError<C: ErrorCode = deserr_codes::BadRequest> {
-    pub msg: String,
-    pub code: Code,
-    _phantom: PhantomData<C>,
-}
-impl<C: ErrorCode> std::fmt::Debug for DeserrError<C> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
-    }
-}
-
-impl<C: ErrorCode> std::fmt::Display for DeserrError<C> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.msg)
-    }
-}
-
-impl<C: ErrorCode> std::error::Error for DeserrError<C> {}
-impl<C: ErrorCode> ErrorCode for DeserrError<C> {
-    fn error_code(&self) -> Code {
-        self.code
-    }
-}
-
-impl<C1: ErrorCode, C2: ErrorCode> MergeWithError<DeserrError<C2>> for DeserrError<C1> {
-    fn merge(
-        _self_: Option<Self>,
-        other: DeserrError<C2>,
-        _merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
-    }
-}
-
-impl DeserrError<MissingIndexUid> {
-    pub fn missing_index_uid(field: &str, location: ValuePointerRef) -> Self {
-        let x = unwrap_any(Self::error::<Infallible>(
-            None,
-            deserr::ErrorKind::MissingField { field },
-            location,
-        ));
-        Self { msg: x.msg, code: MissingIndexUid.error_code(), _phantom: PhantomData }
-    }
-}
-
-impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrError<C> {
-    fn error<V: IntoValue>(
-        _self_: Option<Self>,
-        error: deserr::ErrorKind<V>,
-        location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        let msg = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
-
-        Err(DeserrError { msg, code: C::default().error_code(), _phantom: PhantomData })
-    }
-}
-
-pub struct TakeErrorMessage<T>(pub T);
-
-impl<C: Default + ErrorCode, T> MergeWithError<TakeErrorMessage<T>> for DeserrError<C>
-where
-    T: std::error::Error,
-{
-    fn merge(
-        _self_: Option<Self>,
-        other: TakeErrorMessage<T>,
-        merge_location: ValuePointerRef,
-    ) -> Result<Self, Self> {
-        DeserrError::error::<Infallible>(
-            None,
-            deserr::ErrorKind::Unexpected { msg: other.0.to_string() },
-            merge_location,
-        )
-    }
-}
+/// Deserialization when `deserr` cannot parse an API key date.
+#[derive(Debug)]
+pub struct ParseOffsetDateTimeError(pub String);
+impl fmt::Display for ParseOffsetDateTimeError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0)
+    }
+}
+
+/// Deserialization when `deserr` cannot parse a task date.
+#[derive(Debug)]
+pub struct InvalidTaskDateError(pub String);
+impl std::fmt::Display for InvalidTaskDateError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0)
+    }
+}
+
+/// Deserialization error when `deserr` cannot parse a String
+/// into a bool.
+#[derive(Debug)]
+pub struct DeserrParseBoolError(pub String);
+impl fmt::Display for DeserrParseBoolError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "could not parse `{}` as a boolean, expected either `true` or `false`", self.0)
+    }
+}
+
+/// Deserialization error when `deserr` cannot parse a String
+/// into an integer.
+#[derive(Debug)]
+pub struct DeserrParseIntError(pub String);
+impl fmt::Display for DeserrParseIntError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "could not parse `{}` as a positive integer", self.0)
+    }
+}
meilisearch-types/src/index_uid.rs
@@ -2,17 +2,15 @@ use std::error::Error;
 use std::fmt;
 use std::str::FromStr;
 
-use serde::{Deserialize, Serialize};
+use deserr::DeserializeFromValue;
 
 use crate::error::{Code, ErrorCode};
 
 /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
 /// bytes long
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
-#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
-pub struct IndexUid(
-    #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String,
-);
+#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
+#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)]
+pub struct IndexUid(String);
 
 impl IndexUid {
     pub fn new_unchecked(s: impl AsRef<str>) -> Self {

@@ -29,6 +27,12 @@ impl IndexUid {
     }
 }
 
+impl fmt::Display for IndexUid {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.0, f)
+    }
+}
+
 impl std::ops::Deref for IndexUid {
     type Target = str;
 
@ -1,54 +1,39 @@
|
|||||||
use std::convert::Infallible;
|
use std::convert::Infallible;
|
||||||
use std::fmt::Display;
|
|
||||||
use std::hash::Hash;
|
use std::hash::Hash;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValuePointerRef};
|
use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
|
||||||
use enum_iterator::Sequence;
|
use enum_iterator::Sequence;
|
||||||
|
use milli::update::Setting;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use time::format_description::well_known::Rfc3339;
|
use time::format_description::well_known::Rfc3339;
|
||||||
use time::macros::{format_description, time};
|
use time::macros::{format_description, time};
|
||||||
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::deserr::error_messages::immutable_field_error;
|
||||||
|
use crate::deserr::DeserrJsonError;
|
||||||
use crate::error::deserr_codes::*;
|
use crate::error::deserr_codes::*;
|
||||||
use crate::error::{unwrap_any, Code, DeserrError, ErrorCode, TakeErrorMessage};
|
use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
|
||||||
use crate::index_uid::{IndexUid, IndexUidFormatError};
|
use crate::index_uid::IndexUid;
|
||||||
use crate::star_or::StarOr;
|
use crate::star_or::StarOr;
|
||||||
|
|
||||||
pub type KeyId = Uuid;
|
pub type KeyId = Uuid;
|
||||||
|
|
||||||
impl<C: Default + ErrorCode> MergeWithError<IndexUidFormatError> for DeserrError<C> {
|
|
||||||
fn merge(
|
|
||||||
_self_: Option<Self>,
|
|
||||||
other: IndexUidFormatError,
|
|
||||||
merge_location: deserr::ValuePointerRef,
|
|
||||||
) -> std::result::Result<Self, Self> {
|
|
||||||
DeserrError::error::<Infallible>(
|
|
||||||
None,
|
|
||||||
deserr::ErrorKind::Unexpected { msg: other.to_string() },
|
|
||||||
merge_location,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_uuid_from_str(s: &str) -> Result<Uuid, TakeErrorMessage<uuid::Error>> {
|
|
||||||
Uuid::parse_str(s).map_err(TakeErrorMessage)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, DeserializeFromValue)]
|
#[derive(Debug, DeserializeFromValue)]
|
||||||
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
|
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
|
||||||
pub struct CreateApiKey {
|
pub struct CreateApiKey {
|
||||||
#[deserr(error = DeserrError<InvalidApiKeyDescription>)]
|
#[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
#[deserr(error = DeserrError<InvalidApiKeyName>)]
|
#[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
|
||||||
pub name: Option<String>,
|
pub name: Option<String>,
|
||||||
#[deserr(default = Uuid::new_v4(), error = DeserrError<InvalidApiKeyUid>, from(&String) = parse_uuid_from_str -> TakeErrorMessage<uuid::Error>)]
|
#[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, from(&String) = Uuid::from_str -> uuid::Error)]
|
||||||
pub uid: KeyId,
|
pub uid: KeyId,
|
||||||
#[deserr(error = DeserrError<InvalidApiKeyActions>)]
|
#[deserr(error = DeserrJsonError<InvalidApiKeyActions>, missing_field_error = DeserrJsonError::missing_api_key_actions)]
|
||||||
pub actions: Vec<Action>,
|
pub actions: Vec<Action>,
|
||||||
#[deserr(error = DeserrError<InvalidApiKeyIndexes>)]
|
#[deserr(error = DeserrJsonError<InvalidApiKeyIndexes>, missing_field_error = DeserrJsonError::missing_api_key_indexes)]
|
||||||
pub indexes: Vec<StarOr<IndexUid>>,
|
pub indexes: Vec<StarOr<IndexUid>>,
|
||||||
#[deserr(error = DeserrError<InvalidApiKeyExpiresAt>, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage<ParseOffsetDateTimeError>)]
|
#[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
|
||||||
pub expires_at: Option<OffsetDateTime>,
|
pub expires_at: Option<OffsetDateTime>,
|
||||||
}
|
}
|
||||||
impl CreateApiKey {
|
impl CreateApiKey {
|
||||||
@ -72,32 +57,29 @@ fn deny_immutable_fields_api_key(
|
|||||||
field: &str,
|
field: &str,
|
||||||
accepted: &[&str],
|
accepted: &[&str],
|
||||||
location: ValuePointerRef,
|
location: ValuePointerRef,
|
||||||
) -> DeserrError {
|
) -> DeserrJsonError {
|
||||||
let mut error = unwrap_any(DeserrError::<BadRequest>::error::<Infallible>(
|
match field {
|
||||||
|
"uid" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUid),
|
||||||
|
"actions" => immutable_field_error(field, accepted, Code::ImmutableApiKeyActions),
|
||||||
|
"indexes" => immutable_field_error(field, accepted, Code::ImmutableApiKeyIndexes),
|
||||||
|
"expiresAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyExpiresAt),
|
||||||
|
"createdAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyCreatedAt),
|
||||||
|
"updatedAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUpdatedAt),
|
||||||
|
_ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
|
||||||
None,
|
None,
|
||||||
deserr::ErrorKind::UnknownKey { key: field, accepted },
|
deserr::ErrorKind::UnknownKey { key: field, accepted },
|
||||||
location,
|
location,
|
||||||
));
|
)),
|
||||||
|
}
|
||||||
error.code = match field {
|
|
||||||
"uid" => Code::ImmutableApiKeyUid,
|
|
||||||
"actions" => Code::ImmutableApiKeyActions,
|
|
||||||
"indexes" => Code::ImmutableApiKeyIndexes,
|
|
||||||
"expiresAt" => Code::ImmutableApiKeyExpiresAt,
|
|
||||||
"createdAt" => Code::ImmutableApiKeyCreatedAt,
|
|
||||||
"updatedAt" => Code::ImmutableApiKeyUpdatedAt,
|
|
||||||
_ => Code::BadRequest,
|
|
||||||
};
|
|
||||||
error
|
|
||||||
}
|
}
|
||||||
|
|
||||||
 #[derive(Debug, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
 pub struct PatchApiKey {
-    #[deserr(error = DeserrError<InvalidApiKeyDescription>)]
-    pub description: Option<String>,
-    #[deserr(error = DeserrError<InvalidApiKeyName>)]
-    pub name: Option<String>,
+    #[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
+    pub description: Setting<String>,
+    #[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
+    pub name: Setting<String>,
 }

 #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
@@ -149,46 +131,40 @@ impl Key {
     }
 }

-#[derive(Debug)]
-pub struct ParseOffsetDateTimeError(String);
-impl Display for ParseOffsetDateTimeError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0)
-    }
-}
-impl std::error::Error for ParseOffsetDateTimeError {}

 fn parse_expiration_date(
-    string: &str,
-) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<ParseOffsetDateTimeError>> {
-    let datetime = if let Ok(datetime) = OffsetDateTime::parse(string, &Rfc3339) {
+    string: Option<String>,
+) -> std::result::Result<Option<OffsetDateTime>, ParseOffsetDateTimeError> {
+    let Some(string) = string else {
+        return Ok(None)
+    };
+    let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) {
         datetime
     } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
-        string,
+        &string,
        format_description!(
            "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
        ),
    ) {
        primitive_datetime.assume_utc()
    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
-        string,
+        &string,
        format_description!(
            "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
        ),
    ) {
        primitive_datetime.assume_utc()
    } else if let Ok(date) = Date::parse(
-        string,
+        &string,
        format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"),
    ) {
        PrimitiveDateTime::new(date, time!(00:00)).assume_utc()
    } else {
-        return Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned())));
+        return Err(ParseOffsetDateTimeError(string));
    };
    if datetime > OffsetDateTime::now_utc() {
        Ok(Some(datetime))
    } else {
-        Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned())))
+        Err(ParseOffsetDateTimeError(string))
    }
 }

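For reference, the expiration parser above accepts RFC 3339 plus two primitive datetime layouts and a bare date interpreted as midnight UTC. A self-contained sketch of the same cascade, assuming the `time` crate (0.3) with its `parsing` and `macros` features enabled; the function name `parse_any` is illustrative:

    use time::format_description::well_known::Rfc3339;
    use time::macros::{format_description, time};
    use time::{Date, OffsetDateTime, PrimitiveDateTime};

    // Sketch of the parsing cascade above: RFC 3339 first, then
    // "YYYY-MM-DDTHH:MM:SS", then "YYYY-MM-DD HH:MM:SS", then "YYYY-MM-DD".
    fn parse_any(s: &str) -> Option<OffsetDateTime> {
        if let Ok(dt) = OffsetDateTime::parse(s, &Rfc3339) {
            return Some(dt);
        }
        if let Ok(dt) = PrimitiveDateTime::parse(
            s,
            format_description!("[year]-[month]-[day]T[hour]:[minute]:[second]"),
        ) {
            return Some(dt.assume_utc());
        }
        if let Ok(dt) = PrimitiveDateTime::parse(
            s,
            format_description!("[year]-[month]-[day] [hour]:[minute]:[second]"),
        ) {
            return Some(dt.assume_utc());
        }
        if let Ok(date) = Date::parse(s, format_description!("[year]-[month]-[day]")) {
            // A bare date is interpreted as midnight UTC, as in the code above.
            return Some(PrimitiveDateTime::new(date, time!(00:00)).assume_utc());
        }
        None
    }

    fn main() {
        assert!(parse_any("2025-12-31T00:00:00Z").is_some());
        assert!(parse_any("2025-12-31").is_some());
        assert!(parse_any("not a date").is_none());
    }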
@@ -1,4 +1,5 @@
 pub mod compression;
+pub mod deserr;
 pub mod document_formats;
 pub mod error;
 pub mod index_uid;
@@ -7,11 +8,10 @@ pub mod settings;
 pub mod star_or;
 pub mod tasks;
 pub mod versioning;

-pub use milli;
 pub use milli::{heed, Index};
 use uuid::Uuid;
 pub use versioning::VERSION_FILE_NAME;
+pub use {milli, serde_cs};

 pub type Document = serde_json::Map<String, serde_json::Value>;
 pub type InstanceUid = Uuid;
@@ -11,8 +11,9 @@ use milli::update::Setting;
 use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
 use serde::{Deserialize, Serialize, Serializer};

+use crate::deserr::DeserrJsonError;
 use crate::error::deserr_codes::*;
-use crate::error::{unwrap_any, DeserrError};
+use crate::error::unwrap_any;

 /// The maximimum number of results that the engine
 /// will be able to return in one search call.
@@ -66,26 +67,31 @@ fn validate_min_word_size_for_typo_setting<E: DeserializeError>(

 #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
 #[serde(deny_unknown_fields, rename_all = "camelCase")]
-#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrError<InvalidMinWordLengthForTypo>)]
+#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
 pub struct MinWordSizeTyposSetting {
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub one_typo: Setting<u8>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub two_typos: Setting<u8>,
 }

 #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
 #[serde(deny_unknown_fields, rename_all = "camelCase")]
-#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrError<InvalidMinWordLengthForTypo>>)]
+#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
 pub struct TypoSettings {
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub enabled: Setting<bool>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidMinWordLengthForTypo>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
     pub min_word_size_for_typos: Setting<MinWordSizeTyposSetting>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub disable_on_words: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub disable_on_attributes: Setting<BTreeSet<String>>,
 }

@@ -94,6 +100,7 @@ pub struct TypoSettings {
 #[deserr(rename_all = camelCase, deny_unknown_fields)]
 pub struct FacetingSettings {
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub max_values_per_facet: Setting<usize>,
 }

@@ -102,10 +109,11 @@ pub struct FacetingSettings {
 #[deserr(rename_all = camelCase, deny_unknown_fields)]
 pub struct PaginationSettings {
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(default)]
     pub max_total_hits: Setting<usize>,
 }

-impl MergeWithError<milli::CriterionError> for DeserrError<InvalidSettingsRankingRules> {
+impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRankingRules> {
     fn merge(
         _self_: Option<Self>,
         other: milli::CriterionError,
@@ -128,14 +136,14 @@ impl MergeWithError<milli::CriterionError> for DeserrError<InvalidSettingsRankin
     rename_all = "camelCase",
     bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
 )]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct Settings<T> {
     #[serde(
         default,
         serialize_with = "serialize_with_wildcard",
         skip_serializing_if = "Setting::is_not_set"
     )]
-    #[deserr(error = DeserrError<InvalidSettingsDisplayedAttributes>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
     pub displayed_attributes: Setting<Vec<String>>,

     #[serde(
@@ -143,35 +151,35 @@ pub struct Settings<T> {
         serialize_with = "serialize_with_wildcard",
         skip_serializing_if = "Setting::is_not_set"
     )]
-    #[deserr(error = DeserrError<InvalidSettingsSearchableAttributes>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
     pub searchable_attributes: Setting<Vec<String>>,

     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsFilterableAttributes>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
     pub filterable_attributes: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsSortableAttributes>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsSortableAttributes>)]
     pub sortable_attributes: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsRankingRules>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsRankingRules>)]
     pub ranking_rules: Setting<Vec<RankingRuleView>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsStopWords>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
     pub stop_words: Setting<BTreeSet<String>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsSynonyms>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
     pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsDistinctAttribute>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsDistinctAttribute>)]
     pub distinct_attribute: Setting<String>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsTypoTolerance>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
     pub typo_tolerance: Setting<TypoSettings>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsFaceting>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsFaceting>)]
     pub faceting: Setting<FacetingSettings>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(error = DeserrError<InvalidSettingsPagination>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSettingsPagination>)]
     pub pagination: Setting<PaginationSettings>,

     #[serde(skip)]
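The repeated `#[deserr(default)]` additions above make a missing JSON field deserialize to `Setting::NotSet` instead of failing. A simplified stand-in showing why the default matters; this local `Setting` enum only mimics milli's shape and is not the real definition:

    // Simplified stand-in for milli's `Setting<T>`: a missing field must
    // default to `NotSet` rather than aborting deserialization.
    #[derive(Debug, Default, PartialEq)]
    enum Setting<T> {
        Set(T),
        Reset,
        #[default]
        NotSet,
    }

    impl<T> Setting<T> {
        fn is_not_set(&self) -> bool {
            matches!(self, Setting::NotSet)
        }
    }

    fn main() {
        // A field the client never sent falls back to the default...
        let missing: Setting<usize> = Setting::default();
        assert!(missing.is_not_set());
        // ...while an explicit value is carried as `Set`.
        let set = Setting::Set(20usize);
        assert_eq!(set, Setting::Set(20));
    }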
@@ -1,12 +1,12 @@
-use std::fmt::{Display, Formatter};
+use std::fmt;
 use std::marker::PhantomData;
-use std::ops::Deref;
 use std::str::FromStr;

 use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
 use serde::de::Visitor;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};

+use crate::deserr::query_params::FromQueryParameter;
 use crate::error::unwrap_any;

 /// A type that tries to match either a star (*) or
@@ -17,35 +17,6 @@ pub enum StarOr<T> {
     Other(T),
 }

-impl<E: DeserializeError, T> DeserializeFromValue<E> for StarOr<T>
-where
-    T: FromStr,
-    E: MergeWithError<T::Err>,
-{
-    fn deserialize_from_value<V: deserr::IntoValue>(
-        value: deserr::Value<V>,
-        location: deserr::ValuePointerRef,
-    ) -> Result<Self, E> {
-        match value {
-            deserr::Value::String(v) => match v.as_str() {
-                "*" => Ok(StarOr::Star),
-                v => match FromStr::from_str(v) {
-                    Ok(x) => Ok(StarOr::Other(x)),
-                    Err(e) => Err(unwrap_any(E::merge(None, e, location))),
-                },
-            },
-            _ => Err(unwrap_any(E::error::<V>(
-                None,
-                deserr::ErrorKind::IncorrectValueKind {
-                    actual: value,
-                    accepted: &[ValueKind::String],
-                },
-                location,
-            ))),
-        }
-    }
-}
-
 impl<T: FromStr> FromStr for StarOr<T> {
     type Err = T::Err;

@@ -57,23 +28,11 @@ impl<T: FromStr> FromStr for StarOr<T> {
     }
 }

-impl<T: Deref<Target = str>> Deref for StarOr<T> {
-    type Target = str;
-
-    fn deref(&self) -> &Self::Target {
+impl<T: fmt::Display> fmt::Display for StarOr<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Self::Star => "*",
-            Self::Other(t) => t.deref(),
-        }
-    }
-}
-
-impl<T: Into<String>> From<StarOr<T>> for String {
-    fn from(s: StarOr<T>) -> Self {
-        match s {
-            StarOr::Star => "*".to_string(),
-            StarOr::Other(t) => t.into(),
+            StarOr::Star => write!(f, "*"),
+            StarOr::Other(x) => fmt::Display::fmt(x, f),
         }
     }
 }
@@ -93,7 +52,7 @@ impl<T: PartialEq + Eq> Eq for StarOr<T> {}
 impl<'de, T, E> Deserialize<'de> for StarOr<T>
 where
     T: FromStr<Err = E>,
-    E: Display,
+    E: fmt::Display,
 {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
@@ -109,11 +68,11 @@ where
 impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
 where
     T: FromStr<Err = FE>,
-    FE: Display,
+    FE: fmt::Display,
 {
     type Value = StarOr<T>;

-    fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> std::fmt::Result {
         formatter.write_str("a string")
     }

@@ -139,7 +98,7 @@ where

 impl<T> Serialize for StarOr<T>
 where
-    T: Deref<Target = str>,
+    T: ToString,
 {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
@@ -147,7 +106,222 @@ where
     {
         match self {
             StarOr::Star => serializer.serialize_str("*"),
-            StarOr::Other(other) => serializer.serialize_str(other.deref()),
+            StarOr::Other(other) => serializer.serialize_str(&other.to_string()),
+        }
+    }
+}
+
+impl<T, E> DeserializeFromValue<E> for StarOr<T>
+where
+    T: FromStr,
+    E: DeserializeError + MergeWithError<T::Err>,
+{
+    fn deserialize_from_value<V: deserr::IntoValue>(
+        value: deserr::Value<V>,
+        location: deserr::ValuePointerRef,
+    ) -> Result<Self, E> {
+        match value {
+            deserr::Value::String(v) => {
+                if v == "*" {
+                    Ok(StarOr::Star)
+                } else {
+                    match T::from_str(&v) {
+                        Ok(parsed) => Ok(StarOr::Other(parsed)),
+                        Err(e) => Err(unwrap_any(E::merge(None, e, location))),
+                    }
+                }
+            }
+            _ => Err(unwrap_any(E::error::<V>(
+                None,
+                deserr::ErrorKind::IncorrectValueKind {
+                    actual: value,
+                    accepted: &[ValueKind::String],
+                },
+                location,
+            ))),
+        }
+    }
+}
+
+/// A type representing the content of a query parameter that can either not exist,
+/// be equal to a star (*), or another value
+///
+/// It is a convenient alternative to `Option<StarOr<T>>`.
+#[derive(Debug, Default, Clone, Copy)]
+pub enum OptionStarOr<T> {
+    #[default]
+    None,
+    Star,
+    Other(T),
+}
+
+impl<T> OptionStarOr<T> {
+    pub fn is_some(&self) -> bool {
+        match self {
+            Self::None => false,
+            Self::Star => false,
+            Self::Other(_) => true,
+        }
+    }
+    pub fn merge_star_and_none(self) -> Option<T> {
+        match self {
+            Self::None | Self::Star => None,
+            Self::Other(x) => Some(x),
+        }
+    }
+    pub fn try_map<U, E, F: Fn(T) -> Result<U, E>>(self, map_f: F) -> Result<OptionStarOr<U>, E> {
+        match self {
+            OptionStarOr::None => Ok(OptionStarOr::None),
+            OptionStarOr::Star => Ok(OptionStarOr::Star),
+            OptionStarOr::Other(x) => map_f(x).map(OptionStarOr::Other),
+        }
+    }
+}
+
+impl<T> FromQueryParameter for OptionStarOr<T>
+where
+    T: FromQueryParameter,
+{
+    type Err = T::Err;
+    fn from_query_param(p: &str) -> Result<Self, Self::Err> {
+        match p {
+            "*" => Ok(OptionStarOr::Star),
+            s => T::from_query_param(s).map(OptionStarOr::Other),
+        }
+    }
+}
+
+impl<T, E> DeserializeFromValue<E> for OptionStarOr<T>
+where
+    E: DeserializeError + MergeWithError<T::Err>,
+    T: FromQueryParameter,
+{
+    fn deserialize_from_value<V: deserr::IntoValue>(
+        value: deserr::Value<V>,
+        location: deserr::ValuePointerRef,
+    ) -> Result<Self, E> {
+        match value {
+            deserr::Value::String(s) => match s.as_str() {
+                "*" => Ok(OptionStarOr::Star),
+                s => match T::from_query_param(s) {
+                    Ok(x) => Ok(OptionStarOr::Other(x)),
+                    Err(e) => Err(unwrap_any(E::merge(None, e, location))),
+                },
+            },
+            _ => Err(unwrap_any(E::error::<V>(
+                None,
+                deserr::ErrorKind::IncorrectValueKind {
+                    actual: value,
+                    accepted: &[ValueKind::String],
+                },
+                location,
+            ))),
+        }
+    }
+}
+
+/// A type representing the content of a query parameter that can either not exist, be equal to a star (*), or represent a list of other values
+#[derive(Debug, Default, Clone)]
+pub enum OptionStarOrList<T> {
+    #[default]
+    None,
+    Star,
+    List(Vec<T>),
+}
+
+impl<T> OptionStarOrList<T> {
+    pub fn is_some(&self) -> bool {
+        match self {
+            Self::None => false,
+            Self::Star => false,
+            Self::List(_) => true,
+        }
+    }
+    pub fn map<U, F: Fn(T) -> U>(self, map_f: F) -> OptionStarOrList<U> {
+        match self {
+            Self::None => OptionStarOrList::None,
+            Self::Star => OptionStarOrList::Star,
+            Self::List(xs) => OptionStarOrList::List(xs.into_iter().map(map_f).collect()),
+        }
+    }
+    pub fn try_map<U, E, F: Fn(T) -> Result<U, E>>(
+        self,
+        map_f: F,
+    ) -> Result<OptionStarOrList<U>, E> {
+        match self {
+            Self::None => Ok(OptionStarOrList::None),
+            Self::Star => Ok(OptionStarOrList::Star),
+            Self::List(xs) => {
+                xs.into_iter().map(map_f).collect::<Result<Vec<_>, _>>().map(OptionStarOrList::List)
+            }
+        }
+    }
+    pub fn merge_star_and_none(self) -> Option<Vec<T>> {
+        match self {
+            Self::None | Self::Star => None,
+            Self::List(xs) => Some(xs),
+        }
+    }
+    pub fn push(&mut self, el: T) {
+        match self {
+            Self::None => *self = Self::List(vec![el]),
+            Self::Star => (),
+            Self::List(xs) => xs.push(el),
+        }
+    }
+}
+
+impl<T, E> DeserializeFromValue<E> for OptionStarOrList<T>
+where
+    E: DeserializeError + MergeWithError<T::Err>,
+    T: FromQueryParameter,
+{
+    fn deserialize_from_value<V: deserr::IntoValue>(
+        value: deserr::Value<V>,
+        location: deserr::ValuePointerRef,
+    ) -> Result<Self, E> {
+        match value {
+            deserr::Value::String(s) => {
+                let mut error = None;
+                let mut is_star = false;
+                // CS::<String>::from_str is infaillible
+                let cs = serde_cs::vec::CS::<String>::from_str(&s).unwrap();
+                let len_cs = cs.0.len();
+                let mut els = vec![];
+                for (i, el_str) in cs.into_iter().enumerate() {
+                    if el_str == "*" {
+                        is_star = true;
+                    } else {
+                        match T::from_query_param(&el_str) {
+                            Ok(el) => {
+                                els.push(el);
+                            }
+                            Err(e) => {
+                                let location =
+                                    if len_cs > 1 { location.push_index(i) } else { location };
+                                error = Some(E::merge(error, e, location)?);
+                            }
+                        }
+                    }
+                }
+                if let Some(error) = error {
+                    return Err(error);
+                }
+
+                if is_star {
+                    Ok(OptionStarOrList::Star)
+                } else {
+                    Ok(OptionStarOrList::List(els))
+                }
+            }
+            _ => Err(unwrap_any(E::error::<V>(
+                None,
+                deserr::ErrorKind::IncorrectValueKind {
+                    actual: value,
+                    accepted: &[ValueKind::String],
+                },
+                location,
+            ))),
         }
     }
 }
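The new `OptionStarOr`/`OptionStarOrList` types added above collapse `*` and "parameter absent" into the same outcome at the call site. A self-contained sketch of the `merge_star_and_none` semantics; the enum below mirrors the one added above but stands alone:

    // Local mirror of `OptionStarOrList`'s merge semantics: both "absent"
    // and "*" mean "no restriction", while a list restricts the result.
    #[derive(Debug, Default, Clone)]
    enum OptionStarOrList<T> {
        #[default]
        None,
        Star,
        List(Vec<T>),
    }

    impl<T> OptionStarOrList<T> {
        fn merge_star_and_none(self) -> Option<Vec<T>> {
            match self {
                Self::None | Self::Star => None,
                Self::List(xs) => Some(xs),
            }
        }
    }

    fn main() {
        // `?fields=*` and a missing `fields` parameter both mean "everything".
        let star: OptionStarOrList<String> = OptionStarOrList::Star;
        assert!(star.merge_star_and_none().is_none());

        // `?fields=title,overview` restricts retrieval to the listed fields.
        let list = OptionStarOrList::List(vec!["title".to_string(), "overview".to_string()]);
        assert_eq!(list.merge_star_and_none().map(|v| v.len()), Some(2));
    }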
@@ -1,3 +1,4 @@
+use core::fmt;
 use std::collections::HashSet;
 use std::fmt::{Display, Write};
 use std::str::FromStr;
@@ -9,7 +10,7 @@ use serde::{Deserialize, Serialize, Serializer};
 use time::{Duration, OffsetDateTime};
 use uuid::Uuid;

-use crate::error::{Code, ResponseError};
+use crate::error::ResponseError;
 use crate::keys::Key;
 use crate::settings::{Settings, Unchecked};
 use crate::InstanceUid;
@@ -332,7 +333,7 @@ impl Display for Status {
 }

 impl FromStr for Status {
-    type Err = ResponseError;
+    type Err = ParseTaskStatusError;

     fn from_str(status: &str) -> Result<Self, Self::Err> {
         if status.eq_ignore_ascii_case("enqueued") {
@@ -346,20 +347,27 @@ impl FromStr for Status {
         } else if status.eq_ignore_ascii_case("canceled") {
             Ok(Status::Canceled)
         } else {
-            Err(ResponseError::from_msg(
-                format!(
-                    "`{}` is not a status. Available status are {}.",
-                    status,
+            Err(ParseTaskStatusError(status.to_owned()))
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct ParseTaskStatusError(pub String);
+impl fmt::Display for ParseTaskStatusError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "`{}` is not a valid task status. Available statuses are {}.",
+            self.0,
             enum_iterator::all::<Status>()
                 .map(|s| format!("`{s}`"))
                 .collect::<Vec<String>>()
                 .join(", ")
-                ),
-                Code::BadRequest,
-            ))
-        }
+        )
     }
 }
+impl std::error::Error for ParseTaskStatusError {}

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Sequence)]
 #[serde(rename_all = "camelCase")]
@@ -412,7 +420,7 @@ impl Display for Kind {
     }
 }
 impl FromStr for Kind {
-    type Err = ResponseError;
+    type Err = ParseTaskKindError;

     fn from_str(kind: &str) -> Result<Self, Self::Err> {
         if kind.eq_ignore_ascii_case("indexCreation") {
@@ -438,10 +446,19 @@ impl FromStr for Kind {
         } else if kind.eq_ignore_ascii_case("snapshotCreation") {
             Ok(Kind::SnapshotCreation)
         } else {
-            Err(ResponseError::from_msg(
-                format!(
-                    "`{}` is not a type. Available types are {}.",
-                    kind,
+            Err(ParseTaskKindError(kind.to_owned()))
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct ParseTaskKindError(pub String);
+impl fmt::Display for ParseTaskKindError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "`{}` is not a valid task type. Available types are {}.",
+            self.0,
             enum_iterator::all::<Kind>()
                 .map(|k| format!(
                     "`{}`",
@@ -450,12 +467,10 @@ impl FromStr for Kind {
                 ))
                 .collect::<Vec<String>>()
                 .join(", ")
-                ),
-                Code::BadRequest,
-            ))
-        }
+        )
     }
 }
+impl std::error::Error for ParseTaskKindError {}

 #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum Details {
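Replacing `ResponseError` with the dedicated `ParseTaskStatusError`/`ParseTaskKindError` types above keeps `FromStr` independent of HTTP concerns; the caller now decides which response code a parse failure maps to. A minimal sketch of the pattern on a toy `Status` enum (all names here are illustrative):

    use std::fmt;
    use std::str::FromStr;

    #[derive(Debug, PartialEq)]
    enum Status {
        Enqueued,
        Succeeded,
    }

    // A dedicated parse error instead of a full HTTP response error:
    // callers decide how (and whether) to turn it into an HTTP status.
    #[derive(Debug)]
    struct ParseStatusError(String);

    impl fmt::Display for ParseStatusError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "`{}` is not a valid status.", self.0)
        }
    }

    impl std::error::Error for ParseStatusError {}

    impl FromStr for Status {
        type Err = ParseStatusError;

        fn from_str(s: &str) -> Result<Self, Self::Err> {
            if s.eq_ignore_ascii_case("enqueued") {
                Ok(Status::Enqueued)
            } else if s.eq_ignore_ascii_case("succeeded") {
                Ok(Status::Succeeded)
            } else {
                Err(ParseStatusError(s.to_owned()))
            }
        }
    }

    fn main() {
        assert_eq!("Enqueued".parse::<Status>().unwrap(), Status::Enqueued);
        assert!("nope".parse::<Status>().is_err());
    }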
@@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
 bytes = "1.2.1"
 clap = { version = "4.0.9", features = ["derive", "env"] }
 crossbeam-channel = "0.5.6"
-deserr = "0.1.4"
+deserr = "0.3.0"
 dump = { path = "../dump" }
 either = "1.8.0"
 env_logger = "0.9.1"
@@ -55,7 +55,6 @@ rustls = "0.20.6"
 rustls-pemfile = "1.0.1"
 segment = { version = "0.2.1", optional = true }
 serde = { version = "1.0.145", features = ["derive"] }
-serde-cs = "0.2.4"
 serde_json = { version = "1.0.85", features = ["preserve_order"] }
 sha2 = "0.10.6"
 siphasher = "0.3.10"
@@ -74,6 +73,8 @@ walkdir = "2.3.2"
 yaup = "0.2.0"
 serde_urlencoded = "0.7.1"
 actix-utils = "3.0.1"
+atty = "0.2.14"
+termcolor = "1.1.3"

 [dev-dependencies]
 actix-rt = "2.7.0"
@@ -9,7 +9,7 @@ use actix_web::HttpRequest;
 use byte_unit::Byte;
 use http::header::CONTENT_TYPE;
 use index_scheduler::IndexScheduler;
-use meilisearch_auth::SearchRules;
+use meilisearch_auth::{AuthController, SearchRules};
 use meilisearch_types::InstanceUid;
 use once_cell::sync::Lazy;
 use regex::Regex;
@@ -82,7 +82,11 @@ pub struct SegmentAnalytics {
 }

 impl SegmentAnalytics {
-    pub async fn new(opt: &Opt, index_scheduler: Arc<IndexScheduler>) -> Arc<dyn Analytics> {
+    pub async fn new(
+        opt: &Opt,
+        index_scheduler: Arc<IndexScheduler>,
+        auth_controller: AuthController,
+    ) -> Arc<dyn Analytics> {
         let instance_uid = super::find_user_id(&opt.db_path);
         let first_time_run = instance_uid.is_none();
         let instance_uid = instance_uid.unwrap_or_else(|| Uuid::new_v4());
@@ -136,7 +140,7 @@ impl SegmentAnalytics {
             get_tasks_aggregator: TasksAggregator::default(),
             health_aggregator: HealthAggregator::default(),
         });
-        tokio::spawn(segment.run(index_scheduler.clone()));
+        tokio::spawn(segment.run(index_scheduler.clone(), auth_controller.clone()));

         let this = Self { instance_uid, sender, user: user.clone() };

@@ -361,7 +365,7 @@ impl Segment {
         })
     }

-    async fn run(mut self, index_scheduler: Arc<IndexScheduler>) {
+    async fn run(mut self, index_scheduler: Arc<IndexScheduler>, auth_controller: AuthController) {
         const INTERVAL: Duration = Duration::from_secs(60 * 60); // one hour
         // The first batch must be sent after one hour.
         let mut interval =
@@ -370,7 +374,7 @@ impl Segment {
         loop {
             select! {
                 _ = interval.tick() => {
-                    self.tick(index_scheduler.clone()).await;
+                    self.tick(index_scheduler.clone(), auth_controller.clone()).await;
                 },
                 msg = self.inbox.recv() => {
                     match msg {
@@ -389,8 +393,14 @@ impl Segment {
         }
     }

-    async fn tick(&mut self, index_scheduler: Arc<IndexScheduler>) {
-        if let Ok(stats) = create_all_stats(index_scheduler.into(), &SearchRules::default()) {
+    async fn tick(
+        &mut self,
+        index_scheduler: Arc<IndexScheduler>,
+        auth_controller: AuthController,
+    ) {
+        if let Ok(stats) =
+            create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default())
+        {
             let _ = self
                 .batcher
                 .push(Identify {
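`Segment::run` above multiplexes an hourly flush with an inbox of analytics messages. A stripped-down sketch of that loop shape, assuming the `tokio` crate with the `full` feature; the real loop additionally threads the index scheduler and the newly added `AuthController` into `tick`:

    use std::time::Duration;
    use tokio::sync::mpsc;
    use tokio::{select, time};

    // Stripped-down shape of the analytics loop: flush on a timer,
    // aggregate messages as they arrive.
    async fn run(mut inbox: mpsc::Receiver<String>) {
        let mut interval = time::interval(Duration::from_secs(60 * 60)); // one hour
        loop {
            select! {
                _ = interval.tick() => {
                    // In the real code this is where batched stats are pushed.
                    println!("tick: flushing batched analytics");
                }
                msg = inbox.recv() => {
                    match msg {
                        Some(event) => println!("aggregating event: {event}"),
                        None => break, // all senders dropped; shut down
                    }
                }
            }
        }
    }

    #[tokio::main]
    async fn main() {
        let (tx, rx) = mpsc::channel(16);
        tx.send("search".to_string()).await.unwrap();
        drop(tx);
        run(rx).await;
    }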
@@ -2,7 +2,7 @@ use actix_web as aweb;
 use aweb::error::{JsonPayloadError, QueryPayloadError};
 use meilisearch_types::document_formats::{DocumentFormatError, PayloadType};
 use meilisearch_types::error::{Code, ErrorCode, ResponseError};
-use meilisearch_types::index_uid::IndexUidFormatError;
+use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError};
 use serde_json::Value;
 use tokio::task::JoinError;

@@ -24,10 +24,10 @@ pub enum MeilisearchHttpError {
     MissingPayload(PayloadType),
     #[error("The provided payload reached the size limit.")]
     PayloadTooLarge,
-    #[error("Two indexes must be given for each swap. The list `{:?}` contains {} indexes.",
-        .0, .0.len()
+    #[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.",
+        .0.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", "), .0.len()
     )]
-    SwapIndexPayloadWrongLength(Vec<String>),
+    SwapIndexPayloadWrongLength(Vec<IndexUid>),
     #[error(transparent)]
     IndexUid(#[from] IndexUidFormatError),
     #[error(transparent)]
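The reworked swap error above builds the index list by hand instead of relying on `{:?}`, which would leak Rust debug syntax once the payload becomes `Vec<IndexUid>`. A self-contained sketch of the difference; this local `IndexUid` newtype is a stand-in for the real type:

    // Before: `{:?}` on a Vec of newtypes leaks Rust internals into the
    // API error message. After: each uid is quoted and joined by hand.
    #[derive(Debug)]
    struct IndexUid(String);

    fn main() {
        let uids = vec![IndexUid("movies".into()), IndexUid("books".into())];

        // Debug-style formatting, roughly what the old message produced:
        let old = format!("The list `{:?}` contains {} indexes.", uids, uids.len());
        // -> The list `[IndexUid("movies"), IndexUid("books")]` contains 2 indexes.

        // Hand-built formatting, as the new message does:
        let joined =
            uids.iter().map(|uid| format!("\"{}\"", uid.0)).collect::<Vec<_>>().join(", ");
        let new = format!("The list `[{}]` contains {} indexes.", joined, uids.len());
        // -> The list `["movies", "books"]` contains 2 indexes.

        println!("{old}");
        println!("{new}");
    }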
@@ -1,4 +1,5 @@
 use std::env;
+use std::io::Write;
 use std::path::PathBuf;
 use std::sync::Arc;

@@ -9,6 +10,7 @@ use index_scheduler::IndexScheduler;
 use meilisearch::analytics::Analytics;
 use meilisearch::{analytics, create_app, setup_meilisearch, Opt};
 use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};
+use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

 #[global_allocator]
 static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
@@ -32,24 +34,19 @@ async fn main() -> anyhow::Result<()> {
     match (opt.env.as_ref(), &opt.master_key) {
         ("production", Some(master_key)) if master_key.len() < MASTER_KEY_MIN_SIZE => {
             anyhow::bail!(
-                "In production mode, the master key must be of at least {MASTER_KEY_MIN_SIZE} bytes, but the provided key is only {} bytes long
-
-We generated a secure master key for you (you can safely copy this token):
-
->> export MEILI_MASTER_KEY={} <<",
+                "The master key must be at least {MASTER_KEY_MIN_SIZE} bytes in a production environment. The provided key is only {} bytes.
+
+{}",
                 master_key.len(),
-                generate_master_key(),
+                generated_master_key_message(),
             )
         }
         ("production", None) => {
             anyhow::bail!(
-                "In production mode, you must provide a master key to secure your instance. It can be specified via the MEILI_MASTER_KEY environment variable or the --master-key launch option.
-
-We generated a secure master key for you (you can safely copy this token):
->> export MEILI_MASTER_KEY={} <<
-",
-                generate_master_key()
+                "You must provide a master key to secure your instance in a production environment. It can be specified via the MEILI_MASTER_KEY environment variable or the --master-key launch option.
+
+{}",
+                generated_master_key_message()
             )
         }
         // No error; continue
@@ -60,7 +57,8 @@ We generated a secure master key for you (you can safely copy this token):

     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     let analytics = if !opt.no_analytics {
-        analytics::SegmentAnalytics::new(&opt, index_scheduler.clone()).await
+        analytics::SegmentAnalytics::new(&opt, index_scheduler.clone(), auth_controller.clone())
+            .await
     } else {
         analytics::MockAnalytics::new(&opt)
     };
@@ -147,7 +145,7 @@ pub fn print_launch_resume(
         "
 Thank you for using Meilisearch!

-We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html
+\nWe collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html

 Anonymous telemetry:\t\"Enabled\""
     );
@@ -170,16 +168,10 @@ Anonymous telemetry:\t\"Enabled\""
             eprintln!("A master key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");

             if master_key.len() < MASTER_KEY_MIN_SIZE {
-                eprintln!();
-                log::warn!("The provided master key is too short (< {MASTER_KEY_MIN_SIZE} bytes)");
-                eprintln!("A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to the production environment.");
+                print_master_key_too_short_warning()
             }
         }
-        ("development", None) => {
-            log::warn!("No master key found; The server will accept unidentified requests");
-            eprintln!("If you need some protection in development mode, please export a key:\n\nexport MEILI_MASTER_KEY={}", generate_master_key());
-            eprintln!("\nA master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to the production environment.");
-        }
+        ("development", None) => print_missing_master_key_warning(),
         // unreachable because Opt::try_build above would have failed already if any other value had been produced
         _ => unreachable!(),
     }
@@ -190,3 +182,67 @@ Anonymous telemetry:\t\"Enabled\""
     eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html");
     eprintln!();
 }
+
+const WARNING_BG_COLOR: Option<Color> = Some(Color::Ansi256(178));
+const WARNING_FG_COLOR: Option<Color> = Some(Color::Ansi256(0));
+
+fn print_master_key_too_short_warning() {
+    let choice =
+        if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
+    let mut stderr = StandardStream::stderr(choice);
+    stderr
+        .set_color(
+            ColorSpec::new().set_bg(WARNING_BG_COLOR).set_fg(WARNING_FG_COLOR).set_bold(true),
+        )
+        .unwrap();
+    writeln!(stderr, "\n").unwrap();
+    writeln!(
+        stderr,
+        " Meilisearch started with a master key considered unsafe for use in a production environment.
+
+ A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to a production environment."
+    )
+    .unwrap();
+    stderr.reset().unwrap();
+    writeln!(stderr).unwrap();
+
+    eprintln!("\n{}", generated_master_key_message());
+    eprintln!(
+        "\nRestart Meilisearch with the argument above to use this new and secure master key."
+    )
+}
+
+fn print_missing_master_key_warning() {
+    let choice =
+        if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
+    let mut stderr = StandardStream::stderr(choice);
+    stderr
+        .set_color(
+            ColorSpec::new().set_bg(WARNING_BG_COLOR).set_fg(WARNING_FG_COLOR).set_bold(true),
+        )
+        .unwrap();
+    writeln!(stderr, "\n").unwrap();
+    writeln!(
+        stderr,
+        " No master key was found. The server will accept unidentified requests.
+
+ A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to a production environment."
+    )
+    .unwrap();
+    stderr.reset().unwrap();
+    writeln!(stderr).unwrap();
+
+    eprintln!("\n{}", generated_master_key_message());
+    eprintln!(
+        "\nRestart Meilisearch with the argument above to use this new and secure master key."
+    )
+}
+
+fn generated_master_key_message() -> String {
+    format!(
+        "We generated a new secure master key for you (you can safely use this token):
+
+>> --master-key {} <<",
+        generate_master_key()
+    )
+}
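The new warning helpers above colorize only when stderr is an interactive terminal. A compact sketch of that gate, assuming the `atty` and `termcolor` crates added to Cargo.toml earlier in this diff; the message text is illustrative:

    use std::io::Write;
    use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

    // Colorize warnings only when stderr is an interactive terminal;
    // plain output otherwise (pipes, log files, CI).
    fn print_warning(message: &str) {
        let choice =
            if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
        let mut stderr = StandardStream::stderr(choice);
        stderr
            .set_color(ColorSpec::new().set_bg(Some(Color::Ansi256(178))).set_bold(true))
            .unwrap();
        writeln!(stderr, "{message}").unwrap();
        stderr.reset().unwrap();
    }

    fn main() {
        print_warning("No master key was found. The server will accept unidentified requests.");
    }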
@@ -4,14 +4,15 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::DeserializeFromValue;
 use meilisearch_auth::error::AuthControllerError;
 use meilisearch_auth::AuthController;
+use meilisearch_types::deserr::query_params::Param;
+use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::{Code, DeserrError, ResponseError, TakeErrorMessage};
+use meilisearch_types::error::{Code, ResponseError};
 use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 use uuid::Uuid;

-use super::indexes::search::parse_usize_take_error_message;
 use super::PAGINATION_DEFAULT_LIMIT;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
@@ -36,7 +37,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {

 pub async fn create_api_key(
     auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
-    body: ValidatedJson<CreateApiKey, DeserrError>,
+    body: ValidatedJson<CreateApiKey, DeserrJsonError>,
     _req: HttpRequest,
 ) -> Result<HttpResponse, ResponseError> {
     let v = body.into_inner();
@@ -50,26 +51,23 @@ pub async fn create_api_key(
     Ok(HttpResponse::Created().json(res))
 }

-#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
+#[derive(DeserializeFromValue, Debug, Clone, Copy)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct ListApiKeys {
-    #[serde(default)]
-    #[deserr(error = DeserrError<InvalidApiKeyOffset>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    pub offset: usize,
-    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
-    #[deserr(error = DeserrError<InvalidApiKeyLimit>, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    pub limit: usize,
+    #[deserr(default, error = DeserrQueryParamError<InvalidApiKeyOffset>)]
+    pub offset: Param<usize>,
+    #[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidApiKeyLimit>)]
+    pub limit: Param<usize>,
 }
 impl ListApiKeys {
     fn as_pagination(self) -> Pagination {
-        Pagination { offset: self.offset, limit: self.limit }
+        Pagination { offset: self.offset.0, limit: self.limit.0 }
     }
 }

 pub async fn list_api_keys(
     auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
-    list_api_keys: QueryParameter<ListApiKeys, DeserrError>,
+    list_api_keys: QueryParameter<ListApiKeys, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
     let paginate = list_api_keys.into_inner().as_pagination();
     let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
@@ -106,7 +104,7 @@ pub async fn get_api_key(

 pub async fn patch_api_key(
     auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
-    body: ValidatedJson<PatchApiKey, DeserrError>,
+    body: ValidatedJson<PatchApiKey, DeserrJsonError>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
     let key = path.into_inner().key;
@@ -172,7 +170,7 @@ impl KeyView {
             key: generated_key,
             uid: key.uid,
             actions: key.actions,
-            indexes: key.indexes.into_iter().map(String::from).collect(),
+            indexes: key.indexes.into_iter().map(|x| x.to_string()).collect(),
             expires_at: key.expires_at,
             created_at: key.created_at,
             updated_at: key.updated_at,
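Query-string values always arrive as strings, which is why the handlers above now type numeric parameters as `Param<usize>` and unwrap them with `.0`. A simplified stand-in for that newtype (not the crate's real `Param`):

    use std::str::FromStr;

    // Simplified stand-in for the `Param<T>` newtype: it marks a value as
    // having been parsed out of the query string rather than from JSON.
    #[derive(Debug, Clone, Copy)]
    struct Param<T>(T);

    impl<T: FromStr> Param<T> {
        fn from_query_param(raw: &str) -> Result<Self, T::Err> {
            raw.parse().map(Param)
        }
    }

    fn main() {
        // "?offset=20" arrives as the string "20"...
        let offset: Param<usize> = Param::from_query_param("20").unwrap();
        // ...and the handler unwraps the inner value with `.0`.
        assert_eq!(offset.0, 20);

        // A non-numeric value is rejected at parse time.
        assert!(Param::<usize>::from_query_param("twenty").is_err());
    }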
@@ -1,5 +1,4 @@
 use std::io::ErrorKind;
-use std::num::ParseIntError;

 use actix_web::http::header::CONTENT_TYPE;
 use actix_web::web::Data;
@@ -9,25 +8,25 @@ use deserr::DeserializeFromValue;
 use futures::StreamExt;
 use index_scheduler::IndexScheduler;
 use log::debug;
+use meilisearch_types::deserr::query_params::Param;
+use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
+use meilisearch_types::error::ResponseError;
 use meilisearch_types::heed::RoTxn;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
-use meilisearch_types::star_or::StarOr;
+use meilisearch_types::star_or::OptionStarOrList;
 use meilisearch_types::tasks::KindWithContent;
 use meilisearch_types::{milli, Document, Index};
 use mime::Mime;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
-use serde_cs::vec::CS;
 use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};

-use super::search::parse_usize_take_error_message;
 use crate::analytics::{Analytics, DocumentDeletionKind};
 use crate::error::MeilisearchHttpError;
 use crate::error::PayloadError::ReceivePayload;
@@ -36,7 +35,7 @@ use crate::extractors::authentication::GuardedData;
 use crate::extractors::payload::Payload;
 use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::{fold_star_or, PaginationView, SummarizedTaskView};
+use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};

 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
     vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
@@ -81,23 +80,26 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }

-#[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[derive(Debug, DeserializeFromValue)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct GetDocument {
-    #[deserr(error = DeserrError<InvalidDocumentFields>)]
-    fields: Option<CS<StarOr<String>>>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
+    fields: OptionStarOrList<String>,
 }

 pub async fn get_document(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
-    path: web::Path<DocumentParam>,
-    params: QueryParameter<GetDocument, DeserrError>,
+    document_param: web::Path<DocumentParam>,
+    params: QueryParameter<GetDocument, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
-    let GetDocument { fields } = params.into_inner();
-    let attributes_to_retrieve = fields.and_then(fold_star_or);
+    let DocumentParam { index_uid, document_id } = document_param.into_inner();
+    let index_uid = IndexUid::try_from(index_uid)?;

-    let index = index_scheduler.index(&path.index_uid)?;
-    let document = retrieve_document(&index, &path.document_id, attributes_to_retrieve)?;
+    let GetDocument { fields } = params.into_inner();
+    let attributes_to_retrieve = fields.merge_star_and_none();
+
+    let index = index_scheduler.index(&index_uid)?;
+    let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
     debug!("returns: {:?}", document);
     Ok(HttpResponse::Ok().json(document))
 }
@@ -108,60 +110,68 @@ pub async fn delete_document(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    let DocumentParam { index_uid, document_id } = path.into_inner();
+    let index_uid = IndexUid::try_from(index_uid)?;
+
     analytics.delete_documents(DocumentDeletionKind::PerDocumentId, &req);

-    let DocumentParam { document_id, index_uid } = path.into_inner();
-    let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] };
+    let task = KindWithContent::DocumentDeletion {
+        index_uid: index_uid.to_string(),
+        documents_ids: vec![document_id],
+    };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }

-#[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[derive(Debug, DeserializeFromValue)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct BrowseQuery {
-    #[deserr(error = DeserrError<InvalidDocumentFields>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<ParseIntError>)]
-    offset: usize,
+    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentOffset>)]
+    offset: Param<usize>,
-    #[deserr(error = DeserrError<InvalidDocumentLimit>, default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<ParseIntError>)]
|
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidDocumentLimit>)]
|
||||||
limit: usize,
|
limit: Param<usize>,
|
||||||
#[deserr(error = DeserrError<InvalidDocumentLimit>)]
|
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
|
||||||
fields: Option<CS<StarOr<String>>>,
|
fields: OptionStarOrList<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_all_documents(
|
pub async fn get_all_documents(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
|
||||||
index_uid: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
params: QueryParameter<BrowseQuery, DeserrError>,
|
params: QueryParameter<BrowseQuery, DeserrQueryParamError>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
debug!("called with params: {:?}", params);
|
debug!("called with params: {:?}", params);
|
||||||
let BrowseQuery { limit, offset, fields } = params.into_inner();
|
let BrowseQuery { limit, offset, fields } = params.into_inner();
|
||||||
let attributes_to_retrieve = fields.and_then(fold_star_or);
|
let attributes_to_retrieve = fields.merge_star_and_none();
|
||||||
|
|
||||||
let index = index_scheduler.index(&index_uid)?;
|
let index = index_scheduler.index(&index_uid)?;
|
||||||
let (total, documents) = retrieve_documents(&index, offset, limit, attributes_to_retrieve)?;
|
let (total, documents) = retrieve_documents(&index, offset.0, limit.0, attributes_to_retrieve)?;
|
||||||
|
|
||||||
let ret = PaginationView::new(offset, limit, total as usize, documents);
|
let ret = PaginationView::new(offset.0, limit.0, total as usize, documents);
|
||||||
|
|
||||||
debug!("returns: {:?}", ret);
|
debug!("returns: {:?}", ret);
|
||||||
Ok(HttpResponse::Ok().json(ret))
|
Ok(HttpResponse::Ok().json(ret))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Debug, DeserializeFromValue)]
|
#[derive(Deserialize, Debug, DeserializeFromValue)]
|
||||||
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
|
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
|
||||||
pub struct UpdateDocumentsQuery {
|
pub struct UpdateDocumentsQuery {
|
||||||
#[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
|
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
|
||||||
pub primary_key: Option<String>,
|
pub primary_key: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn add_documents(
|
pub async fn add_documents(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
||||||
index_uid: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
params: QueryParameter<UpdateDocumentsQuery, DeserrError>,
|
params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
|
||||||
body: Payload,
|
body: Payload,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
|
|
||||||
debug!("called with params: {:?}", params);
|
debug!("called with params: {:?}", params);
|
||||||
let params = params.into_inner();
|
let params = params.into_inner();
|
||||||
|
|
||||||
@ -171,7 +181,7 @@ pub async fn add_documents(
|
|||||||
let task = document_addition(
|
let task = document_addition(
|
||||||
extract_mime_type(&req)?,
|
extract_mime_type(&req)?,
|
||||||
index_scheduler,
|
index_scheduler,
|
||||||
index_uid.into_inner(),
|
index_uid,
|
||||||
params.primary_key,
|
params.primary_key,
|
||||||
body,
|
body,
|
||||||
IndexDocumentsMethod::ReplaceDocuments,
|
IndexDocumentsMethod::ReplaceDocuments,
|
||||||
@ -184,14 +194,15 @@ pub async fn add_documents(
|
|||||||
|
|
||||||
pub async fn update_documents(
|
pub async fn update_documents(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
||||||
path: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
params: QueryParameter<UpdateDocumentsQuery, DeserrError>,
|
params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
|
||||||
body: Payload,
|
body: Payload,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
|
|
||||||
debug!("called with params: {:?}", params);
|
debug!("called with params: {:?}", params);
|
||||||
let index_uid = path.into_inner();
|
|
||||||
|
|
||||||
analytics.update_documents(¶ms, index_scheduler.index(&index_uid).is_err(), &req);
|
analytics.update_documents(¶ms, index_scheduler.index(&index_uid).is_err(), &req);
|
||||||
|
|
||||||
@ -213,7 +224,7 @@ pub async fn update_documents(
|
|||||||
async fn document_addition(
|
async fn document_addition(
|
||||||
mime_type: Option<Mime>,
|
mime_type: Option<Mime>,
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
||||||
index_uid: String,
|
index_uid: IndexUid,
|
||||||
primary_key: Option<String>,
|
primary_key: Option<String>,
|
||||||
mut body: Payload,
|
mut body: Payload,
|
||||||
method: IndexDocumentsMethod,
|
method: IndexDocumentsMethod,
|
||||||
@ -234,9 +245,6 @@ async fn document_addition(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// is your indexUid valid?
|
|
||||||
let index_uid = IndexUid::try_from(index_uid)?.into_inner();
|
|
||||||
|
|
||||||
let (uuid, mut update_file) = index_scheduler.create_update_file()?;
|
let (uuid, mut update_file) = index_scheduler.create_update_file()?;
|
||||||
|
|
||||||
let temp_file = match tempfile() {
|
let temp_file = match tempfile() {
|
||||||
@ -312,7 +320,7 @@ async fn document_addition(
|
|||||||
documents_count,
|
documents_count,
|
||||||
primary_key,
|
primary_key,
|
||||||
allow_index_creation,
|
allow_index_creation,
|
||||||
index_uid,
|
index_uid: index_uid.to_string(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let scheduler = index_scheduler.clone();
|
let scheduler = index_scheduler.clone();
|
||||||
@ -330,12 +338,13 @@ async fn document_addition(
|
|||||||
|
|
||||||
pub async fn delete_documents(
|
pub async fn delete_documents(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
||||||
path: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
body: web::Json<Vec<Value>>,
|
body: web::Json<Vec<Value>>,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
debug!("called with params: {:?}", body);
|
debug!("called with params: {:?}", body);
|
||||||
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
|
|
||||||
analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
|
analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
|
||||||
|
|
||||||
@ -345,7 +354,7 @@ pub async fn delete_documents(
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let task =
|
let task =
|
||||||
KindWithContent::DocumentDeletion { index_uid: path.into_inner(), documents_ids: ids };
|
KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids };
|
||||||
let task: SummarizedTaskView =
|
let task: SummarizedTaskView =
|
||||||
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
||||||
|
|
||||||
@ -355,13 +364,14 @@ pub async fn delete_documents(
|
|||||||
|
|
||||||
pub async fn clear_all_documents(
|
pub async fn clear_all_documents(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
||||||
path: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);
|
analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);
|
||||||
|
|
||||||
let task = KindWithContent::DocumentClear { index_uid: path.into_inner() };
|
let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
|
||||||
let task: SummarizedTaskView =
|
let task: SummarizedTaskView =
|
||||||
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
||||||
|
|
||||||
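The document routes above drop the hand-rolled string parsing (`parse_usize_take_error_message`) in favor of typed query parameters: `offset` and `limit` become `Param<usize>` and handlers read the value back through `.0`. A minimal, runnable sketch of that newtype pattern follows; the real `Param` lives in `meilisearch_types::deserr::query_params` and is wired into deserr, so this stand-in is an illustrative assumption, not the actual API.

// Sketch only: a transparent wrapper that parses a raw query-string token
// into its inner type via FromStr, assuming a `Param`-like newtype.
use std::str::FromStr;

#[derive(Debug, Clone, Copy, Default)]
pub struct Param<T>(pub T);

impl<T: FromStr> FromStr for Param<T> {
    type Err = T::Err;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Delegate to the inner type's parser and rewrap.
        s.parse().map(Param)
    }
}

fn main() {
    // Handlers unwrap the parsed value with `.0`, e.g. `offset.0`.
    let offset: Param<usize> = "20".parse().unwrap();
    assert_eq!(offset.0, 20);
}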
@@ -5,16 +5,18 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
 use index_scheduler::IndexScheduler;
 use log::debug;
+use meilisearch_types::deserr::error_messages::immutable_field_error;
+use meilisearch_types::deserr::query_params::Param;
+use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::{unwrap_any, Code, DeserrError, ResponseError, TakeErrorMessage};
+use meilisearch_types::error::{unwrap_any, Code, ResponseError};
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::{self, FieldDistribution, Index};
 use meilisearch_types::tasks::KindWithContent;
-use serde::{Deserialize, Serialize};
+use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;

-use self::search::parse_usize_take_error_message;
 use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -48,7 +50,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }

-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexView {
     pub uid: String,
@@ -71,26 +73,23 @@ impl IndexView {
     }
 }

-#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
+#[derive(DeserializeFromValue, Debug, Clone, Copy)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct ListIndexes {
-    #[serde(default)]
-    #[deserr(error = DeserrError<InvalidIndexOffset>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    pub offset: usize,
-    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
-    #[deserr(error = DeserrError<InvalidIndexLimit>, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    pub limit: usize,
+    #[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
+    pub offset: Param<usize>,
+    #[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidIndexLimit>)]
+    pub limit: Param<usize>,
 }
 impl ListIndexes {
     fn as_pagination(self) -> Pagination {
-        Pagination { offset: self.offset, limit: self.limit }
+        Pagination { offset: self.offset.0, limit: self.limit.0 }
     }
 }

 pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
-    paginate: QueryParameter<ListIndexes, DeserrError>,
+    paginate: QueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
     let search_rules = &index_scheduler.filters().search_rules;
     let indexes: Vec<_> = index_scheduler.indexes()?;
@@ -107,22 +106,21 @@ pub async fn list_indexes(
 }

 #[derive(DeserializeFromValue, Debug)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct IndexCreateRequest {
-    #[deserr(error = DeserrError<InvalidIndexUid>, missing_field_error = DeserrError::missing_index_uid)]
-    uid: String,
-    #[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
+    #[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
+    uid: IndexUid,
+    #[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
     primary_key: Option<String>,
 }

 pub async fn create_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
-    body: ValidatedJson<IndexCreateRequest, DeserrError>,
+    body: ValidatedJson<IndexCreateRequest, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let IndexCreateRequest { primary_key, uid } = body.into_inner();
-    let uid = IndexUid::try_from(uid)?.into_inner();

     let allow_index_creation = index_scheduler.filters().search_rules.is_index_authorized(&uid);
     if allow_index_creation {
@@ -132,7 +130,7 @@ pub async fn create_index(
         Some(&req),
     );

-    let task = KindWithContent::IndexCreation { index_uid: uid, primary_key };
+    let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

@@ -146,25 +144,23 @@ fn deny_immutable_fields_index(
     field: &str,
     accepted: &[&str],
     location: ValuePointerRef,
-) -> DeserrError {
-    let mut error = unwrap_any(DeserrError::<BadRequest>::error::<Infallible>(
+) -> DeserrJsonError {
+    match field {
+        "uid" => immutable_field_error(field, accepted, Code::ImmutableIndexUid),
+        "createdAt" => immutable_field_error(field, accepted, Code::ImmutableIndexCreatedAt),
+        "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableIndexUpdatedAt),
+        _ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
             None,
             deserr::ErrorKind::UnknownKey { key: field, accepted },
             location,
-    ));
+        )),

-    error.code = match field {
-        "uid" => Code::ImmutableIndexUid,
-        "createdAt" => Code::ImmutableIndexCreatedAt,
-        "updatedAt" => Code::ImmutableIndexUpdatedAt,
-        _ => Code::BadRequest,
-    };
-    error
     }
+}

 #[derive(DeserializeFromValue, Debug)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
 pub struct UpdateIndexRequest {
-    #[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
+    #[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
     primary_key: Option<String>,
 }

@@ -172,6 +168,8 @@ pub async fn get_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let index = index_scheduler.index(&index_uid)?;
     let index_view = IndexView::new(index_uid.into_inner(), &index)?;

@@ -182,12 +180,13 @@ pub async fn get_index(

 pub async fn update_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
-    path: web::Path<String>,
-    body: ValidatedJson<UpdateIndexRequest, DeserrError>,
+    index_uid: web::Path<String>,
+    body: ValidatedJson<UpdateIndexRequest, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", body);
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let body = body.into_inner();
     analytics.publish(
         "Index Updated".to_string(),
@@ -196,7 +195,7 @@ pub async fn update_index(
     );

     let task = KindWithContent::IndexUpdate {
-        index_uid: path.into_inner(),
+        index_uid: index_uid.into_inner(),
         primary_key: body.primary_key,
     };

@@ -211,6 +210,7 @@ pub async fn delete_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
@@ -224,6 +224,7 @@ pub async fn get_index_stats(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": true }), Some(&req));

     let stats = IndexStats::new((*index_scheduler).clone(), index_uid.into_inner())?;
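A recurring change across these handlers is that the raw `web::Path<String>` is now validated up front with `IndexUid::try_from(...)?` instead of deep inside helpers like `document_addition`. Below is a self-contained sketch of such a validated newtype; the real rules live in `meilisearch_types::index_uid`, so the charset and length checks here are assumptions for illustration only.

// Sketch only: a newtype that rejects invalid index uids at the boundary,
// assuming rules similar to Meilisearch's documented ones (alphanumerics,
// `-`, `_`, bounded length) — not the crate's actual implementation.
#[derive(Debug)]
pub struct IndexUid(String);

impl TryFrom<String> for IndexUid {
    type Error = String;

    fn try_from(uid: String) -> Result<Self, Self::Error> {
        let ok = !uid.is_empty()
            && uid.len() <= 400
            && uid.bytes().all(|b| b.is_ascii_alphanumeric() || b == b'-' || b == b'_');
        if ok {
            Ok(IndexUid(uid))
        } else {
            // The `?` in the handlers converts this into an HTTP 400 response.
            Err(format!("`{uid}` is not a valid index uid"))
        }
    }
}

fn main() {
    assert!(IndexUid::try_from("movies".to_string()).is_ok());
    assert!(IndexUid::try_from("bad uid!".to_string()).is_err());
}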
@@ -1,13 +1,14 @@
-use std::str::FromStr;
-
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_auth::IndexSearchRules;
+use meilisearch_types::deserr::query_params::Param;
+use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
-use serde_cs::vec::CS;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;

 use crate::analytics::{Analytics, SearchAggregator};
@@ -16,7 +17,6 @@ use crate::extractors::authentication::GuardedData;
 use crate::extractors::json::ValidatedJson;
 use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::from_string_to_option_take_error_message;
 use crate::search::{
     perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
     DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
@@ -31,54 +31,42 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }

-pub fn parse_usize_take_error_message(
-    s: &str,
-) -> Result<usize, TakeErrorMessage<std::num::ParseIntError>> {
-    usize::from_str(s).map_err(TakeErrorMessage)
-}
-
-pub fn parse_bool_take_error_message(
-    s: &str,
-) -> Result<bool, TakeErrorMessage<std::str::ParseBoolError>> {
-    s.parse().map_err(TakeErrorMessage)
-}
-
 #[derive(Debug, deserr::DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQueryGet {
-    #[deserr(error = DeserrError<InvalidSearchQ>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
     q: Option<String>,
-    #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    offset: usize,
+    #[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
+    offset: Param<usize>,
-    #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    limit: usize,
+    #[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]
+    limit: Param<usize>,
-    #[deserr(error = DeserrError<InvalidSearchPage>, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    page: Option<usize>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchPage>)]
+    page: Option<Param<usize>>,
-    #[deserr(error = DeserrError<InvalidSearchHitsPerPage>, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    hits_per_page: Option<usize>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchHitsPerPage>)]
+    hits_per_page: Option<Param<usize>>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToRetrieve>)]
     attributes_to_retrieve: Option<CS<String>>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToCrop>)]
     attributes_to_crop: Option<CS<String>>,
-    #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
-    crop_length: usize,
+    #[deserr(default = Param(DEFAULT_CROP_LENGTH()), error = DeserrQueryParamError<InvalidSearchCropLength>)]
+    crop_length: Param<usize>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToHighlight>)]
     attributes_to_highlight: Option<CS<String>>,
-    #[deserr(error = DeserrError<InvalidSearchFilter>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchFilter>)]
     filter: Option<String>,
-    #[deserr(error = DeserrError<InvalidSearchSort>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchSort>)]
     sort: Option<String>,
-    #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default, from(&String) = parse_bool_take_error_message -> TakeErrorMessage<std::str::ParseBoolError>)]
-    show_matches_position: bool,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowMatchesPosition>)]
+    show_matches_position: Param<bool>,
-    #[deserr(error = DeserrError<InvalidSearchFacets>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchFacets>)]
     facets: Option<CS<String>>,
-    #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
+    #[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPreTag>)]
     highlight_pre_tag: String,
-    #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
+    #[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPostTag>)]
     highlight_post_tag: String,
-    #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
+    #[deserr(default = DEFAULT_CROP_MARKER(), error = DeserrQueryParamError<InvalidSearchCropMarker>)]
     crop_marker: String,
-    #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchMatchingStrategy>)]
     matching_strategy: MatchingStrategy,
 }

@@ -94,17 +82,17 @@ impl From<SearchQueryGet> for SearchQuery {

     Self {
         q: other.q,
-        offset: other.offset,
-        limit: other.limit,
-        page: other.page,
-        hits_per_page: other.hits_per_page,
+        offset: other.offset.0,
+        limit: other.limit.0,
+        page: other.page.as_deref().copied(),
+        hits_per_page: other.hits_per_page.as_deref().copied(),
         attributes_to_retrieve: other.attributes_to_retrieve.map(|o| o.into_iter().collect()),
         attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
-        crop_length: other.crop_length,
+        crop_length: other.crop_length.0,
         attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
         filter,
         sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
-        show_matches_position: other.show_matches_position,
+        show_matches_position: other.show_matches_position.0,
         facets: other.facets.map(|o| o.into_iter().collect()),
         highlight_pre_tag: other.highlight_pre_tag,
         highlight_post_tag: other.highlight_post_tag,
@@ -162,11 +150,13 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
 pub async fn search_with_url_query(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    params: QueryParameter<SearchQueryGet, DeserrError>,
+    params: QueryParameter<SearchQueryGet, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let mut query: SearchQuery = params.into_inner().into();

     // Tenant token search_rules.
@@ -194,10 +184,12 @@ pub async fn search_with_url_query(
 pub async fn search_with_post(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    params: ValidatedJson<SearchQuery, DeserrError>,
+    params: ValidatedJson<SearchQuery, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let mut query = params.into_inner();
     debug!("search called with params: {:?}", query);

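In the `From<SearchQueryGet>` impl above, optional numeric parameters such as `page: Option<Param<usize>>` are flattened with `page.as_deref().copied()`. That compiles only because the wrapper derefs to its inner value; the sketch below demonstrates the mechanism with a stand-in `Param` (an assumption, not the real type).

// Sketch only: Deref lets Option::as_deref() see through the wrapper.
use std::ops::Deref;

pub struct Param<T>(pub T);

impl<T> Deref for Param<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

fn main() {
    let page: Option<Param<usize>> = Some(Param(2));
    // as_deref(): Option<Param<usize>> -> Option<&usize>; copied(): -> Option<usize>.
    let page: Option<usize> = page.as_deref().copied();
    assert_eq!(page, Some(2));
}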
@@ -2,7 +2,8 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
 use log::debug;
-use meilisearch_types::error::{DeserrError, ResponseError};
+use meilisearch_types::deserr::DeserrJsonError;
+use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
@@ -40,12 +41,14 @@ macro_rules! make_setting_route {
             >,
             index_uid: web::Path<String>,
         ) -> Result<HttpResponse, ResponseError> {
+            let index_uid = IndexUid::try_from(index_uid.into_inner())?;

             let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() };

             let allow_index_creation = index_scheduler.filters().allow_index_creation;
-            let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
             let task = KindWithContent::SettingsUpdate {
-                index_uid,
+                index_uid: index_uid.to_string(),
                 new_settings: Box::new(new_settings),
                 is_deletion: true,
                 allow_index_creation,
@@ -69,6 +72,8 @@ macro_rules! make_setting_route {
             req: HttpRequest,
             $analytics_var: web::Data<dyn Analytics>,
         ) -> std::result::Result<HttpResponse, ResponseError> {
+            let index_uid = IndexUid::try_from(index_uid.into_inner())?;

             let body = body.into_inner();

             $analytics(&body, &req);
@@ -82,9 +87,9 @@ macro_rules! make_setting_route {
             };

             let allow_index_creation = index_scheduler.filters().allow_index_creation;
-            let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
             let task = KindWithContent::SettingsUpdate {
-                index_uid,
+                index_uid: index_uid.to_string(),
                 new_settings: Box::new(new_settings),
                 is_deletion: false,
                 allow_index_creation,
@@ -105,6 +110,8 @@ macro_rules! make_setting_route {
             >,
             index_uid: actix_web::web::Path<String>,
         ) -> std::result::Result<HttpResponse, ResponseError> {
+            let index_uid = IndexUid::try_from(index_uid.into_inner())?;

             let index = index_scheduler.index(&index_uid)?;
             let rtxn = index.read_txn()?;
             let settings = settings(&index, &rtxn)?;
@@ -130,7 +137,7 @@ make_setting_route!(
     "/filterable-attributes",
     put,
     std::collections::BTreeSet<String>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsFilterableAttributes,
     >,
     filterable_attributes,
@@ -156,7 +163,7 @@ make_setting_route!(
     "/sortable-attributes",
     put,
     std::collections::BTreeSet<String>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsSortableAttributes,
     >,
     sortable_attributes,
@@ -182,7 +189,7 @@ make_setting_route!(
     "/displayed-attributes",
     put,
     Vec<String>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsDisplayedAttributes,
     >,
     displayed_attributes,
@@ -208,7 +215,7 @@ make_setting_route!(
     "/typo-tolerance",
     patch,
     meilisearch_types::settings::TypoSettings,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsTypoTolerance,
     >,
     typo_tolerance,
@@ -253,7 +260,7 @@ make_setting_route!(
     "/searchable-attributes",
     put,
     Vec<String>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsSearchableAttributes,
     >,
     searchable_attributes,
@@ -279,7 +286,7 @@ make_setting_route!(
     "/stop-words",
     put,
     std::collections::BTreeSet<String>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsStopWords,
     >,
     stop_words,
@@ -304,7 +311,7 @@ make_setting_route!(
     "/synonyms",
     put,
     std::collections::BTreeMap<String, Vec<String>>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsSynonyms,
     >,
     synonyms,
@@ -329,7 +336,7 @@ make_setting_route!(
     "/distinct-attribute",
     put,
     String,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsDistinctAttribute,
     >,
     distinct_attribute,
@@ -353,7 +360,7 @@ make_setting_route!(
     "/ranking-rules",
     put,
     Vec<meilisearch_types::settings::RankingRuleView>,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsRankingRules,
     >,
     ranking_rules,
@@ -384,7 +391,7 @@ make_setting_route!(
     "/faceting",
     patch,
     meilisearch_types::settings::FacetingSettings,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsFaceting,
     >,
     faceting,
@@ -409,7 +416,7 @@ make_setting_route!(
     "/pagination",
     patch,
     meilisearch_types::settings::PaginationSettings,
-    meilisearch_types::error::DeserrError<
+    meilisearch_types::deserr::DeserrJsonError<
         meilisearch_types::error::deserr_codes::InvalidSettingsPagination,
     >,
     pagination,
@@ -461,10 +468,12 @@ generate_configure!(
 pub async fn update_all(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    body: ValidatedJson<Settings<Unchecked>, DeserrError>,
+    body: ValidatedJson<Settings<Unchecked>, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let new_settings = body.into_inner();

     analytics.publish(
@@ -570,6 +579,8 @@ pub async fn get_all(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let index = index_scheduler.index(&index_uid)?;
     let rtxn = index.read_txn()?;
     let new_settings = settings(&index, &rtxn)?;
@@ -581,6 +592,8 @@ pub async fn delete_all(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let new_settings = Settings::cleared().into_unchecked();

     let allow_index_creation = index_scheduler.filters().allow_index_creation;
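Every settings hunk above touches the same generated code: `make_setting_route!` stamps out get/update/delete handlers per setting, which is why one error-type rename repeats across eleven invocations. A toy version of that macro pattern is sketched below; the names and bodies are invented for illustration, and the real macro additionally wires analytics, auth policies, and the task scheduler.

// Sketch only: one macro invocation per setting generates a module with
// handlers that already know their route and payload type.
macro_rules! make_setting_route {
    ($route:literal, $ty:ty, $attr:ident) => {
        pub mod $attr {
            pub fn describe() -> String {
                // Each generated module carries its route and payload type.
                format!("{} accepts {}", $route, stringify!($ty))
            }
        }
    };
}

make_setting_route!("/stop-words", Vec<String>, stop_words);
make_setting_route!("/distinct-attribute", String, distinct_attribute);

fn main() {
    println!("{}", stop_words::describe());
    println!("{}", distinct_attribute::describe());
}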
@@ -1,13 +1,12 @@
 use std::collections::BTreeMap;
-use std::str::FromStr;

 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::{IndexScheduler, Query};
 use log::debug;
-use meilisearch_types::error::{ResponseError, TakeErrorMessage};
+use meilisearch_auth::AuthController;
+use meilisearch_types::error::ResponseError;
 use meilisearch_types::settings::{Settings, Unchecked};
-use meilisearch_types::star_or::StarOr;
 use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
@@ -35,37 +34,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/swap-indexes").configure(swap_indexes::configure));
 }

-/// Extracts the raw values from the `StarOr` types and
-/// return None if a `StarOr::Star` is encountered.
-pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
-where
-    O: FromIterator<T>,
-{
-    content
-        .into_iter()
-        .map(|value| match value {
-            StarOr::Star => None,
-            StarOr::Other(val) => Some(val),
-        })
-        .collect()
-}
+const PAGINATION_DEFAULT_LIMIT: usize = 20;

-pub fn from_string_to_option<T, E>(input: &str) -> Result<Option<T>, E>
-where
-    T: FromStr<Err = E>,
-{
-    Ok(Some(input.parse()?))
-}
-pub fn from_string_to_option_take_error_message<T, E>(
-    input: &str,
-) -> Result<Option<T>, TakeErrorMessage<E>>
-where
-    T: FromStr<Err = E>,
-{
-    Ok(Some(input.parse().map_err(TakeErrorMessage)?))
-}

-const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
@@ -262,13 +231,15 @@ pub struct Stats {

 async fn get_stats(
     index_scheduler: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<IndexScheduler>>,
+    auth_controller: GuardedData<ActionPolicy<{ actions::STATS_GET }>, AuthController>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
     let search_rules = &index_scheduler.filters().search_rules;

-    let stats = create_all_stats((*index_scheduler).clone(), search_rules)?;
+    let stats =
+        create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), search_rules)?;

     debug!("returns: {:?}", stats);
     Ok(HttpResponse::Ok().json(stats))
@@ -276,6 +247,7 @@ async fn get_stats(

 pub fn create_all_stats(
     index_scheduler: Data<IndexScheduler>,
+    auth_controller: AuthController,
     search_rules: &meilisearch_auth::SearchRules,
 ) -> Result<Stats, ResponseError> {
     let mut last_task: Option<OffsetDateTime> = None;
@@ -285,6 +257,7 @@ pub fn create_all_stats(
         Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() },
         search_rules.authorized_indexes(),
     )?;
+    // accumulate the size of each index
     let processing_index = processing_task.first().and_then(|task| task.index_uid());
     for (name, index) in index_scheduler.indexes()? {
         if !search_rules.is_index_authorized(&name) {
@@ -305,6 +278,11 @@ pub fn create_all_stats(

         indexes.insert(name, stats);
     }
+
+    database_size += index_scheduler.size()?;
+    database_size += auth_controller.size()?;
+    database_size += index_scheduler.compute_update_file_size()?;
+
     let stats = Stats { database_size, last_update: last_task, indexes };
     Ok(stats)
 }
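The removed `fold_star_or` helper above collapses a field list containing a literal `*` into `None`, meaning "return all fields"; its successor, `merge_star_and_none` on `OptionStarOrList`, plays the same role. Here is the same fold as a standalone, runnable snippet, with a local `StarOr` mirroring `meilisearch_types::star_or` for illustration.

// Standalone version of the fold shown in the hunk above.
pub enum StarOr<T> {
    Star,
    Other(T),
}

pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
where
    O: FromIterator<T>,
{
    // Collecting iterator items of Option<T> into Option<O> short-circuits
    // to None as soon as one `Star` is encountered.
    content
        .into_iter()
        .map(|value| match value {
            StarOr::Star => None,
            StarOr::Other(val) => Some(val),
        })
        .collect()
}

fn main() {
    let fields = vec![StarOr::Other("title"), StarOr::Star];
    let folded: Option<Vec<&str>> = fold_star_or(fields);
    assert_eq!(folded, None); // a `*` anywhere selects everything
}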
@@ -2,8 +2,10 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::DeserializeFromValue;
 use index_scheduler::IndexScheduler;
+use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
-use meilisearch_types::error::{DeserrError, ResponseError};
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::tasks::{IndexSwap, KindWithContent};
 use serde_json::json;

@@ -20,15 +22,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 }

 #[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SwapIndexesPayload {
-    #[deserr(error = DeserrError<InvalidSwapIndexes>)]
-    indexes: Vec<String>,
+    #[deserr(error = DeserrJsonError<InvalidSwapIndexes>, missing_field_error = DeserrJsonError::missing_swap_indexes)]
+    indexes: Vec<IndexUid>,
 }

 pub async fn swap_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
-    params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrError>,
+    params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -44,6 +46,7 @@ pub async fn swap_indexes(

     let mut swaps = vec![];
     for SwapIndexesPayload { indexes } in params.into_iter() {
+        // TODO: switch to deserr
         let (lhs, rhs) = match indexes.as_slice() {
             [lhs, rhs] => (lhs, rhs),
             _ => {
@@ -53,7 +56,7 @@ pub async fn swap_indexes(
         if !search_rules.is_index_authorized(lhs) || !search_rules.is_index_authorized(rhs) {
             return Err(AuthenticationError::InvalidToken.into());
         }
-        swaps.push(IndexSwap { indexes: (lhs.clone(), rhs.clone()) });
+        swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) });
     }

     let task = KindWithContent::IndexSwap { swaps };
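Note: `swap_indexes` takes a list of payloads and requires each `indexes` array to hold exactly two entries, which the slice pattern above enforces. A standalone sketch of that validation, using plain `String` uids instead of the crate's `IndexUid` type:

struct IndexSwap { indexes: (String, String) }

fn collect_swaps(payloads: Vec<Vec<String>>) -> Result<Vec<IndexSwap>, String> {
    let mut swaps = Vec::new();
    for indexes in payloads {
        // Exactly two uids per payload; anything else is a client error.
        let (lhs, rhs) = match indexes.as_slice() {
            [lhs, rhs] => (lhs, rhs),
            _ => return Err(format!("expected exactly 2 indexes, got {}", indexes.len())),
        };
        swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) });
    }
    Ok(swaps)
}

fn main() {
    let ok = collect_swaps(vec![vec!["movies".into(), "movies_new".into()]]);
    assert!(ok.is_ok());
    let err = collect_swaps(vec![vec!["movies".into()]]);
    assert!(err.is_err());
}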
@@ -1,34 +1,32 @@
-use std::num::ParseIntError;
-use std::str::FromStr;
-
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::DeserializeFromValue;
 use index_scheduler::{IndexScheduler, Query, TaskId};
+use meilisearch_types::deserr::query_params::Param;
+use meilisearch_types::deserr::DeserrQueryParamError;
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
+use meilisearch_types::error::{InvalidTaskDateError, ResponseError};
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::settings::{Settings, Unchecked};
-use meilisearch_types::star_or::StarOr;
+use meilisearch_types::star_or::{OptionStarOr, OptionStarOrList};
 use meilisearch_types::tasks::{
     serialize_duration, Details, IndexSwap, Kind, KindWithContent, Status, Task,
 };
-use serde::{Deserialize, Serialize};
-use serde_cs::vec::CS;
+use serde::Serialize;
 use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::macros::format_description;
 use time::{Date, Duration, OffsetDateTime, Time};
 use tokio::task;

-use super::{fold_star_or, SummarizedTaskView};
+use super::SummarizedTaskView;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;

-const DEFAULT_LIMIT: fn() -> u32 = || 20;
+const DEFAULT_LIMIT: u32 = 20;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
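Note: `DEFAULT_LIMIT` changes from a function pointer to a plain constant because the new deserr attribute takes a value expression (`Param(DEFAULT_LIMIT)`) rather than a callable. A sketch of a `Param`-style newtype under the assumption that the real one simply wraps the inner value and exposes it via `.0`/deref; this is not the meilisearch_types implementation:

use std::ops::Deref;

const DEFAULT_LIMIT: u32 = 20;

// Hypothetical stand-in for meilisearch_types::deserr::query_params::Param.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Param<T>(T);

impl<T> Deref for Param<T> {
    type Target = T;
    fn deref(&self) -> &T { &self.0 }
}

fn limit_or_default(raw: Option<u32>) -> Param<u32> {
    // The `default = Param(DEFAULT_LIMIT)` attribute boils down to this fallback.
    raw.map(Param).unwrap_or(Param(DEFAULT_LIMIT))
}

fn main() {
    assert_eq!(*limit_or_default(None), 20);
    assert_eq!(*limit_or_default(Some(15)), 15);
}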
@@ -164,162 +162,157 @@ impl From<Details> for DetailsView {
     }
 }

-fn parse_option_cs<T: FromStr>(
-    s: Option<CS<String>>,
-) -> Result<Option<Vec<T>>, TakeErrorMessage<T::Err>> {
-    if let Some(s) = s {
-        s.into_iter()
-            .map(|s| T::from_str(&s))
-            .collect::<Result<Vec<T>, T::Err>>()
-            .map_err(TakeErrorMessage)
-            .map(Some)
-    } else {
-        Ok(None)
-    }
-}
+#[derive(Debug, DeserializeFromValue)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
+pub struct TasksFilterQuery {
+    #[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidTaskLimit>)]
+    pub limit: Param<u32>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskFrom>)]
+    pub from: Option<Param<TaskId>>,
+
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
+    pub uids: OptionStarOrList<u32>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
+    pub canceled_by: OptionStarOrList<u32>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
+    pub types: OptionStarOrList<Kind>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
+    pub statuses: OptionStarOrList<Status>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
+    pub index_uids: OptionStarOrList<IndexUid>,
+
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    pub after_started_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    pub before_started_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    pub after_finished_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    pub before_finished_at: OptionStarOr<OffsetDateTime>,
+}
+
+impl TasksFilterQuery {
+    fn into_query(self) -> Query {
+        Query {
+            limit: Some(self.limit.0),
+            from: self.from.as_deref().copied(),
+            statuses: self.statuses.merge_star_and_none(),
+            types: self.types.merge_star_and_none(),
+            index_uids: self.index_uids.map(|x| x.to_string()).merge_star_and_none(),
+            uids: self.uids.merge_star_and_none(),
+            canceled_by: self.canceled_by.merge_star_and_none(),
+            before_enqueued_at: self.before_enqueued_at.merge_star_and_none(),
+            after_enqueued_at: self.after_enqueued_at.merge_star_and_none(),
+            before_started_at: self.before_started_at.merge_star_and_none(),
+            after_started_at: self.after_started_at.merge_star_and_none(),
+            before_finished_at: self.before_finished_at.merge_star_and_none(),
+            after_finished_at: self.after_finished_at.merge_star_and_none(),
+        }
+    }
+}

-fn parse_option_cs_star_or<T: FromStr>(
-    s: Option<CS<StarOr<String>>>,
-) -> Result<Option<Vec<T>>, TakeErrorMessage<T::Err>> {
-    if let Some(s) = s.and_then(fold_star_or) as Option<Vec<String>> {
-        s.into_iter()
-            .map(|s| T::from_str(&s))
-            .collect::<Result<Vec<T>, T::Err>>()
-            .map_err(TakeErrorMessage)
-            .map(Some)
-    } else {
-        Ok(None)
-    }
-}
-fn parse_option_str<T: FromStr>(s: Option<String>) -> Result<Option<T>, TakeErrorMessage<T::Err>> {
-    if let Some(s) = s {
-        T::from_str(&s).map_err(TakeErrorMessage).map(Some)
-    } else {
-        Ok(None)
-    }
-}
-
-fn parse_str<T: FromStr>(s: String) -> Result<T, TakeErrorMessage<T::Err>> {
-    T::from_str(&s).map_err(TakeErrorMessage)
-}
+impl TaskDeletionOrCancelationQuery {
+    fn is_empty(&self) -> bool {
+        matches!(
+            self,
+            TaskDeletionOrCancelationQuery {
+                uids: OptionStarOrList::None,
+                canceled_by: OptionStarOrList::None,
+                types: OptionStarOrList::None,
+                statuses: OptionStarOrList::None,
+                index_uids: OptionStarOrList::None,
+                after_enqueued_at: OptionStarOr::None,
+                before_enqueued_at: OptionStarOr::None,
+                after_started_at: OptionStarOr::None,
+                before_started_at: OptionStarOr::None,
+                after_finished_at: OptionStarOr::None,
+                before_finished_at: OptionStarOr::None
+            }
+        )
+    }
+}

 #[derive(Debug, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
-pub struct TasksFilterQuery {
-    #[deserr(error = DeserrError<InvalidTaskLimit>, default = DEFAULT_LIMIT(), from(String) = parse_str::<u32> -> TakeErrorMessage<ParseIntError>)]
-    pub limit: u32,
-    #[deserr(error = DeserrError<InvalidTaskFrom>, from(Option<String>) = parse_option_str::<TaskId> -> TakeErrorMessage<ParseIntError>)]
-    pub from: Option<TaskId>,
-
-    #[deserr(error = DeserrError<InvalidTaskUids>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
-    pub uids: Option<Vec<u32>>,
-    #[deserr(error = DeserrError<InvalidTaskCanceledBy>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
-    pub canceled_by: Option<Vec<u32>>,
-    #[deserr(error = DeserrError<InvalidTaskTypes>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Kind> -> TakeErrorMessage<ResponseError>)]
-    pub types: Option<Vec<Kind>>,
-    #[deserr(error = DeserrError<InvalidTaskStatuses>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Status> -> TakeErrorMessage<ResponseError>)]
-    pub statuses: Option<Vec<Status>>,
-    #[deserr(error = DeserrError<InvalidIndexUid>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<IndexUid> -> TakeErrorMessage<ResponseError>)]
-    pub index_uids: Option<Vec<IndexUid>>,
-
-    #[deserr(error = DeserrError<InvalidTaskAfterEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub after_enqueued_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskBeforeEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub before_enqueued_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskAfterStartedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub after_started_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskBeforeStartedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub before_started_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskAfterFinishedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub after_finished_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskBeforeFinishedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub before_finished_at: Option<OffsetDateTime>,
-}
-
-#[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct TaskDeletionOrCancelationQuery {
-    #[deserr(error = DeserrError<InvalidTaskUids>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
-    pub uids: Option<Vec<u32>>,
-    #[deserr(error = DeserrError<InvalidTaskCanceledBy>, from(Option<CS<String>>) = parse_option_cs::<u32> -> TakeErrorMessage<ParseIntError>)]
-    pub canceled_by: Option<Vec<u32>>,
-    #[deserr(error = DeserrError<InvalidTaskTypes>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Kind> -> TakeErrorMessage<ResponseError>)]
-    pub types: Option<Vec<Kind>>,
-    #[deserr(error = DeserrError<InvalidTaskStatuses>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<Status> -> TakeErrorMessage<ResponseError>)]
-    pub statuses: Option<Vec<Status>>,
-    #[deserr(error = DeserrError<InvalidIndexUid>, default = None, from(Option<CS<StarOr<String>>>) = parse_option_cs_star_or::<IndexUid> -> TakeErrorMessage<ResponseError>)]
-    pub index_uids: Option<Vec<IndexUid>>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
+    pub uids: OptionStarOrList<u32>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
+    pub canceled_by: OptionStarOrList<u32>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
+    pub types: OptionStarOrList<Kind>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
+    pub statuses: OptionStarOrList<Status>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
+    pub index_uids: OptionStarOrList<IndexUid>,

-    #[deserr(error = DeserrError<InvalidTaskAfterEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub after_enqueued_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskBeforeEnqueuedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub before_enqueued_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskAfterStartedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub after_started_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskBeforeStartedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub before_started_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskAfterFinishedAt>, default = None, from(Option<String>) = deserialize_date_after -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub after_finished_at: Option<OffsetDateTime>,
-    #[deserr(error = DeserrError<InvalidTaskBeforeFinishedAt>, default = None, from(Option<String>) = deserialize_date_before -> TakeErrorMessage<InvalidTaskDateError>)]
-    pub before_finished_at: Option<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    pub after_started_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    pub before_started_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    pub after_finished_at: OptionStarOr<OffsetDateTime>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    pub before_finished_at: OptionStarOr<OffsetDateTime>,
+}
+
+impl TaskDeletionOrCancelationQuery {
+    fn into_query(self) -> Query {
+        Query {
+            limit: None,
+            from: None,
+            statuses: self.statuses.merge_star_and_none(),
+            types: self.types.merge_star_and_none(),
+            index_uids: self.index_uids.map(|x| x.to_string()).merge_star_and_none(),
+            uids: self.uids.merge_star_and_none(),
+            canceled_by: self.canceled_by.merge_star_and_none(),
+            before_enqueued_at: self.before_enqueued_at.merge_star_and_none(),
+            after_enqueued_at: self.after_enqueued_at.merge_star_and_none(),
+            before_started_at: self.before_started_at.merge_star_and_none(),
+            after_started_at: self.after_started_at.merge_star_and_none(),
+            before_finished_at: self.before_finished_at.merge_star_and_none(),
+            after_finished_at: self.after_finished_at.merge_star_and_none(),
+        }
+    }
 }

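Note: the query structs now lean on `OptionStarOr`/`OptionStarOrList`, which distinguish an absent parameter, a literal `*`, and concrete values; `merge_star_and_none` collapses the first two cases into `None` so the scheduler treats them as "no filter", and `try_map` converts the concrete case through a fallible parser (as the date fields do above). A minimal sketch of the single-value variant under those assumptions; the real types live in meilisearch_types::star_or and may differ:

// Hypothetical three-state parameter: absent, "*", or a concrete value.
#[derive(Debug, PartialEq)]
enum OptionStarOr<T> {
    None,
    Star,
    Other(T),
}

impl<T> OptionStarOr<T> {
    // Absent and "*" both mean "do not filter on this field".
    fn merge_star_and_none(self) -> Option<T> {
        match self {
            OptionStarOr::None | OptionStarOr::Star => None,
            OptionStarOr::Other(x) => Some(x),
        }
    }

    // Map the concrete case through a fallible conversion, keeping None/Star as-is.
    fn try_map<U, E>(self, f: impl FnOnce(T) -> Result<U, E>) -> Result<OptionStarOr<U>, E> {
        match self {
            OptionStarOr::None => Ok(OptionStarOr::None),
            OptionStarOr::Star => Ok(OptionStarOr::Star),
            OptionStarOr::Other(x) => f(x).map(OptionStarOr::Other),
        }
    }
}

fn main() {
    let star: OptionStarOr<u32> = OptionStarOr::Star;
    assert_eq!(star.merge_star_and_none(), None);
    let val = OptionStarOr::Other("42").try_map(|s| s.parse::<u32>());
    assert_eq!(val.unwrap().merge_star_and_none(), Some(42));
}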
 async fn cancel_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
-    params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrError>,
+    params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let TaskDeletionOrCancelationQuery {
-        types,
-        uids,
-        canceled_by,
-        statuses,
-        index_uids,
-        after_enqueued_at,
-        before_enqueued_at,
-        after_started_at,
-        before_started_at,
-        after_finished_at,
-        before_finished_at,
-    } = params.into_inner();
+    let params = params.into_inner();
+
+    if params.is_empty() {
+        return Err(index_scheduler::Error::TaskCancelationWithEmptyQuery.into());
+    }

     analytics.publish(
         "Tasks Canceled".to_string(),
         json!({
-            "filtered_by_uid": uids.is_some(),
-            "filtered_by_index_uid": index_uids.is_some(),
-            "filtered_by_type": types.is_some(),
-            "filtered_by_status": statuses.is_some(),
-            "filtered_by_canceled_by": canceled_by.is_some(),
-            "filtered_by_before_enqueued_at": before_enqueued_at.is_some(),
-            "filtered_by_after_enqueued_at": after_enqueued_at.is_some(),
-            "filtered_by_before_started_at": before_started_at.is_some(),
-            "filtered_by_after_started_at": after_started_at.is_some(),
-            "filtered_by_before_finished_at": before_finished_at.is_some(),
-            "filtered_by_after_finished_at": after_finished_at.is_some(),
+            "filtered_by_uid": params.uids.is_some(),
+            "filtered_by_index_uid": params.index_uids.is_some(),
+            "filtered_by_type": params.types.is_some(),
+            "filtered_by_status": params.statuses.is_some(),
+            "filtered_by_canceled_by": params.canceled_by.is_some(),
+            "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(),
+            "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(),
+            "filtered_by_before_started_at": params.before_started_at.is_some(),
+            "filtered_by_after_started_at": params.after_started_at.is_some(),
+            "filtered_by_before_finished_at": params.before_finished_at.is_some(),
+            "filtered_by_after_finished_at": params.after_finished_at.is_some(),
         }),
         Some(&req),
     );

-    let query = Query {
-        limit: None,
-        from: None,
-        statuses,
-        types,
-        index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
-        uids,
-        canceled_by,
-        before_enqueued_at,
-        after_enqueued_at,
-        before_started_at,
-        after_started_at,
-        before_finished_at,
-        after_finished_at,
-    };
-
-    if query.is_empty() {
-        return Err(index_scheduler::Error::TaskCancelationWithEmptyQuery.into());
-    }
+    let query = params.into_query();

     let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
         &index_scheduler.read_txn()?,
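Note: after the refactor the analytics payload reads the filter flags straight off `params` instead of destructured locals, and it only reports *whether* each filter was used, never its contents. A tiny sketch of that pattern with serde_json, assuming only that each field offers an `is_some()`-style check:

use serde_json::json;

fn main() {
    let uids: Option<Vec<u32>> = Some(vec![1, 2, 3]);
    let statuses: Option<Vec<String>> = None;

    // Booleans only: which filters were supplied, not their values.
    let payload = json!({
        "filtered_by_uid": uids.is_some(),
        "filtered_by_status": statuses.is_some(),
    });
    assert_eq!(payload["filtered_by_uid"], json!(true));
    assert_eq!(payload["filtered_by_status"], json!(false));
}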
@@ -337,62 +330,34 @@ async fn cancel_tasks(

 async fn delete_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
-    params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrError>,
+    params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let TaskDeletionOrCancelationQuery {
-        types,
-        uids,
-        canceled_by,
-        statuses,
-        index_uids,
-
-        after_enqueued_at,
-        before_enqueued_at,
-        after_started_at,
-        before_started_at,
-        after_finished_at,
-        before_finished_at,
-    } = params.into_inner();
+    let params = params.into_inner();
+
+    if params.is_empty() {
+        return Err(index_scheduler::Error::TaskDeletionWithEmptyQuery.into());
+    }

     analytics.publish(
         "Tasks Deleted".to_string(),
         json!({
-            "filtered_by_uid": uids.is_some(),
-            "filtered_by_index_uid": index_uids.is_some(),
-            "filtered_by_type": types.is_some(),
-            "filtered_by_status": statuses.is_some(),
-            "filtered_by_canceled_by": canceled_by.is_some(),
-            "filtered_by_before_enqueued_at": before_enqueued_at.is_some(),
-            "filtered_by_after_enqueued_at": after_enqueued_at.is_some(),
-            "filtered_by_before_started_at": before_started_at.is_some(),
-            "filtered_by_after_started_at": after_started_at.is_some(),
-            "filtered_by_before_finished_at": before_finished_at.is_some(),
-            "filtered_by_after_finished_at": after_finished_at.is_some(),
+            "filtered_by_uid": params.uids.is_some(),
+            "filtered_by_index_uid": params.index_uids.is_some(),
+            "filtered_by_type": params.types.is_some(),
+            "filtered_by_status": params.statuses.is_some(),
+            "filtered_by_canceled_by": params.canceled_by.is_some(),
+            "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(),
+            "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(),
+            "filtered_by_before_started_at": params.before_started_at.is_some(),
+            "filtered_by_after_started_at": params.after_started_at.is_some(),
+            "filtered_by_before_finished_at": params.before_finished_at.is_some(),
+            "filtered_by_after_finished_at": params.after_finished_at.is_some(),
         }),
         Some(&req),
     );

-    let query = Query {
-        limit: None,
-        from: None,
-        statuses,
-        types,
-        index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
-        uids,
-        canceled_by,
-        after_enqueued_at,
-        before_enqueued_at,
-        after_started_at,
-        before_started_at,
-        after_finished_at,
-        before_finished_at,
-    };
-
-    if query.is_empty() {
-        return Err(index_scheduler::Error::TaskDeletionWithEmptyQuery.into());
-    }
+    let query = params.into_query();

     let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
         &index_scheduler.read_txn()?,
@@ -418,47 +383,17 @@ pub struct AllTasks {

 async fn get_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
-    params: QueryParameter<TasksFilterQuery, DeserrError>,
+    params: QueryParameter<TasksFilterQuery, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let params = params.into_inner();
+    let mut params = params.into_inner();
     analytics.get_tasks(&params, &req);

-    let TasksFilterQuery {
-        types,
-        uids,
-        canceled_by,
-        statuses,
-        index_uids,
-        limit,
-        from,
-        after_enqueued_at,
-        before_enqueued_at,
-        after_started_at,
-        before_started_at,
-        after_finished_at,
-        before_finished_at,
-    } = params;
-
     // We +1 just to know if there is more after this "page" or not.
-    let limit = limit.saturating_add(1);
-    let query = index_scheduler::Query {
-        limit: Some(limit),
-        from,
-        statuses,
-        types,
-        index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
-        uids,
-        canceled_by,
-        before_enqueued_at,
-        after_enqueued_at,
-        before_started_at,
-        after_started_at,
-        before_finished_at,
-        after_finished_at,
-    };
+    params.limit.0 = params.limit.0.saturating_add(1);
+    let limit = params.limit.0;
+    let query = params.into_query();

     let mut tasks_results: Vec<TaskView> = index_scheduler
         .get_tasks_from_authorized_indexes(
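Note: `get_tasks` keeps the "fetch limit + 1" trick: asking the scheduler for one extra task reveals whether a next page exists, and the extra element becomes the cursor for the following request. A self-contained sketch of that logic over a plain vector:

fn paginate(tasks: &[u32], limit: usize) -> (Vec<u32>, Option<u32>) {
    // Fetch one more than requested to know if there is a next page.
    let mut page: Vec<u32> = tasks.iter().copied().take(limit + 1).collect();
    let next_from = if page.len() == limit + 1 {
        // The extra element is the cursor for the next page, not a result.
        page.pop()
    } else {
        None
    };
    (page, next_from)
}

fn main() {
    let all = [10, 9, 8, 7];
    assert_eq!(paginate(&all, 2), (vec![10, 9], Some(8)));
    assert_eq!(paginate(&all, 10), (vec![10, 9, 8, 7], None));
}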
@@ -524,7 +459,7 @@ pub enum DeserializeDateOption {
 pub fn deserialize_date(
     value: &str,
     option: DeserializeDateOption,
-) -> std::result::Result<OffsetDateTime, TakeErrorMessage<InvalidTaskDateError>> {
+) -> std::result::Result<OffsetDateTime, InvalidTaskDateError> {
     // We can't parse using time's rfc3339 format, since then we won't know what part of the
     // datetime was not explicitly specified, and thus we won't be able to increment it to the
     // next step.
@@ -546,54 +481,41 @@ pub fn deserialize_date(
             }
         }
     } else {
-        Err(TakeErrorMessage(InvalidTaskDateError(value.to_owned())))
+        Err(InvalidTaskDateError(value.to_owned()))
     }
 }

-pub fn deserialize_date_before(
-    value: Option<String>,
-) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<InvalidTaskDateError>> {
-    if let Some(value) = value {
-        let date = deserialize_date(&value, DeserializeDateOption::Before)?;
-        Ok(Some(date))
-    } else {
-        Ok(None)
-    }
-}
 pub fn deserialize_date_after(
-    value: Option<String>,
-) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<InvalidTaskDateError>> {
-    if let Some(value) = value {
-        let date = deserialize_date(&value, DeserializeDateOption::After)?;
-        Ok(Some(date))
-    } else {
-        Ok(None)
-    }
+    value: OptionStarOr<String>,
+) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
+    value.try_map(|x| deserialize_date(&x, DeserializeDateOption::After))
+}
+
+pub fn deserialize_date_before(
+    value: OptionStarOr<String>,
+) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
+    value.try_map(|x| deserialize_date(&x, DeserializeDateOption::Before))
 }

-#[derive(Debug)]
-pub struct InvalidTaskDateError(String);
-impl std::fmt::Display for InvalidTaskDateError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0)
-    }
-}
-impl std::error::Error for InvalidTaskDateError {}

 #[cfg(test)]
 mod tests {
     use deserr::DeserializeFromValue;
     use meili_snap::snapshot;
-    use meilisearch_types::error::DeserrError;
+    use meilisearch_types::deserr::DeserrQueryParamError;
+    use meilisearch_types::error::{Code, ResponseError};

-    use crate::extractors::query_parameters::QueryParameter;
     use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};

-    fn deserr_query_params<T>(j: &str) -> Result<T, actix_web::Error>
+    fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
     where
-        T: DeserializeFromValue<DeserrError>,
+        T: DeserializeFromValue<DeserrQueryParamError>,
     {
-        QueryParameter::<T, DeserrError>::from_query(j).map(|p| p.0)
+        let value = serde_urlencoded::from_str::<serde_json::Value>(j)
+            .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
+
+        match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
+            Ok(data) => Ok(data),
+            Err(e) => Err(ResponseError::from(e)),
+        }
     }

     #[test]
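Note: `deserialize_date` parses partial dates (e.g. `2021-12-03`) itself instead of using RFC 3339, so it can tell which components were omitted; for `after` filters the bare day is bumped to the next one, which is why `afterEnqueuedAt=2021-12-03` snapshots as `2021-12-04` in the tests below. A rough sketch of that before/after asymmetry using the time crate (the file already depends on `Date`, `Time`, and `format_description`); this is not the crate's exact parsing code:

use time::macros::format_description;
use time::{Date, OffsetDateTime, PrimitiveDateTime, Time};

enum DateOption { Before, After }

fn parse_day_filter(value: &str, option: DateOption) -> Result<OffsetDateTime, String> {
    let format = format_description!("[year]-[month]-[day]");
    let date = Date::parse(value, &format).map_err(|e| e.to_string())?;
    // A bare day means midnight; "after" must skip the whole named day.
    let date = match option {
        DateOption::Before => date,
        DateOption::After => date.next_day().ok_or("date overflow")?,
    };
    Ok(PrimitiveDateTime::new(date, Time::MIDNIGHT).assume_utc())
}

fn main() {
    let after = parse_day_filter("2021-12-03", DateOption::After).unwrap();
    assert_eq!(format!("{after:?}"), "2021-12-04 0:00:00.0 +00:00:00");
}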
@@ -602,65 +524,113 @@ mod tests {
             let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
             let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();

-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_started_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.after_finished_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_finished_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
        }
        {
            let params =
                "afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
        }
        {
            let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
        }
        {
            let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
        }
        {
            let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
+        }
+        {
+            // Stars are allowed in date fields as well
+            let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
+            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
+            snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
        }
        {
            let params = "afterFinishedAt=2021";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+              "code": "invalid_task_after_finished_at",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
+            }
+            "###);
        }
        {
            let params = "beforeFinishedAt=2021";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+              "code": "invalid_task_before_finished_at",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
+            }
+            "###);
        }
        {
            let params = "afterEnqueuedAt=2021-12";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+              "code": "invalid_task_after_enqueued_at",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
+            }
+            "###);
        }

        {
            let params = "beforeEnqueuedAt=2021-12-03T23";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+              "code": "invalid_task_before_enqueued_at",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
+            }
+            "###);
        }
        {
            let params = "afterStartedAt=2021-12-03T23:45";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+              "code": "invalid_task_after_started_at",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
+            }
+            "###);
        }
        {
            let params = "beforeStartedAt=2021-12-03T23:45";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
+              "code": "invalid_task_before_started_at",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
+            }
+            "###);
        }
    }

@@ -669,22 +639,48 @@ mod tests {
        {
            let params = "uids=78,1,12,73";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.uids.unwrap()), @"[78, 1, 12, 73]");
+            snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
        }
        {
            let params = "uids=1";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.uids.unwrap()), @"[1]");
+            snapshot!(format!("{:?}", query.uids), @"List([1])");
+        }
+        {
+            let params = "uids=cat,*,dog";
+            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
+              "code": "invalid_task_uids",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
+            }
+            "###);
        }
        {
            let params = "uids=78,hello,world";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
+              "code": "invalid_task_uids",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
+            }
+            "###);
        }
        {
            let params = "uids=cat";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
+              "code": "invalid_task_uids",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
+            }
+            "###);
        }
    }

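Note: the new error messages point at the failing element (`uids[1]`) rather than the whole parameter. A sketch of comma-separated parsing that tracks the index of the first bad entry; the message wording mirrors the snapshots above, but the helper itself is hypothetical, not the deserr machinery:

fn parse_uids(raw: &str) -> Result<Vec<u32>, String> {
    raw.split(',')
        .enumerate()
        .map(|(i, s)| {
            // Record which element failed so the error can name `uids[i]`.
            s.parse::<u32>().map_err(|_| {
                format!("Invalid value in parameter `uids[{i}]`: could not parse `{s}` as a positive integer")
            })
        })
        .collect()
}

fn main() {
    assert_eq!(parse_uids("78,1,12,73"), Ok(vec![78, 1, 12, 73]));
    let err = parse_uids("78,hello,world").unwrap_err();
    assert!(err.contains("`uids[1]`"));
    assert!(err.contains("`hello`"));
}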
@@ -693,17 +689,24 @@ mod tests {
        {
            let params = "statuses=succeeded,failed,enqueued,processing,canceled";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.statuses.unwrap()), @"[Succeeded, Failed, Enqueued, Processing, Canceled]");
+            snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
        }
        {
            let params = "statuses=enqueued";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.statuses.unwrap()), @"[Enqueued]");
+            snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
        }
        {
            let params = "statuses=finished";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`finished` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
+              "code": "invalid_task_statuses",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
+            }
+            "###);
        }
    }
    #[test]
@@ -711,17 +714,24 @@ mod tests {
        {
            let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.types.unwrap()), @"[DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation]");
+            snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
        }
        {
            let params = "types=settingsUpdate";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.types.unwrap()), @"[SettingsUpdate]");
+            snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
        }
        {
            let params = "types=createIndex";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`createIndex` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
+              "code": "invalid_task_types",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_types"
+            }
+            "###);
        }
    }
    #[test]
@@ -729,22 +739,36 @@ mod tests {
        {
            let params = "indexUids=toto,tata-78";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.index_uids.unwrap()), @r###"[IndexUid("toto"), IndexUid("tata-78")]"###);
+            snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
        }
        {
            let params = "indexUids=index_a";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.index_uids.unwrap()), @r###"[IndexUid("index_a")]"###);
+            snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
        }
        {
            let params = "indexUids=1,hé";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+              "code": "invalid_index_uid",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
+            }
+            "###);
        }
        {
            let params = "indexUids=hé";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+              "code": "invalid_index_uid",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
+            }
+            "###);
        }
    }

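Note: in the hunk below, `from` and `limit` are rejected on deletion/cancelation queries because the payload derives with `deny_unknown_fields`, and the error enumerates the accepted parameters. Serde exposes the same knob; a sketch using serde rather than deserr to show the behavior:

use serde::Deserialize;

// Analogous to the deserr attribute: unknown query parameters become hard errors.
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
struct DeletionQuery {
    uids: Option<String>,
    index_uids: Option<String>,
}

fn main() {
    let ok: DeletionQuery = serde_json::from_str(r#"{ "uids": "1,2" }"#).unwrap();
    assert_eq!(ok.uids.as_deref(), Some("1,2"));

    // `from` is not a known field, so deserialization fails with a descriptive error.
    let err = serde_json::from_str::<DeletionQuery>(r#"{ "from": 12 }"#).unwrap_err();
    assert!(err.to_string().contains("unknown field `from`"));
}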
@@ -753,38 +777,74 @@ mod tests {
        {
            let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
            let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: 15, from: Some(12), uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: Some([Succeeded, Enqueued]), index_uids: Some([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
+            snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
        }
        {
            // Stars should translate to `None` in the query
            // Verify value of the default limit
            let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
            let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: 20, from: None, uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
+            snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
        }
        {
            // Stars should also translate to `None` in task deletion/cancelation queries
            let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
            let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
+            snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
        }
        {
-            // Stars in uids not allowed
-            let params = "uids=*";
+            // Star in from not allowed
+            let params = "uids=*&from=*";
            let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
+              "code": "invalid_task_from",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#invalid_task_from"
+            }
+            "###);
        }
        {
            // From not allowed in task deletion/cancelation queries
            let params = "from=12";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
+              "code": "bad_request",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#bad_request"
+            }
+            "###);
        }
        {
            // Limit not allowed in task deletion/cancelation queries
            let params = "limit=12";
            let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.");
+            snapshot!(meili_snap::json_string!(err), @r###"
+            {
+              "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
+              "code": "bad_request",
+              "type": "invalid_request",
+              "link": "https://docs.meilisearch.com/errors#bad_request"
+            }
+            "###);
        }
    }
+
+    #[test]
+    fn deserialize_task_delete_or_cancel_empty() {
+        {
+            let params = "";
|
||||||
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
|
assert!(query.is_empty());
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let params = "statuses=*";
|
||||||
|
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||||
|
assert!(!query.is_empty());
|
||||||
|
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
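Note on the snapshot changes above: the Debug output switches from plain `Option` values (`Some(12)`, `Some([1, 2, 3])`) to wrapper values (`Param(12)`, `List([1, 2, 3])`, `Star`, `Other(...)`), which suggests the task query parameters are now parsed into small star-aware wrapper types. A minimal sketch of types with that shape, inferred from the snapshots alone (the real definitions live in meilisearch-types and may be named and structured differently):

    // Illustrative only: wrappers shaped like the Debug output above.
    #[derive(Debug)]
    struct Param<T>(T); // a parsed scalar parameter, e.g. Param(15)

    #[derive(Debug)]
    enum OptionStarOr<T> {
        None,     // parameter absent from the query string
        Star,     // parameter was the literal `*`
        Other(T), // a concrete parsed value, e.g. a date
    }

    #[derive(Debug)]
    enum OptionStarOrList<T> {
        None,         // parameter absent
        Star,         // parameter was `*` or a list containing `*`
        List(Vec<T>), // comma-separated concrete values
    }

This reading matches the tests: `statuses=succeeded,*` collapses to `Star`, while `from=*` is rejected because `from` only accepts a plain positive integer.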
@@ -5,8 +5,8 @@ use std::time::Instant;

 use deserr::DeserializeFromValue;
 use either::Either;
+use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::DeserrError;
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
 use meilisearch_types::{milli, Document};
 use milli::tokenizer::TokenizerBuilder;
@@ -15,7 +15,7 @@ use milli::{
     SortError, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,
 };
 use regex::Regex;
-use serde::{Deserialize, Serialize};
+use serde::Serialize;
 use serde_json::{json, Value};

 use crate::error::MeilisearchHttpError;
@@ -30,41 +30,41 @@ pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
 pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();

 #[derive(Debug, Clone, Default, PartialEq, Eq, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQuery {
-    #[deserr(error = DeserrError<InvalidSearchQ>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
     pub q: Option<String>,
-    #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET())]
+    #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
     pub offset: usize,
-    #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT())]
+    #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
     pub limit: usize,
-    #[deserr(error = DeserrError<InvalidSearchPage>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchPage>)]
     pub page: Option<usize>,
-    #[deserr(error = DeserrError<InvalidSearchHitsPerPage>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHitsPerPage>)]
     pub hits_per_page: Option<usize>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
     pub attributes_to_crop: Option<Vec<String>>,
-    #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
     pub crop_length: usize,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToHighlight>)]
     pub attributes_to_highlight: Option<HashSet<String>>,
-    #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
     pub show_matches_position: bool,
-    #[deserr(error = DeserrError<InvalidSearchFilter>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
     pub filter: Option<Value>,
-    #[deserr(error = DeserrError<InvalidSearchSort>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchSort>)]
     pub sort: Option<Vec<String>>,
-    #[deserr(error = DeserrError<InvalidSearchFacets>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)]
     pub facets: Option<Vec<String>>,
-    #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
     pub highlight_pre_tag: String,
-    #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
     pub highlight_post_tag: String,
-    #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
     pub crop_marker: String,
-    #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
     pub matching_strategy: MatchingStrategy,
 }

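Read as a pattern, every `SearchQuery` field now pairs an explicit `default` with a field-specific `DeserrJsonError<...>` marker, so a malformed value reports that field's own error code rather than a generic deserialize failure. A hedged two-field sketch of the idea (hypothetical struct, reusing the constants and markers from the diff above):

    // Sketch of the attribute pattern, not a verbatim excerpt.
    #[derive(Debug, Clone, Default, PartialEq, Eq, DeserializeFromValue)]
    #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
    pub struct TinySearchQuery {
        // Absent => None; wrong type => the InvalidSearchQ error code.
        #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
        pub q: Option<String>,
        // Absent => DEFAULT_SEARCH_LIMIT(); wrong type => InvalidSearchLimit.
        #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
        pub limit: usize,
    }

One detail visible above: several attributes end up carrying `default` twice (for example `crop_length` and `highlight_pre_tag`), once bare and once with an explicit value.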
@@ -74,9 +74,8 @@ impl SearchQuery {
     }
 }

-#[derive(Deserialize, Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
+#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
 #[deserr(rename_all = camelCase)]
-#[serde(rename_all = "camelCase")]
 pub enum MatchingStrategy {
     /// Remove query words from last to first
     Last,
@@ -205,7 +205,7 @@ async fn error_add_api_key_no_header() {
       "message": "The Authorization header is missing. It must use the bearer authorization method.",
       "code": "missing_authorization_header",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-authorization-header"
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
     }
     "###);
 }
@@ -228,7 +228,7 @@ async fn error_add_api_key_bad_key() {
       "message": "The provided API key is invalid.",
       "code": "invalid_api_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     }
     "###);
 }
@@ -248,10 +248,10 @@ async fn error_add_api_key_missing_parameter() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: missing field `indexes` at ``",
-      "code": "bad_request",
+      "message": "Missing field `indexes`",
+      "code": "missing_api_key_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#missing_api_key_indexes"
     }
     "###);

@@ -265,10 +265,10 @@ async fn error_add_api_key_missing_parameter() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: missing field `actions` at ``",
-      "code": "bad_request",
+      "message": "Missing field `actions`",
+      "code": "missing_api_key_actions",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#missing_api_key_actions"
     }
     "###);

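The two hunks above pin down the new typed missing-field errors. As a hedged sketch using the same test helpers, a payload carrying all three required fields should still create the key, and dropping any one of them yields the matching `missing_api_key_*` error (payload values assumed for illustration):

    let content = json!({
        "description": "Indexing API key",
        "indexes": ["products"],
        "actions": ["documents.add"],
        "expiresAt": "2050-11-13T00:00:00Z"
    });
    let (response, code) = server.add_api_key(content).await;
    meili_snap::snapshot!(code, @"201 Created");
    // Removing `indexes`, `actions`, or `expiresAt` from this payload
    // instead returns 400 with the corresponding missing_api_key_* code.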
@@ -279,22 +279,13 @@ async fn error_add_api_key_missing_parameter() {
         "actions": ["documents.add"],
     });
     let (response, code) = server.add_api_key(content).await;
-    meili_snap::snapshot!(code, @"201 Created");
+    meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
-      "name": null,
-      "description": "Indexing API key",
-      "key": "[ignored]",
-      "uid": "[ignored]",
-      "actions": [
-        "documents.add"
-      ],
-      "indexes": [
-        "products"
-      ],
-      "expiresAt": null,
-      "createdAt": "[ignored]",
-      "updatedAt": "[ignored]"
+      "message": "Missing field `expiresAt`",
+      "code": "missing_api_key_expires_at",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#missing_api_key_expires_at"
     }
     "###);
 }
@@ -314,10 +305,10 @@ async fn error_add_api_key_invalid_parameters_description() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a String at `.description`.",
+      "message": "Invalid value type at `.description`: expected a string, but found an object: `{\"name\":\"products\"}`",
       "code": "invalid_api_key_description",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-description"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_description"
     }
     "###);
 }
@@ -337,10 +328,10 @@ async fn error_add_api_key_invalid_parameters_name() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a String at `.name`.",
+      "message": "Invalid value type at `.name`: expected a string, but found an object: `{\"name\":\"products\"}`",
       "code": "invalid_api_key_name",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-name"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_name"
     }
     "###);
 }
@@ -360,10 +351,10 @@ async fn error_add_api_key_invalid_parameters_indexes() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a Sequence at `.indexes`.",
+      "message": "Invalid value type at `.indexes`: expected an array, but found an object: `{\"name\":\"products\"}`",
       "code": "invalid_api_key_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
     }
     "###);
 }
@@ -386,10 +377,10 @@ async fn error_add_api_key_invalid_index_uids() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "`invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexes[0]`.",
+      "message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_api_key_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
     }
     "###);
 }
@@ -411,10 +402,10 @@ async fn error_add_api_key_invalid_parameters_actions() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a Sequence at `.actions`.",
+      "message": "Invalid value type at `.actions`: expected an array, but found an object: `{\"name\":\"products\"}`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-actions"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
     }
     "###);

@@ -431,10 +422,10 @@ async fn error_add_api_key_invalid_parameters_actions() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: unknown value `doc.add`, expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete` at `.actions[0]`.",
+      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-actions"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
     }
     "###);
 }
@@ -455,10 +446,10 @@ async fn error_add_api_key_invalid_parameters_expires_at() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a String at `.expiresAt`.",
+      "message": "Invalid value type at `.expiresAt`: expected a string, but found an object: `{\"name\":\"products\"}`",
       "code": "invalid_api_key_expires_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-expires-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
     }
     "###);
 }
@@ -478,10 +469,10 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() {

     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "`2010-11-13T00:00:00Z` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.\n at `.expiresAt`.",
+      "message": "Invalid value at `.expiresAt`: `2010-11-13T00:00:00Z` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.\n",
       "code": "invalid_api_key_expires_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-expires-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
@@ -503,10 +494,10 @@ async fn error_add_api_key_invalid_parameters_uid() {

     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid length: expected length 32 for simple format, found 13 at `.uid`.",
+      "message": "Invalid value at `.uid`: invalid length: expected length 32 for simple format, found 13",
       "code": "invalid_api_key_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
@@ -551,7 +542,7 @@ async fn error_add_api_key_parameters_uid_already_exist() {
       "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.",
       "code": "api_key_already_exists",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#api-key-already-exists"
+      "link": "https://docs.meilisearch.com/errors#api_key_already_exists"
     }
     "###);
     meili_snap::snapshot!(code, @"409 Conflict");
@@ -697,7 +688,7 @@ async fn error_get_api_key_no_header() {
       "message": "The Authorization header is missing. It must use the bearer authorization method.",
       "code": "missing_authorization_header",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-authorization-header"
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -716,7 +707,7 @@ async fn error_get_api_key_bad_key() {
       "message": "The provided API key is invalid.",
       "code": "invalid_api_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     }
     "###);
     meili_snap::snapshot!(code, @"403 Forbidden");
@@ -735,7 +726,7 @@ async fn error_get_api_key_not_found() {
       "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.",
       "code": "api_key_not_found",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#api-key-not-found"
+      "link": "https://docs.meilisearch.com/errors#api_key_not_found"
     }
     "###);
     meili_snap::snapshot!(code, @"404 Not Found");
@@ -799,7 +790,7 @@ async fn list_api_keys() {
     "###);
     meili_snap::snapshot!(code, @"201 Created");

-    let (response, code) = server.list_api_keys().await;
+    let (response, code) = server.list_api_keys("").await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".results[].createdAt" => "[ignored]", ".results[].updatedAt" => "[ignored]", ".results[].uid" => "[ignored]", ".results[].key" => "[ignored]" }), @r###"
     {
       "results": [
@@ -873,13 +864,13 @@ async fn list_api_keys() {
 async fn error_list_api_keys_no_header() {
     let server = Server::new_auth().await;

-    let (response, code) = server.list_api_keys().await;
+    let (response, code) = server.list_api_keys("").await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
       "message": "The Authorization header is missing. It must use the bearer authorization method.",
       "code": "missing_authorization_header",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-authorization-header"
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
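The helper signature change from `list_api_keys()` to `list_api_keys("")` threads a raw query string through to `GET /keys`; these tests pass an empty one. Presumably this exists so other tests can exercise pagination, along the lines of the following sketch (the parameter names are an assumption, not shown in this diff):

    // Hypothetical: forwarding pagination parameters through the new argument.
    let (response, code) = server.list_api_keys("?offset=0&limit=10").await;
    meili_snap::snapshot!(code, @"200 OK");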
@@ -890,13 +881,13 @@ async fn error_list_api_keys_bad_key() {
     let mut server = Server::new_auth().await;
     server.use_api_key("d4000bd7225f77d1eb22cc706ed36772bbc36767c016a27f76def7537b68600d");

-    let (response, code) = server.list_api_keys().await;
+    let (response, code) = server.list_api_keys("").await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
       "message": "The provided API key is invalid.",
       "code": "invalid_api_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     }
     "###);
     meili_snap::snapshot!(code, @"403 Forbidden");
@@ -973,7 +964,7 @@ async fn delete_api_key() {
       "message": "[ignored]",
       "code": "api_key_not_found",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#api-key-not-found"
+      "link": "https://docs.meilisearch.com/errors#api_key_not_found"
     }
     "###);
     meili_snap::snapshot!(code, @"404 Not Found");
@@ -992,7 +983,7 @@ async fn error_delete_api_key_no_header() {
       "message": "The Authorization header is missing. It must use the bearer authorization method.",
       "code": "missing_authorization_header",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-authorization-header"
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1011,7 +1002,7 @@ async fn error_delete_api_key_bad_key() {
       "message": "The provided API key is invalid.",
       "code": "invalid_api_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     }
     "###);
     meili_snap::snapshot!(code, @"403 Forbidden");
@@ -1030,7 +1021,7 @@ async fn error_delete_api_key_not_found() {
       "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.",
       "code": "api_key_not_found",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#api-key-not-found"
+      "link": "https://docs.meilisearch.com/errors#api_key_not_found"
     }
     "###);
     meili_snap::snapshot!(code, @"404 Not Found");
@@ -1089,14 +1080,14 @@ async fn patch_api_key_description() {

     let uid = response["uid"].as_str().unwrap();

-    // Add a description
-    let content = json!({ "description": "Indexing API key" });
+    // Add a description and a name
+    let content = json!({ "description": "Indexing API key", "name": "bob" });

     thread::sleep(time::Duration::new(1, 0));
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
-      "name": null,
+      "name": "bob",
       "description": "Indexing API key",
       "key": "[ignored]",
       "uid": "[ignored]",
@@ -1128,7 +1119,7 @@ async fn patch_api_key_description() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
-      "name": null,
+      "name": "bob",
       "description": "Product API key",
       "key": "[ignored]",
       "uid": "[ignored]",
@@ -1160,7 +1151,7 @@ async fn patch_api_key_description() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
-      "name": null,
+      "name": "bob",
       "description": null,
       "key": "[ignored]",
       "uid": "[ignored]",
@@ -1242,15 +1233,15 @@ async fn patch_api_key_name() {
     let created_at = response["createdAt"].as_str().unwrap();
     let updated_at = response["updatedAt"].as_str().unwrap();

-    // Add a name
-    let content = json!({ "name": "Indexing API key" });
+    // Add a name and description
+    let content = json!({ "name": "Indexing API key", "description": "The doggoscription" });

     thread::sleep(time::Duration::new(1, 0));
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
       "name": "Indexing API key",
-      "description": null,
+      "description": "The doggoscription",
       "key": "[ignored]",
       "uid": "[ignored]",
       "actions": [
@@ -1285,7 +1276,7 @@ async fn patch_api_key_name() {
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
       "name": "Product API key",
-      "description": null,
+      "description": "The doggoscription",
       "key": "[ignored]",
       "uid": "[ignored]",
       "actions": [
@@ -1311,13 +1302,13 @@ async fn patch_api_key_name() {
     meili_snap::snapshot!(code, @"200 OK");

     // Remove the name
-    let content = json!({ "name": serde_json::Value::Null });
+    let content = json!({ "name": null });

     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###"
     {
       "name": null,
-      "description": null,
+      "description": "The doggoscription",
       "key": "[ignored]",
       "uid": "[ignored]",
       "actions": [
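A small cosmetic change above swaps `serde_json::Value::Null` for a bare `null` inside the `json!` macro. The two spellings build the same JSON document, which a one-line check confirms:

    use serde_json::json;
    assert_eq!(json!({ "name": serde_json::Value::Null }), json!({ "name": null }));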
@@ -1403,10 +1394,10 @@ async fn error_patch_api_key_indexes() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: unknown field `indexes`, expected one of `description`, `name` at ``.",
+      "message": "Immutable field `indexes`: expected one of `description`, `name`",
       "code": "immutable_api_key_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#immutable-api-key-indexes"
+      "link": "https://docs.meilisearch.com/errors#immutable_api_key_indexes"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
@@ -1480,10 +1471,10 @@ async fn error_patch_api_key_actions() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: unknown field `actions`, expected one of `description`, `name` at ``.",
+      "message": "Immutable field `actions`: expected one of `description`, `name`",
       "code": "immutable_api_key_actions",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#immutable-api-key-actions"
+      "link": "https://docs.meilisearch.com/errors#immutable_api_key_actions"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
@@ -1549,10 +1540,10 @@ async fn error_patch_api_key_expiration_date() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: unknown field `expiresAt`, expected one of `description`, `name` at ``.",
+      "message": "Immutable field `expiresAt`: expected one of `description`, `name`",
       "code": "immutable_api_key_expires_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#immutable-api-key-expires-at"
+      "link": "https://docs.meilisearch.com/errors#immutable_api_key_expires_at"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
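Taken together, the three hunks above establish that patching a key now accepts only `description` and `name`; `indexes`, `actions`, and `expiresAt` are immutable after creation and answer with an `immutable_api_key_*` code. A hedged sketch of the only still-valid patch shape, reusing the helpers from these tests (payload values illustrative):

    // Only these two fields remain patchable, per the snapshots above.
    let content = json!({ "name": "Products key", "description": "Key for the products index" });
    let (response, code) = server.patch_api_key(&uid, content).await;
    meili_snap::snapshot!(code, @"200 OK");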
@@ -1574,7 +1565,7 @@ async fn error_patch_api_key_no_header() {
       "message": "The Authorization header is missing. It must use the bearer authorization method.",
       "code": "missing_authorization_header",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-authorization-header"
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1597,7 +1588,7 @@ async fn error_patch_api_key_bad_key() {
       "message": "The provided API key is invalid.",
       "code": "invalid_api_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     }
     "###);
     meili_snap::snapshot!(code, @"403 Forbidden");
@@ -1620,7 +1611,7 @@ async fn error_patch_api_key_not_found() {
       "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.",
       "code": "api_key_not_found",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#api-key-not-found"
+      "link": "https://docs.meilisearch.com/errors#api_key_not_found"
     }
     "###);
     meili_snap::snapshot!(code, @"404 Not Found");
@@ -1670,10 +1661,10 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Integer `13`, expected a String at `.description`.",
+      "message": "Invalid value type at `.description`: expected a string, but found a positive integer: `13`",
       "code": "invalid_api_key_description",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-description"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_description"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
@@ -1686,10 +1677,10 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&uid, content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Integer `13`, expected a String at `.name`.",
+      "message": "Invalid value type at `.name`: expected a string, but found a positive integer: `13`",
       "code": "invalid_api_key_name",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-api-key-name"
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key_name"
     }
     "###);
     meili_snap::snapshot!(code, @"400 Bad Request");
@@ -1705,7 +1696,7 @@ async fn error_access_api_key_routes_no_master_key_set() {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1716,7 +1707,7 @@ async fn error_access_api_key_routes_no_master_key_set() {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1727,18 +1718,18 @@ async fn error_access_api_key_routes_no_master_key_set() {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");

-    let (response, code) = server.list_api_keys().await;
+    let (response, code) = server.list_api_keys("").await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1751,7 +1742,7 @@ async fn error_access_api_key_routes_no_master_key_set() {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1762,7 +1753,7 @@ async fn error_access_api_key_routes_no_master_key_set() {
      "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -1773,18 +1764,18 @@ async fn error_access_api_key_routes_no_master_key_set() {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");

-    let (response, code) = server.list_api_keys().await;
+    let (response, code) = server.list_api_keys("").await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
       "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
       "code": "missing_master_key",
       "type": "auth",
-      "link": "https://docs.meilisearch.com/errors#missing-master-key"
+      "link": "https://docs.meilisearch.com/errors#missing_master_key"
     }
     "###);
     meili_snap::snapshot!(code, @"401 Unauthorized");
@@ -73,7 +73,7 @@ static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
     json!({"message": "The provided API key is invalid.",
         "code": "invalid_api_key",
         "type": "auth",
-        "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+        "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     })
 });

@@ -520,7 +520,7 @@ async fn error_creating_index_without_action() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
     });

     // try to create a index via add documents route
Some files were not shown because too many files have changed in this diff.