3329: Refactor error handling from deserr r=irevoire a=loiclec

Close https://github.com/meilisearch/meilisearch/issues/3318
Close https://github.com/meilisearch/meilisearch/issues/3289

[TODO]

Co-authored-by: Loïc Lecrenier <loic.lecrenier@me.com>
Co-authored-by: Tamo <tamo@meilisearch.com>
bors[bot] 2023-01-11 18:15:32 +00:00 committed by GitHub
commit 5c1a7c3b9a
31 changed files with 2185 additions and 2320 deletions

Cargo.lock (generated): 122 changed lines
View File

@@ -46,7 +46,7 @@ dependencies = [
 "actix-tls",
 "actix-utils",
 "ahash",
-"base64",
+"base64 0.13.1",
 "bitflags",
 "brotli",
 "bytes",
@@ -331,9 +331,9 @@ dependencies = [

 [[package]]
 name = "async-trait"
-version = "0.1.60"
+version = "0.1.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677d1d8ab452a3936018a687b20e6f7cf5363d713b732b8884001317b0e48aa3"
+checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282"
 dependencies = [
 "proc-macro2 1.0.49",
 "quote 1.0.23",
@@ -387,6 +387,12 @@ version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"

+[[package]]
+name = "base64"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
+
 [[package]]
 name = "base64ct"
 version = "1.5.3"
@@ -1018,9 +1024,9 @@ dependencies = [

 [[package]]
 name = "deserr"
-version = "0.1.2"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb73133def0ebeb6f2e911a8ea3495cde53b00a5337dddc49bdb4b6c450ac8c7"
+checksum = "86290491a2b5c21a1a5083da8dae831006761258fabd5617309c3eebc5f89468"
 dependencies = [
 "deserr-internal",
 "serde-cs",
@@ -1029,9 +1035,9 @@ dependencies = [

 [[package]]
 name = "deserr-internal"
-version = "0.1.2"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f290f1f52fbf7d0afe91e6f71f3c831ae5b223a55cc396e819c5748ba73a7bfa"
+checksum = "7131de1c27581bc376a22166c9f570be91b76cb096be2f6aecf224c27bf7c49a"
 dependencies = [
 "convert_case 0.5.0",
 "proc-macro2 1.0.49",
@@ -1309,8 +1315,8 @@ dependencies = [

 [[package]]
 name = "filter-parser"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
 "nom",
 "nom_locate",
@@ -1328,8 +1334,8 @@ dependencies = [

 [[package]]
 name = "flatten-serde-json"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
 "serde_json",
 ]
@@ -1513,9 +1519,9 @@ dependencies = [

 [[package]]
 name = "glob"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
+checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"

 [[package]]
 name = "grenad"
@@ -1786,9 +1792,9 @@ dependencies = [

 [[package]]
 name = "insta"
-version = "1.24.1"
+version = "1.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb5686bd8e9239eabe90bb30a0c341bffd6fdc177fb556708f2cb792bf00352d"
+checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff"
 dependencies = [
 "console",
 "lazy_static",
@@ -1821,9 +1827,9 @@ dependencies = [

 [[package]]
 name = "ipnet"
-version = "2.7.0"
+version = "2.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11b0d96e660696543b251e58030cf9787df56da39dab19ad60eae7353040917e"
+checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146"

 [[package]]
 name = "is-terminal"
@@ -1893,8 +1899,8 @@ dependencies = [

 [[package]]
 name = "json-depth-checker"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
 "serde_json",
 ]
@@ -1905,7 +1911,7 @@ version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09f4f04699947111ec1733e71778d763555737579e44b85844cae8e1940a1828"
 dependencies = [
-"base64",
+"base64 0.13.1",
 "pem",
 "ring",
 "serde",
@@ -1942,9 +1948,9 @@ checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"

 [[package]]
 name = "libgit2-sys"
-version = "0.14.0+1.5.0"
+version = "0.14.1+1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "47a00859c70c8a4f7218e6d1cc32875c4b55f6799445b842b0d8ed5e4c3d959b"
+checksum = "4a07fb2692bc3593bda59de45a502bb3071659f2c515e28c71e728306b038e17"
 dependencies = [
 "cc",
 "libc",
@@ -1960,9 +1966,9 @@ checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"

 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.28"
+version = "0.1.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04d1c67deb83e6b75fa4fe3309e09cfeade12e7721d95322af500d3814ea60c9"
+checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174"
 dependencies = [
 "cc",
 "libc",
@@ -2371,7 +2377,7 @@ dependencies = [
 name = "meilisearch-auth"
 version = "1.0.0"
 dependencies = [
-"base64",
+"base64 0.13.1",
 "enum-iterator",
 "hmac",
 "meilisearch-types",
@@ -2442,8 +2448,8 @@ dependencies = [

 [[package]]
 name = "milli"
-version = "0.38.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.38.0#c3f4835e8e102586bd6d5eb1e55c4bba5e92f994"
+version = "0.39.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5"
 dependencies = [
 "bimap",
 "bincode",
@@ -2453,6 +2459,7 @@ dependencies = [
 "concat-arrays",
 "crossbeam-channel",
 "csv",
+"deserr",
 "either",
 "filter-parser",
 "flatten-serde-json",
@@ -2487,9 +2494,9 @@ dependencies = [

 [[package]]
 name = "mimalloc"
-version = "0.1.32"
+version = "0.1.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b2374e2999959a7b583e1811a1ddbf1d3a4b9496eceb9746f1192a59d871eca"
+checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1"
 dependencies = [
 "libmimalloc-sys",
 ]
@@ -2615,9 +2622,9 @@ dependencies = [

 [[package]]
 name = "object"
-version = "0.30.1"
+version = "0.30.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d864c91689fdc196779b98dba0aceac6118594c2df6ee5d943eb6a8df4d107a"
+checksum = "2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83"
 dependencies = [
 "memchr",
 ]
@@ -2748,7 +2755,7 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "03c64931a1a212348ec4f3b4362585eca7159d0d09cbdf4a7f74f02173596fd4"
 dependencies = [
-"base64",
+"base64 0.13.1",
 ]

 [[package]]
@@ -2767,9 +2774,9 @@ dependencies = [

 [[package]]
 name = "pest"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f6e86fb9e7026527a0d46bc308b841d73170ef8f443e1807f6ef88526a816d4"
+checksum = "4257b4a04d91f7e9e6290be5d3da4804dd5784fafde3a497d73eb2b4a158c30a"
 dependencies = [
 "thiserror",
 "ucd-trie",
@@ -2777,9 +2784,9 @@ dependencies = [

 [[package]]
 name = "pest_derive"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96504449aa860c8dcde14f9fba5c58dc6658688ca1fe363589d6327b8662c603"
+checksum = "241cda393b0cdd65e62e07e12454f1f25d57017dcc514b1514cd3c4645e3a0a6"
 dependencies = [
 "pest",
 "pest_generator",
@@ -2787,9 +2794,9 @@ dependencies = [

 [[package]]
 name = "pest_generator"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "798e0220d1111ae63d66cb66a5dcb3fc2d986d520b98e49e1852bfdb11d7c5e7"
+checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c"
 dependencies = [
 "pest",
 "pest_meta",
@@ -2800,13 +2807,13 @@ dependencies = [

 [[package]]
 name = "pest_meta"
-version = "2.5.2"
+version = "2.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "984298b75898e30a843e278a9f2452c31e349a073a0ce6fd950a12a74464e065"
+checksum = "0ef4f1332a8d4678b41966bb4cc1d0676880e84183a1ecc3f4b69f03e99c7a51"
 dependencies = [
 "once_cell",
 "pest",
-"sha1",
+"sha2",
 ]

 [[package]]
@@ -3108,9 +3115,9 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.7.0"
+version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
 dependencies = [
 "aho-corasick",
 "memchr",
@@ -3144,7 +3151,7 @@ version = "0.11.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68cc60575865c7831548863cc02356512e3f1dc2f3f82cb837d7fc4cc8f3c97c"
 dependencies = [
-"base64",
+"base64 0.13.1",
 "bytes",
 "encoding_rs",
 "futures-core",
@@ -3265,11 +3272,11 @@ dependencies = [

 [[package]]
 name = "rustls-pemfile"
-version = "1.0.1"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55"
+checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b"
 dependencies = [
-"base64",
+"base64 0.21.0",
 ]

 [[package]]
@@ -3323,9 +3330,9 @@ dependencies = [

 [[package]]
 name = "segment"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24fc91c898e0487ff3e471d0849bbaf7d38a00ff5e3531009d386b0bab9b6b12"
+checksum = "2bb93f3f738322ce8f33c4e80c251fb1560ca81f3a241355271fcb912eeb48e3"
 dependencies = [
 "async-trait",
 "reqwest",
@@ -3607,9 +3614,9 @@ dependencies = [

 [[package]]
 name = "sysinfo"
-version = "0.26.8"
+version = "0.26.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29ddf41e393a9133c81d5f0974195366bd57082deac6e0eb02ed39b8341c2bb6"
+checksum = "5c18a6156d1f27a9592ee18c1a846ca8dd5c258b7179fc193ae87c74ebb666f5"
 dependencies = [
 "cfg-if",
 "core-foundation-sys",
@@ -3733,9 +3740,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"

 [[package]]
 name = "tokio"
-version = "1.24.0"
+version = "1.24.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7125661431c26622a80ca5051a2f936c9a678318e0351007b0cc313143024e5c"
+checksum = "1d9f76183f91ecfb55e1d7d5602bd1d979e38a3a522fe900241cf195624d67ae"
 dependencies = [
 "autocfg",
 "bytes",
@@ -3836,9 +3843,9 @@ dependencies = [

 [[package]]
 name = "try-lock"
-version = "0.2.3"
+version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
+checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"

 [[package]]
 name = "typenum"
@@ -4310,10 +4317,11 @@ dependencies = [

 [[package]]
 name = "zstd-sys"
-version = "2.0.4+zstd.1.5.2"
+version = "2.0.5+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fa202f2ef00074143e219d15b62ffc317d17cc33909feac471c044087cad7b0"
+checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596"
 dependencies = [
 "cc",
 "libc",
+"pkg-config",
 ]

View File

@@ -249,17 +249,17 @@ pub(crate) mod test {
     pub fn create_test_settings() -> Settings<Checked> {
         let settings = Settings {
-            displayed_attributes: Setting::Set(vec![S("race"), S("name")]).into(),
-            searchable_attributes: Setting::Set(vec![S("name"), S("race")]).into(),
-            filterable_attributes: Setting::Set(btreeset! { S("race"), S("age") }).into(),
-            sortable_attributes: Setting::Set(btreeset! { S("age") }).into(),
-            ranking_rules: Setting::NotSet.into(),
-            stop_words: Setting::NotSet.into(),
-            synonyms: Setting::NotSet.into(),
-            distinct_attribute: Setting::NotSet.into(),
-            typo_tolerance: Setting::NotSet.into(),
-            faceting: Setting::NotSet.into(),
-            pagination: Setting::NotSet.into(),
+            displayed_attributes: Setting::Set(vec![S("race"), S("name")]),
+            searchable_attributes: Setting::Set(vec![S("name"), S("race")]),
+            filterable_attributes: Setting::Set(btreeset! { S("race"), S("age") }),
+            sortable_attributes: Setting::Set(btreeset! { S("age") }),
+            ranking_rules: Setting::NotSet,
+            stop_words: Setting::NotSet,
+            synonyms: Setting::NotSet,
+            distinct_attribute: Setting::NotSet,
+            typo_tolerance: Setting::NotSet,
+            faceting: Setting::NotSet,
+            pagination: Setting::NotSet,
             _kind: std::marker::PhantomData,
         };
         settings.check()

View File

@@ -1,3 +1,5 @@
+use std::str::FromStr;
+
 use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
 use crate::reader::{v5, v6, Document, UpdateFile};
 use crate::Result;
@@ -254,14 +256,14 @@ impl<T> From<v5::Setting<T>> for v6::Setting<T> {
 impl From<v5::ResponseError> for v6::ResponseError {
     fn from(error: v5::ResponseError) -> Self {
         let code = match error.error_code.as_ref() {
-            "index_creation_failed" => v6::Code::CreateIndex,
+            "index_creation_failed" => v6::Code::IndexCreationFailed,
             "index_already_exists" => v6::Code::IndexAlreadyExists,
             "index_not_found" => v6::Code::IndexNotFound,
             "invalid_index_uid" => v6::Code::InvalidIndexUid,
             "invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo,
             "invalid_state" => v6::Code::InvalidState,
-            "primary_key_inference_failed" => v6::Code::NoPrimaryKeyCandidateFound,
-            "index_primary_key_already_exists" => v6::Code::PrimaryKeyAlreadyPresent,
+            "primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound,
+            "index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists,
             "max_fields_limit_exceeded" => v6::Code::MaxFieldsLimitExceeded,
             "missing_document_id" => v6::Code::MissingDocumentId,
             "invalid_document_id" => v6::Code::InvalidDocumentId,
@@ -274,16 +276,16 @@ impl From<v5::ResponseError> for v6::ResponseError {
             "internal" => v6::Code::Internal,
             "invalid_geo_field" => v6::Code::InvalidDocumentGeoField,
             "invalid_ranking_rule" => v6::Code::InvalidSettingsRankingRules,
-            "invalid_store_file" => v6::Code::InvalidStore,
-            "invalid_api_key" => v6::Code::InvalidToken,
+            "invalid_store_file" => v6::Code::InvalidStoreFile,
+            "invalid_api_key" => v6::Code::InvalidApiKey,
             "missing_authorization_header" => v6::Code::MissingAuthorizationHeader,
             "no_space_left_on_device" => v6::Code::NoSpaceLeftOnDevice,
             "dump_not_found" => v6::Code::DumpNotFound,
             "task_not_found" => v6::Code::TaskNotFound,
             "payload_too_large" => v6::Code::PayloadTooLarge,
-            "unretrievable_document" => v6::Code::DocumentNotFound,
+            "unretrievable_document" => v6::Code::UnretrievableDocument,
             "unsupported_media_type" => v6::Code::UnsupportedMediaType,
-            "dump_already_processing" => v6::Code::DumpAlreadyInProgress,
+            "dump_already_processing" => v6::Code::DumpAlreadyProcessing,
             "dump_process_failed" => v6::Code::DumpProcessFailed,
             "invalid_content_type" => v6::Code::InvalidContentType,
             "missing_content_type" => v6::Code::MissingContentType,
@@ -315,7 +317,26 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
             searchable_attributes: settings.searchable_attributes.into(),
             filterable_attributes: settings.filterable_attributes.into(),
             sortable_attributes: settings.sortable_attributes.into(),
-            ranking_rules: settings.ranking_rules.into(),
+            ranking_rules: {
+                match settings.ranking_rules {
+                    v5::settings::Setting::Set(ranking_rules) => {
+                        let mut new_ranking_rules = vec![];
+                        for rule in ranking_rules {
+                            match v6::RankingRuleView::from_str(&rule) {
+                                Ok(new_rule) => {
+                                    new_ranking_rules.push(new_rule);
+                                }
+                                Err(_) => {
+                                    log::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
+                                }
+                            }
+                        }
+                        v6::Setting::Set(new_ranking_rules)
+                    }
+                    v5::settings::Setting::Reset => v6::Setting::Reset,
+                    v5::settings::Setting::NotSet => v6::Setting::NotSet,
+                }
+            },
             stop_words: settings.stop_words.into(),
             synonyms: settings.synonyms.into(),
             distinct_attribute: settings.distinct_attribute.into(),

View File

@@ -26,7 +26,7 @@ pub type Kind = crate::KindDump;
 pub type Details = meilisearch_types::tasks::Details;

 // everything related to the settings
-pub type Setting<T> = meilisearch_types::settings::Setting<T>;
+pub type Setting<T> = meilisearch_types::milli::update::Setting<T>;
 pub type TypoTolerance = meilisearch_types::settings::TypoSettings;
 pub type MinWordSizeForTypos = meilisearch_types::settings::MinWordSizeTyposSetting;
 pub type FacetingSettings = meilisearch_types::settings::FacetingSettings;
@@ -40,6 +40,7 @@ pub type IndexUid = meilisearch_types::index_uid::IndexUid;
 // everything related to the errors
 pub type ResponseError = meilisearch_types::error::ResponseError;
 pub type Code = meilisearch_types::error::Code;
+pub type RankingRuleView = meilisearch_types::settings::RankingRuleView;

 pub struct V6Reader {
     dump: TempDir,

View File

@@ -139,8 +139,8 @@ impl ErrorCode for Error {
         match self {
             Error::IndexNotFound(_) => Code::IndexNotFound,
             Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
-            Error::SwapDuplicateIndexesFound(_) => Code::InvalidDuplicateIndexesFound,
-            Error::SwapDuplicateIndexFound(_) => Code::InvalidDuplicateIndexesFound,
+            Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
+            Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
             Error::SwapIndexNotFound(_) => Code::InvalidSwapIndexes,
             Error::SwapIndexesNotFound(_) => Code::InvalidSwapIndexes,
             Error::InvalidTaskDate { field, .. } => (*field).into(),
@@ -150,8 +150,8 @@ impl ErrorCode for Error {
             Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledBy,
             Error::InvalidIndexUid { .. } => Code::InvalidIndexUid,
             Error::TaskNotFound(_) => Code::TaskNotFound,
-            Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery,
-            Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery,
+            Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
+            Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
             Error::Dump(e) => e.error_code(),
             Error::Milli(e) => e.error_code(),
             Error::ProcessBatchPanicked => Code::Internal,

View File

@@ -1,7 +1,7 @@
 use std::error::Error;

 use meilisearch_types::error::{Code, ErrorCode};
-use meilisearch_types::{internal_error, keys};
+use meilisearch_types::internal_error;

 pub type Result<T> = std::result::Result<T, AuthControllerError>;

@@ -11,8 +11,6 @@ pub enum AuthControllerError {
     ApiKeyNotFound(String),
     #[error("`uid` field value `{0}` is already an existing API key.")]
     ApiKeyAlreadyExists(String),
-    #[error(transparent)]
-    ApiKey(#[from] keys::Error),
     #[error("Internal error: {0}")]
     Internal(Box<dyn Error + Send + Sync + 'static>),
 }
@@ -27,7 +25,6 @@ internal_error!(
 impl ErrorCode for AuthControllerError {
     fn error_code(&self) -> Code {
         match self {
-            Self::ApiKey(e) => e.error_code(),
             Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
             Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
             Self::Internal(_) => Code::Internal,

View File

@@ -8,10 +8,9 @@ use std::path::Path;
 use std::sync::Arc;

 use error::{AuthControllerError, Result};
-use meilisearch_types::keys::{Action, Key};
+use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
 use meilisearch_types::star_or::StarOr;
 use serde::{Deserialize, Serialize};
-use serde_json::Value;
 pub use store::open_auth_store_env;
 use store::{generate_key_as_hexa, HeedAuthStore};
 use time::OffsetDateTime;
@@ -34,17 +33,18 @@ impl AuthController {
         Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
     }

-    pub fn create_key(&self, value: Value) -> Result<Key> {
-        let key = Key::create_from_value(value)?;
-        match self.store.get_api_key(key.uid)? {
-            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(key.uid.to_string())),
-            None => self.store.put_api_key(key),
+    pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
+        match self.store.get_api_key(create_key.uid)? {
+            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
+            None => self.store.put_api_key(create_key.to_key()),
         }
     }

-    pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
+    pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result<Key> {
         let mut key = self.get_key(uid)?;
-        key.update_from_value(value)?;
+        key.description = patch.description;
+        key.name = patch.name;
+        key.updated_at = OffsetDateTime::now_utc();
         self.store.put_api_key(key)
     }
View File

@@ -9,14 +9,14 @@ actix-web = { version = "4.2.1", default-features = false }
 anyhow = "1.0.65"
 convert_case = "0.6.0"
 csv = "1.1.6"
-deserr = { version = "0.1.2", features = ["serde-json"] }
+deserr = { version = "0.1.4", features = ["serde-json"] }
 either = { version = "1.6.1", features = ["serde"] }
 enum-iterator = "1.1.3"
 file-store = { path = "../file-store" }
 flate2 = "1.0.24"
 fst = "0.4.7"
 memmap2 = "0.5.7"
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.38.0", default-features = false }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.39.0", default-features = false }
 proptest = { version = "1.0.0", optional = true }
 proptest-derive = { version = "0.3.0", optional = true }
 roaring = { version = "0.10.0", features = ["serde"] }

View File

@@ -1,12 +1,17 @@
+use std::convert::Infallible;
+use std::marker::PhantomData;
 use std::{fmt, io};

 use actix_web::http::StatusCode;
 use actix_web::{self as aweb, HttpResponseBuilder};
 use aweb::rt::task::JoinError;
 use convert_case::Casing;
+use deserr::{DeserializeError, IntoValue, MergeWithError, ValuePointerRef};
 use milli::heed::{Error as HeedError, MdbError};
 use serde::{Deserialize, Serialize};

+use self::deserr_codes::MissingIndexUid;
+
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 #[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
@@ -31,7 +36,7 @@ impl ResponseError {
         Self {
             code: code.http(),
             message,
-            error_code: code.err_code().error_name.to_string(),
+            error_code: code.err_code().error_name,
             error_type: code.type_(),
             error_link: code.url(),
         }
@@ -48,7 +53,7 @@ impl std::error::Error for ResponseError {}

 impl<T> From<T> for ResponseError
 where
-    T: ErrorCode,
+    T: std::error::Error + ErrorCode,
 {
     fn from(other: T) -> Self {
         Self::from_msg(other.to_string(), other.error_code())
@@ -66,7 +71,7 @@ impl aweb::error::ResponseError for ResponseError {
     }
 }

-pub trait ErrorCode: std::error::Error {
+pub trait ErrorCode {
     fn error_code(&self) -> Code;

     /// returns the HTTP status code associated with the error
@@ -111,461 +116,186 @@ impl fmt::Display for ErrorType {
     }
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum Code {
-    // error related to your setup
-    IoError,
-    NoSpaceLeftOnDevice,
-    TooManyOpenFiles,
-
-    // index related error
-    CreateIndex,
-    IndexAlreadyExists,
-    InvalidIndexPrimaryKey,
-    IndexNotFound,
-    InvalidIndexUid,
-    MissingIndexUid,
-    InvalidMinWordLengthForTypo,
-    InvalidIndexLimit,
-    InvalidIndexOffset,
-    DuplicateIndexFound,
-
-    // invalid state error
-    InvalidState,
-    NoPrimaryKeyCandidateFound,
-    MultiplePrimaryKeyCandidatesFound,
-    PrimaryKeyAlreadyPresent,
-
-    MaxFieldsLimitExceeded,
-    MissingDocumentId,
-    InvalidDocumentId,
-
-    // Invalid swap-indexes
-    InvalidSwapIndexes,
-    InvalidDuplicateIndexesFound,
-
-    // Invalid settings update request
-    InvalidSettingsDisplayedAttributes,
-    InvalidSettingsSearchableAttributes,
-    InvalidSettingsFilterableAttributes,
-    InvalidSettingsSortableAttributes,
-    InvalidSettingsRankingRules,
-    InvalidSettingsStopWords,
-    InvalidSettingsSynonyms,
-    InvalidSettingsDistinctAttribute,
-    InvalidSettingsTypoTolerance,
-    InvalidSettingsFaceting,
-    InvalidSettingsPagination,
-
-    // Invalid search request
-    InvalidSearchQ,
-    InvalidSearchOffset,
-    InvalidSearchLimit,
-    InvalidSearchPage,
-    InvalidSearchHitsPerPage,
-    InvalidSearchAttributesToRetrieve,
-    InvalidSearchAttributesToCrop,
-    InvalidSearchCropLength,
-    InvalidSearchAttributesToHighlight,
-    InvalidSearchShowMatchesPosition,
-    InvalidSearchFilter,
-    InvalidSearchSort,
-    InvalidSearchFacets,
-    InvalidSearchHighlightPreTag,
-    InvalidSearchHighlightPostTag,
-    InvalidSearchCropMarker,
-    InvalidSearchMatchingStrategy,
-
-    // Related to the tasks
-    InvalidTaskUids,
-    InvalidTaskTypes,
-    InvalidTaskStatuses,
-    InvalidTaskCanceledBy,
-    InvalidTaskLimit,
-    InvalidTaskFrom,
-    InvalidTaskBeforeEnqueuedAt,
-    InvalidTaskAfterEnqueuedAt,
-    InvalidTaskBeforeStartedAt,
-    InvalidTaskAfterStartedAt,
-    InvalidTaskBeforeFinishedAt,
-    InvalidTaskAfterFinishedAt,
-
-    // Documents API
-    InvalidDocumentFields,
-    InvalidDocumentLimit,
-    InvalidDocumentOffset,
-
-    BadParameter,
-    BadRequest,
-    DatabaseSizeLimitReached,
-    DocumentNotFound,
-    Internal,
-    InvalidDocumentGeoField,
-    InvalidStore,
-    InvalidToken,
-    MissingAuthorizationHeader,
-    MissingMasterKey,
-    DumpNotFound,
-    TaskNotFound,
-    TaskDeletionWithEmptyQuery,
-    TaskCancelationWithEmptyQuery,
-    PayloadTooLarge,
-    UnsupportedMediaType,
-    DumpAlreadyInProgress,
-    DumpProcessFailed,
-    // Only used when importing a dump
-    UnretrievableErrorCode,
-
-    InvalidContentType,
-    MissingContentType,
-    MalformedPayload,
-    MissingPayload,
-
-    ApiKeyNotFound,
-    MissingApiKeyActions,
-    MissingApiKeyExpiresAt,
-    MissingApiKeyIndexes,
-    InvalidApiKeyOffset,
-    InvalidApiKeyLimit,
-    InvalidApiKeyActions,
-    InvalidApiKeyIndexes,
-    InvalidApiKeyExpiresAt,
-    InvalidApiKeyDescription,
-    InvalidApiKeyName,
-    InvalidApiKeyUid,
-    ImmutableField,
-    ApiKeyAlreadyExists,
-}
-
-impl Code {
-    /// associate a `Code` variant to the actual ErrCode
-    fn err_code(&self) -> ErrCode {
-        use Code::*;
-
-        match self {
-            // related to the setup
-            IoError => ErrCode::system("io_error", StatusCode::INTERNAL_SERVER_ERROR),
-            TooManyOpenFiles => {
-                ErrCode::system("too_many_open_files", StatusCode::INTERNAL_SERVER_ERROR)
-            }
-            NoSpaceLeftOnDevice => {
-                ErrCode::system("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR)
-            }
-
-            // index related errors
-            // create index is thrown on internal error while creating an index.
-            CreateIndex => {
-                ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
-            }
-            IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
-            // thrown when requesting an unexisting index
-            IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
-            InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
-            MissingIndexUid => ErrCode::invalid("missing_index_uid", StatusCode::BAD_REQUEST),
-            InvalidIndexPrimaryKey => {
-                ErrCode::invalid("invalid_index_primary_key", StatusCode::BAD_REQUEST)
-            }
-            InvalidIndexLimit => ErrCode::invalid("invalid_index_limit", StatusCode::BAD_REQUEST),
-            InvalidIndexOffset => ErrCode::invalid("invalid_index_offset", StatusCode::BAD_REQUEST),
-
-            // invalid state error
-            InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
-            // thrown when no primary key has been set
-            NoPrimaryKeyCandidateFound => {
-                ErrCode::invalid("index_primary_key_no_candidate_found", StatusCode::BAD_REQUEST)
-            }
-            MultiplePrimaryKeyCandidatesFound => ErrCode::invalid(
-                "index_primary_key_multiple_candidates_found",
-                StatusCode::BAD_REQUEST,
-            ),
-            // error thrown when trying to set an already existing primary key
-            PrimaryKeyAlreadyPresent => {
-                ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
-            }
-
-            // invalid database
-            InvalidStore => {
-                ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
-            }
-
-            // invalid document
-            MaxFieldsLimitExceeded => {
-                ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
-            }
-            MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
-            InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),
-
-            BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
-            BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
-            DatabaseSizeLimitReached => {
-                ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
-            }
-            DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
-            Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
-            InvalidDocumentGeoField => {
-                ErrCode::invalid("invalid_document_geo_field", StatusCode::BAD_REQUEST)
-            }
-            InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
-            MissingAuthorizationHeader => {
-                ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
-            }
-            MissingMasterKey => {
-                ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED)
-            }
-            TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
-            TaskDeletionWithEmptyQuery => {
-                ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
-            }
-            TaskCancelationWithEmptyQuery => {
-                ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
-            }
-            DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
-            PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
-            UnsupportedMediaType => {
-                ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
-            }
-
-            // error related to dump
-            DumpAlreadyInProgress => {
-                ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
-            }
-            DumpProcessFailed => {
-                ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
-            }
-            MissingContentType => {
-                ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
-            }
-            MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
-            InvalidContentType => {
-                ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
-            }
-            MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
-            // This one can only happen when importing a dump and encountering an unknown code in the task queue.
-            UnretrievableErrorCode => {
-                ErrCode::invalid("unretrievable_error_code", StatusCode::BAD_REQUEST)
-            }
-
-            // error related to keys
-            ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
-            MissingApiKeyExpiresAt => {
-                ErrCode::invalid("missing_api_key_expires_at", StatusCode::BAD_REQUEST)
-            }
-            MissingApiKeyActions => {
-                ErrCode::invalid("missing_api_key_actions", StatusCode::BAD_REQUEST)
-            }
-            MissingApiKeyIndexes => {
-                ErrCode::invalid("missing_api_key_indexes", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyOffset => {
-                ErrCode::invalid("invalid_api_key_offset", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyLimit => {
-                ErrCode::invalid("invalid_api_key_limit", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyActions => {
-                ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyIndexes => {
-                ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyExpiresAt => {
-                ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyDescription => {
-                ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
-            }
-            InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST),
-            InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST),
-            ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT),
-            ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST),
-            InvalidMinWordLengthForTypo => {
-                ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
-            }
-            DuplicateIndexFound => {
-                ErrCode::invalid("duplicate_index_found", StatusCode::BAD_REQUEST)
-            }
-
-            // Swap indexes error
-            InvalidSwapIndexes => ErrCode::invalid("invalid_swap_indexes", StatusCode::BAD_REQUEST),
-            InvalidDuplicateIndexesFound => {
-                ErrCode::invalid("invalid_swap_duplicate_index_found", StatusCode::BAD_REQUEST)
-            }
-
-            // Invalid settings
-            InvalidSettingsDisplayedAttributes => {
-                ErrCode::invalid("invalid_settings_displayed_attributes", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsSearchableAttributes => {
-                ErrCode::invalid("invalid_settings_searchable_attributes", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsFilterableAttributes => {
-                ErrCode::invalid("invalid_settings_filterable_attributes", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsSortableAttributes => {
-                ErrCode::invalid("invalid_settings_sortable_attributes", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsRankingRules => {
-                ErrCode::invalid("invalid_settings_ranking_rules", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsStopWords => {
-                ErrCode::invalid("invalid_settings_stop_words", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsSynonyms => {
-                ErrCode::invalid("invalid_settings_synonyms", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsDistinctAttribute => {
-                ErrCode::invalid("invalid_settings_distinct_attribute", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsTypoTolerance => {
-                ErrCode::invalid("invalid_settings_typo_tolerance", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsFaceting => {
-                ErrCode::invalid("invalid_settings_faceting", StatusCode::BAD_REQUEST)
-            }
-            InvalidSettingsPagination => {
-                ErrCode::invalid("invalid_settings_pagination", StatusCode::BAD_REQUEST)
-            }
-
-            // Invalid search
-            InvalidSearchQ => ErrCode::invalid("invalid_search_q", StatusCode::BAD_REQUEST),
-            InvalidSearchOffset => {
-                ErrCode::invalid("invalid_search_offset", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchLimit => ErrCode::invalid("invalid_search_limit", StatusCode::BAD_REQUEST),
-            InvalidSearchPage => ErrCode::invalid("invalid_search_page", StatusCode::BAD_REQUEST),
-            InvalidSearchHitsPerPage => {
-                ErrCode::invalid("invalid_search_hits_per_page", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchAttributesToRetrieve => {
-                ErrCode::invalid("invalid_search_attributes_to_retrieve", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchAttributesToCrop => {
-                ErrCode::invalid("invalid_search_attributes_to_crop", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchCropLength => {
-                ErrCode::invalid("invalid_search_crop_length", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchAttributesToHighlight => {
-                ErrCode::invalid("invalid_search_attributes_to_highlight", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchShowMatchesPosition => {
-                ErrCode::invalid("invalid_search_show_matches_position", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchFilter => {
-                ErrCode::invalid("invalid_search_filter", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchSort => ErrCode::invalid("invalid_search_sort", StatusCode::BAD_REQUEST),
-            InvalidSearchFacets => {
-                ErrCode::invalid("invalid_search_facets", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchHighlightPreTag => {
-                ErrCode::invalid("invalid_search_highlight_pre_tag", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchHighlightPostTag => {
-                ErrCode::invalid("invalid_search_highlight_post_tag", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchCropMarker => {
-                ErrCode::invalid("invalid_search_crop_marker", StatusCode::BAD_REQUEST)
-            }
-            InvalidSearchMatchingStrategy => {
-                ErrCode::invalid("invalid_search_matching_strategy", StatusCode::BAD_REQUEST)
-            }
-
-            // Related to the tasks
-            InvalidTaskUids => ErrCode::invalid("invalid_task_uids", StatusCode::BAD_REQUEST),
-            InvalidTaskTypes => ErrCode::invalid("invalid_task_types", StatusCode::BAD_REQUEST),
-            InvalidTaskStatuses => {
-                ErrCode::invalid("invalid_task_statuses", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskCanceledBy => {
-                ErrCode::invalid("invalid_task_canceled_by", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskLimit => ErrCode::invalid("invalid_task_limit", StatusCode::BAD_REQUEST),
-            InvalidTaskFrom => ErrCode::invalid("invalid_task_from", StatusCode::BAD_REQUEST),
-            InvalidTaskBeforeEnqueuedAt => {
-                ErrCode::invalid("invalid_task_before_enqueued_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskAfterEnqueuedAt => {
-                ErrCode::invalid("invalid_task_after_enqueued_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskBeforeStartedAt => {
-                ErrCode::invalid("invalid_task_before_started_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskAfterStartedAt => {
-                ErrCode::invalid("invalid_task_after_started_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskBeforeFinishedAt => {
-                ErrCode::invalid("invalid_task_before_finished_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidTaskAfterFinishedAt => {
-                ErrCode::invalid("invalid_task_after_finished_at", StatusCode::BAD_REQUEST)
-            }
-            InvalidDocumentFields => {
-                ErrCode::invalid("invalid_document_fields", StatusCode::BAD_REQUEST)
-            }
-            InvalidDocumentLimit => {
-                ErrCode::invalid("invalid_document_limit", StatusCode::BAD_REQUEST)
-            }
-            InvalidDocumentOffset => {
-                ErrCode::invalid("invalid_document_offset", StatusCode::BAD_REQUEST)
-            }
-        }
-    }
-
-    /// return the HTTP status code associated with the `Code`
-    fn http(&self) -> StatusCode {
-        self.err_code().status_code
-    }
-
-    /// return error name, used as error code
-    fn name(&self) -> String {
-        self.err_code().error_name.to_string()
-    }
-
-    /// return the error type
-    fn type_(&self) -> String {
-        self.err_code().error_type.to_string()
-    }
-
-    /// return the doc url associated with the error
-    fn url(&self) -> String {
-        format!(
-            "https://docs.meilisearch.com/errors#{}",
-            self.name().to_case(convert_case::Case::Kebab)
-        )
-    }
-}
+macro_rules! make_error_codes {
+    ($($code_ident:ident, $err_type:ident, $status:ident);*) => {
+        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+        pub enum Code {
+            $($code_ident),*
+        }
+
+        impl Code {
+            /// associate a `Code` variant to the actual ErrCode
+            fn err_code(&self) -> ErrCode {
+                match self {
+                    $(
+                        Code::$code_ident => {
+                            ErrCode::$err_type( stringify!($code_ident).to_case(convert_case::Case::Snake), StatusCode::$status)
+                        }
+                    )*
+                }
+            }
+
+            /// return the HTTP status code associated with the `Code`
+            fn http(&self) -> StatusCode {
+                self.err_code().status_code
+            }
+
+            /// return error name, used as error code
+            fn name(&self) -> String {
+                self.err_code().error_name.to_string()
+            }
+
+            /// return the error type
+            fn type_(&self) -> String {
+                self.err_code().error_type.to_string()
+            }
+
+            /// return the doc url associated with the error
+            fn url(&self) -> String {
+                format!(
+                    "https://docs.meilisearch.com/errors#{}",
+                    self.name().to_case(convert_case::Case::Kebab)
+                )
+            }
+        }
+
+        pub mod deserr_codes {
+            use super::{Code, ErrorCode};
+            $(
+                #[derive(Default)]
+                pub struct $code_ident;
+                impl ErrorCode for $code_ident {
+                    fn error_code(&self) -> Code {
+                        Code::$code_ident
+                    }
+                }
+            )*
+        }
+    }
+}
+
+make_error_codes! {
+    ApiKeyAlreadyExists , invalid , CONFLICT ;
+    ApiKeyNotFound , invalid , NOT_FOUND ;
+    BadParameter , invalid , BAD_REQUEST;
+    BadRequest , invalid , BAD_REQUEST;
+    DatabaseSizeLimitReached , internal , INTERNAL_SERVER_ERROR;
+    DocumentNotFound , invalid , NOT_FOUND;
+    DumpAlreadyProcessing , invalid , CONFLICT;
+    DumpNotFound , invalid , NOT_FOUND;
+    DumpProcessFailed , internal , INTERNAL_SERVER_ERROR;
+    DuplicateIndexFound , invalid , BAD_REQUEST;
+    ImmutableField , invalid , BAD_REQUEST;
+    IndexAlreadyExists , invalid , CONFLICT ;
+    IndexCreationFailed , internal , INTERNAL_SERVER_ERROR;
+    IndexNotFound , invalid , NOT_FOUND;
+    IndexPrimaryKeyAlreadyExists , invalid , BAD_REQUEST ;
+    IndexPrimaryKeyNoCandidateFound , invalid , BAD_REQUEST ;
+    IndexPrimaryKeyMultipleCandidatesFound, invalid , BAD_REQUEST;
+    Internal , internal , INTERNAL_SERVER_ERROR ;
+    InvalidApiKeyActions , invalid , BAD_REQUEST ;
+    InvalidApiKeyDescription , invalid , BAD_REQUEST ;
+    InvalidApiKeyExpiresAt , invalid , BAD_REQUEST ;
+    InvalidApiKeyIndexes , invalid , BAD_REQUEST ;
+    InvalidApiKeyLimit , invalid , BAD_REQUEST ;
+    InvalidApiKeyName , invalid , BAD_REQUEST ;
+    InvalidApiKeyOffset , invalid , BAD_REQUEST ;
+    InvalidApiKeyUid , invalid , BAD_REQUEST ;
+    InvalidApiKey , authentication, FORBIDDEN ;
+    InvalidContentType , invalid , UNSUPPORTED_MEDIA_TYPE ;
+    InvalidDocumentFields , invalid , BAD_REQUEST ;
+    InvalidDocumentGeoField , invalid , BAD_REQUEST ;
+    InvalidDocumentId , invalid , BAD_REQUEST ;
+    InvalidDocumentLimit , invalid , BAD_REQUEST ;
+    InvalidDocumentOffset , invalid , BAD_REQUEST ;
+    InvalidIndexLimit , invalid , BAD_REQUEST ;
+    InvalidIndexOffset , invalid , BAD_REQUEST ;
+    InvalidIndexPrimaryKey , invalid , BAD_REQUEST ;
+    InvalidIndexUid , invalid , BAD_REQUEST ;
+    InvalidMinWordLengthForTypo , invalid , BAD_REQUEST ;
+    InvalidSearchAttributesToCrop , invalid , BAD_REQUEST ;
+    InvalidSearchAttributesToHighlight , invalid , BAD_REQUEST ;
+    InvalidSearchAttributesToRetrieve , invalid , BAD_REQUEST ;
+    InvalidSearchCropLength , invalid , BAD_REQUEST ;
+    InvalidSearchCropMarker , invalid , BAD_REQUEST ;
+    InvalidSearchFacets , invalid , BAD_REQUEST ;
+    InvalidSearchFilter , invalid , BAD_REQUEST ;
+    InvalidSearchHighlightPostTag , invalid , BAD_REQUEST ;
+    InvalidSearchHighlightPreTag , invalid , BAD_REQUEST ;
+    InvalidSearchHitsPerPage , invalid , BAD_REQUEST ;
+    InvalidSearchLimit , invalid , BAD_REQUEST ;
+    InvalidSearchMatchingStrategy , invalid , BAD_REQUEST ;
+    InvalidSearchOffset , invalid , BAD_REQUEST ;
+    InvalidSearchPage , invalid , BAD_REQUEST ;
+    InvalidSearchQ , invalid , BAD_REQUEST ;
+    InvalidSearchShowMatchesPosition , invalid , BAD_REQUEST ;
+    InvalidSearchSort , invalid , BAD_REQUEST ;
+    InvalidSettingsDisplayedAttributes , invalid , BAD_REQUEST ;
+    InvalidSettingsDistinctAttribute , invalid , BAD_REQUEST ;
+    InvalidSettingsFaceting , invalid , BAD_REQUEST ;
+    InvalidSettingsFilterableAttributes , invalid , BAD_REQUEST ;
+    InvalidSettingsPagination , invalid , BAD_REQUEST ;
+    InvalidSettingsRankingRules , invalid , BAD_REQUEST ;
+    InvalidSettingsSearchableAttributes , invalid , BAD_REQUEST ;
+    InvalidSettingsSortableAttributes , invalid , BAD_REQUEST ;
+    InvalidSettingsStopWords , invalid , BAD_REQUEST ;
+    InvalidSettingsSynonyms , invalid , BAD_REQUEST ;
+    InvalidSettingsTypoTolerance , invalid , BAD_REQUEST ;
+    InvalidState , internal , INTERNAL_SERVER_ERROR ;
+    InvalidStoreFile , internal , INTERNAL_SERVER_ERROR ;
+    InvalidSwapDuplicateIndexFound , invalid , BAD_REQUEST ;
+    InvalidSwapIndexes , invalid , BAD_REQUEST ;
+    InvalidTaskAfterEnqueuedAt , invalid , BAD_REQUEST ;
+    InvalidTaskAfterFinishedAt , invalid , BAD_REQUEST ;
+    InvalidTaskAfterStartedAt , invalid , BAD_REQUEST ;
+    InvalidTaskBeforeEnqueuedAt , invalid , BAD_REQUEST ;
+    InvalidTaskBeforeFinishedAt , invalid , BAD_REQUEST ;
+    InvalidTaskBeforeStartedAt , invalid , BAD_REQUEST ;
+    InvalidTaskCanceledBy , invalid , BAD_REQUEST ;
+    InvalidTaskFrom , invalid , BAD_REQUEST ;
+    InvalidTaskLimit , invalid , BAD_REQUEST ;
+    InvalidTaskStatuses , invalid , BAD_REQUEST ;
+    InvalidTaskTypes , invalid , BAD_REQUEST ;
+    InvalidTaskUids , invalid , BAD_REQUEST ;
+    IoError , system , UNPROCESSABLE_ENTITY;
+    MalformedPayload , invalid , BAD_REQUEST ;
+    MaxFieldsLimitExceeded , invalid , BAD_REQUEST ;
+    MissingApiKeyActions , invalid , BAD_REQUEST ;
+    MissingApiKeyExpiresAt , invalid , BAD_REQUEST ;
+    MissingApiKeyIndexes , invalid , BAD_REQUEST ;
+    MissingAuthorizationHeader , authentication, UNAUTHORIZED ;
+    MissingContentType , invalid , UNSUPPORTED_MEDIA_TYPE ;
+    MissingDocumentId , invalid , BAD_REQUEST ;
+    MissingIndexUid , invalid , BAD_REQUEST ;
+    MissingMasterKey , authentication, UNAUTHORIZED ;
+    MissingPayload , invalid , BAD_REQUEST ;
+    MissingTaskFilters , invalid , BAD_REQUEST ;
+    NoSpaceLeftOnDevice , system , UNPROCESSABLE_ENTITY;
+    PayloadTooLarge , invalid , PAYLOAD_TOO_LARGE ;
+    TaskNotFound , invalid , NOT_FOUND ;
+    TooManyOpenFiles , system , UNPROCESSABLE_ENTITY ;
+    UnretrievableDocument , internal , BAD_REQUEST ;
+    UnretrievableErrorCode , invalid , BAD_REQUEST ;
+    UnsupportedMediaType , invalid , UNSUPPORTED_MEDIA_TYPE
+}

 /// Internal structure providing a convenient way to create error codes
 struct ErrCode {
     status_code: StatusCode,
     error_type: ErrorType,
-    error_name: &'static str,
+    error_name: String,
 }

 impl ErrCode {
-    fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn authentication(error_name: String, status_code: StatusCode) -> ErrCode {
         ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
     }

-    fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn internal(error_name: String, status_code: StatusCode) -> ErrCode {
         ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
     }

-    fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn invalid(error_name: String, status_code: StatusCode) -> ErrCode {
         ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
     }

-    fn system(error_name: &'static str, status_code: StatusCode) -> ErrCode {
+    fn system(error_name: String, status_code: StatusCode) -> ErrCode {
         ErrCode { status_code, error_name, error_type: ErrorType::System }
     }
 }
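
Note on the naming convention baked into the macro above (illustrative, not from the diff): the public error name is the variant identifier in snake_case and the docs anchor is its kebab-case form, both derived with convert_case exactly as in err_code() and url(). A standalone sketch of that convention:

use convert_case::{Case, Casing};

fn main() {
    // Same transformation the macro applies to e.g. the `MissingIndexUid` variant.
    let variant = "MissingIndexUid";
    let error_name = variant.to_case(Case::Snake);
    assert_eq!(error_name, "missing_index_uid");
    let url = format!("https://docs.meilisearch.com/errors#{}", error_name.to_case(Case::Kebab));
    assert_eq!(url, "https://docs.meilisearch.com/errors#missing-index-uid");
}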
@@ -591,7 +321,7 @@ impl ErrorCode for milli::Error {
                 | UserError::DocumentLimitReached
                 | UserError::AccessingSoftDeletedDocument { .. }
                 | UserError::UnknownInternalDocumentId { .. } => Code::Internal,
-                UserError::InvalidStoreFile => Code::InvalidStore,
+                UserError::InvalidStoreFile => Code::InvalidStoreFile,
                 UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
                 UserError::MaxDatabaseSizeReached => Code::DatabaseSizeLimitReached,
                 UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
@@ -600,11 +330,11 @@ impl ErrorCode for milli::Error {
                 UserError::InvalidDocumentId { .. } | UserError::TooManyDocumentIds { .. } => {
                     Code::InvalidDocumentId
                 }
-                UserError::NoPrimaryKeyCandidateFound => Code::NoPrimaryKeyCandidateFound,
+                UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
                 UserError::MultiplePrimaryKeyCandidatesFound { .. } => {
-                    Code::MultiplePrimaryKeyCandidatesFound
+                    Code::IndexPrimaryKeyMultipleCandidatesFound
                 }
-                UserError::PrimaryKeyCannotBeChanged(_) => Code::PrimaryKeyAlreadyPresent,
+                UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists,
                 UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
                 UserError::InvalidFacetsDistribution { .. } => Code::BadRequest,
                 UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
@@ -639,7 +369,7 @@ impl ErrorCode for HeedError {
     fn error_code(&self) -> Code {
         match self {
             HeedError::Mdb(MdbError::MapFull) => Code::DatabaseSizeLimitReached,
-            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStore,
+            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStoreFile,
             HeedError::Io(e) => e.error_code(),
             HeedError::Mdb(_)
             | HeedError::Encoding
@ -680,6 +410,82 @@ mod strategy {
} }
} }
pub struct DeserrError<C: ErrorCode = deserr_codes::BadRequest> {
pub msg: String,
pub code: Code,
_phantom: PhantomData<C>,
}
impl<C: ErrorCode> std::fmt::Debug for DeserrError<C> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
}
}
impl<C: ErrorCode> std::fmt::Display for DeserrError<C> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.msg)
}
}
impl<C: ErrorCode> std::error::Error for DeserrError<C> {}
impl<C: ErrorCode> ErrorCode for DeserrError<C> {
fn error_code(&self) -> Code {
self.code
}
}
impl<C1: ErrorCode, C2: ErrorCode> MergeWithError<DeserrError<C2>> for DeserrError<C1> {
fn merge(
_self_: Option<Self>,
other: DeserrError<C2>,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
}
}
impl DeserrError<MissingIndexUid> {
pub fn missing_index_uid(field: &str, location: ValuePointerRef) -> Self {
let x = unwrap_any(Self::error::<Infallible>(
None,
deserr::ErrorKind::MissingField { field },
location,
));
Self { msg: x.msg, code: MissingIndexUid.error_code(), _phantom: PhantomData }
}
}
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let msg = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(DeserrError { msg, code: C::default().error_code(), _phantom: PhantomData })
}
}
pub struct TakeErrorMessage<T>(pub T);
impl<C: Default + ErrorCode, T> MergeWithError<TakeErrorMessage<T>> for DeserrError<C>
where
T: std::error::Error,
{
fn merge(
_self_: Option<Self>,
other: TakeErrorMessage<T>,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
DeserrError::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.0.to_string() },
merge_location,
)
}
}
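For orientation, a hypothetical sketch (not part of this commit) of how the pieces above are meant to be combined: `DeserrError<C>` picks the error code per field, and `TakeErrorMessage` lifts a plain parsing error into that scheme. The struct, field names and `parse_usize` helper are made up; the attribute syntax mirrors what this diff uses elsewhere.

// Hypothetical sketch only, mirroring the deserr attribute syntax used in this diff.
use std::str::FromStr;
use deserr::DeserializeFromValue;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{DeserrError, TakeErrorMessage};

fn parse_usize(s: &str) -> Result<usize, TakeErrorMessage<std::num::ParseIntError>> {
    usize::from_str(s).map_err(TakeErrorMessage)
}

#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
struct ExampleQuery {
    // Unknown fields or type errors fall back to the struct-level `DeserrError`,
    // i.e. the generic `BadRequest` code.
    name: Option<String>,
    // Errors on this field are reported under `InvalidDocumentLimit`, and the
    // string-to-usize conversion error is carried through `TakeErrorMessage`.
    #[deserr(error = DeserrError<InvalidDocumentLimit>, default, from(&String) = parse_usize -> TakeErrorMessage<std::num::ParseIntError>)]
    limit: usize,
}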
#[macro_export]
macro_rules! internal_error {
($target:ty : $($other:path), *) => {


@ -1,22 +1,105 @@
use std::convert::Infallible;
use std::fmt::Display;
use std::hash::Hash;
use std::str::FromStr;
use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValuePointerRef};
use enum_iterator::Sequence;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;
-use crate::error::{Code, ErrorCode};
+use crate::error::deserr_codes::*;
+use crate::error::{unwrap_any, Code, DeserrError, ErrorCode, TakeErrorMessage};
use crate::index_uid::{IndexUid, IndexUidFormatError};
use crate::star_or::StarOr;
-type Result<T> = std::result::Result<T, Error>;
pub type KeyId = Uuid;
impl<C: Default + ErrorCode> MergeWithError<IndexUidFormatError> for DeserrError<C> {
fn merge(
_self_: Option<Self>,
other: IndexUidFormatError,
merge_location: deserr::ValuePointerRef,
) -> std::result::Result<Self, Self> {
DeserrError::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
fn parse_uuid_from_str(s: &str) -> Result<Uuid, TakeErrorMessage<uuid::Error>> {
Uuid::parse_str(s).map_err(TakeErrorMessage)
}
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct CreateApiKey {
#[deserr(error = DeserrError<InvalidApiKeyDescription>)]
pub description: Option<String>,
#[deserr(error = DeserrError<InvalidApiKeyName>)]
pub name: Option<String>,
#[deserr(default = Uuid::new_v4(), error = DeserrError<InvalidApiKeyUid>, from(&String) = parse_uuid_from_str -> TakeErrorMessage<uuid::Error>)]
pub uid: KeyId,
#[deserr(error = DeserrError<InvalidApiKeyActions>)]
pub actions: Vec<Action>,
#[deserr(error = DeserrError<InvalidApiKeyIndexes>)]
pub indexes: Vec<StarOr<IndexUid>>,
#[deserr(error = DeserrError<InvalidApiKeyExpiresAt>, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage<ParseOffsetDateTimeError>)]
pub expires_at: Option<OffsetDateTime>,
}
impl CreateApiKey {
pub fn to_key(self) -> Key {
let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self;
let now = OffsetDateTime::now_utc();
Key {
description,
name,
uid,
actions,
indexes,
expires_at,
created_at: now,
updated_at: now,
}
}
}
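As an illustration (not part of the diff), a body that `CreateApiKey` accepts; the key name and index are placeholders:

// Illustrative payload only; field names follow the camelCase renaming above.
#[cfg(test)]
#[test]
fn create_api_key_payload_sketch() {
    let body = serde_json::json!({
        "name": "Search-only key",
        "description": null,
        "actions": ["search"],
        "indexes": ["movies"],
        "expiresAt": "2030-01-01T00:00:00Z"
    });
    // `uid` may be omitted; it defaults to a fresh UUID v4.
    assert!(body.get("uid").is_none());
}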
fn deny_immutable_fields_api_key(
field: &str,
accepted: &[&str],
location: ValuePointerRef,
) -> DeserrError {
let mut error = unwrap_any(DeserrError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
));
error.code = match field {
"uid" => Code::ImmutableField,
"actions" => Code::ImmutableField,
"indexes" => Code::ImmutableField,
"expiresAt" => Code::ImmutableField,
"createdAt" => Code::ImmutableField,
"updatedAt" => Code::ImmutableField,
_ => Code::BadRequest,
};
error
}
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
pub struct PatchApiKey {
#[deserr(error = DeserrError<InvalidApiKeyDescription>)]
pub description: Option<String>,
#[deserr(error = DeserrError<InvalidApiKeyName>)]
pub name: Option<String>,
}
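For context (not part of the diff): with `deny_unknown_fields = deny_immutable_fields_api_key`, a PATCH body touching `uid`, `actions`, `indexes`, `expiresAt`, `createdAt` or `updatedAt` is rejected under `Code::ImmutableField`, any other unknown key falls back to `Code::BadRequest`, and only `name` and `description` stay patchable. A hypothetical pair of request bodies:

// Hypothetical bodies for PATCH /keys/{key} (not part of this commit).
#[cfg(test)]
#[test]
fn patch_api_key_bodies_sketch() {
    // Accepted: only mutable fields are present.
    let _ok = serde_json::json!({ "name": "renamed key", "description": "same key, new label" });
    // Rejected with `Code::ImmutableField`: `actions` can only be set at creation time.
    let _rejected = serde_json::json!({ "actions": ["search"] });
}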
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct Key {
#[serde(skip_serializing_if = "Option::is_none")]
@ -35,100 +118,6 @@ pub struct Key {
}
impl Key {
pub fn create_from_value(value: Value) -> Result<Self> {
let name = match value.get("name") {
None | Some(Value::Null) => None,
Some(des) => from_value(des.clone())
.map(Some)
.map_err(|_| Error::InvalidApiKeyName(des.clone()))?,
};
let description = match value.get("description") {
None | Some(Value::Null) => None,
Some(des) => from_value(des.clone())
.map(Some)
.map_err(|_| Error::InvalidApiKeyDescription(des.clone()))?,
};
let uid = value.get("uid").map_or_else(
|| Ok(Uuid::new_v4()),
|uid| from_value(uid.clone()).map_err(|_| Error::InvalidApiKeyUid(uid.clone())),
)?;
let actions = value
.get("actions")
.map(|act| {
from_value(act.clone()).map_err(|_| Error::InvalidApiKeyActions(act.clone()))
})
.ok_or(Error::MissingApiKeyActions)??;
let indexes = value
.get("indexes")
.map(|ind| {
from_value::<Vec<String>>(ind.clone())
// If it's not a vec of string, return an API key parsing error.
.map_err(|_| Error::InvalidApiKeyIndexes(ind.clone()))
.and_then(|ind| {
ind.into_iter()
// If it's not a valid Index uid, return an Index Uid parsing error.
.map(|i| StarOr::<IndexUid>::from_str(&i).map_err(Error::from))
.collect()
})
})
.ok_or(Error::MissingApiKeyIndexes)??;
let expires_at = value
.get("expiresAt")
.map(parse_expiration_date)
.ok_or(Error::MissingApiKeyExpiresAt)??;
let created_at = OffsetDateTime::now_utc();
let updated_at = created_at;
Ok(Self { name, description, uid, actions, indexes, expires_at, created_at, updated_at })
}
pub fn update_from_value(&mut self, value: Value) -> Result<()> {
if let Some(des) = value.get("description") {
let des =
from_value(des.clone()).map_err(|_| Error::InvalidApiKeyDescription(des.clone()));
self.description = des?;
}
if let Some(des) = value.get("name") {
let des = from_value(des.clone()).map_err(|_| Error::InvalidApiKeyName(des.clone()));
self.name = des?;
}
if value.get("uid").is_some() {
return Err(Error::ImmutableField("uid".to_string()));
}
if value.get("actions").is_some() {
return Err(Error::ImmutableField("actions".to_string()));
}
if value.get("indexes").is_some() {
return Err(Error::ImmutableField("indexes".to_string()));
}
if value.get("expiresAt").is_some() {
return Err(Error::ImmutableField("expiresAt".to_string()));
}
if value.get("createdAt").is_some() {
return Err(Error::ImmutableField("createdAt".to_string()));
}
if value.get("updatedAt").is_some() {
return Err(Error::ImmutableField("updatedAt".to_string()));
}
self.updated_at = OffsetDateTime::now_utc();
Ok(())
}
pub fn default_admin() -> Self {
let now = OffsetDateTime::now_utc();
let uid = Uuid::new_v4();
@ -160,107 +149,143 @@ impl Key {
}
}
-fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
-    match value {
-        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
-            .or_else(|_| {
-                PrimitiveDateTime::parse(
-                    string,
-                    format_description!(
-                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
-                    ),
-                ).map(|datetime| datetime.assume_utc())
-            })
-            .or_else(|_| {
-                PrimitiveDateTime::parse(
-                    string,
-                    format_description!(
-                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
-                    ),
-                ).map(|datetime| datetime.assume_utc())
-            })
-            .or_else(|_| {
-                Date::parse(string, format_description!(
-                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
-                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
-            })
-            .map_err(|_| Error::InvalidApiKeyExpiresAt(value.clone()))
-            // check if the key is already expired.
-            .and_then(|d| {
-                if d > OffsetDateTime::now_utc() {
-                    Ok(d)
-                } else {
-                    Err(Error::InvalidApiKeyExpiresAt(value.clone()))
-                }
-            })
-            .map(Option::Some),
-        Value::Null => Ok(None),
-        _otherwise => Err(Error::InvalidApiKeyExpiresAt(value.clone())),
-    }
-}
+#[derive(Debug)]
+pub struct ParseOffsetDateTimeError(String);
+impl Display for ParseOffsetDateTimeError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0)
+    }
+}
+impl std::error::Error for ParseOffsetDateTimeError {}
+fn parse_expiration_date(
+    string: &str,
+) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<ParseOffsetDateTimeError>> {
+    let datetime = if let Ok(datetime) = OffsetDateTime::parse(string, &Rfc3339) {
+        datetime
+    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
+        string,
+        format_description!(
+            "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
+        ),
+    ) {
+        primitive_datetime.assume_utc()
+    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
+        string,
+        format_description!(
+            "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
+        ),
+    ) {
+        primitive_datetime.assume_utc()
+    } else if let Ok(date) = Date::parse(
+        string,
+        format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"),
+    ) {
+        PrimitiveDateTime::new(date, time!(00:00)).assume_utc()
+    } else {
+        return Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned())));
+    };
+    if datetime > OffsetDateTime::now_utc() {
+        Ok(Some(datetime))
+    } else {
+        Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned())))
+    }
+}
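For reference, a hedged sketch (not part of this commit) of the formats the new `parse_expiration_date` accepts; the far-future and past dates are placeholders:

// Illustrative only: exercising the date formats handled above.
#[cfg(test)]
mod expiration_format_examples {
    use super::*;
    #[test]
    fn accepted_formats() {
        assert!(parse_expiration_date("3000-01-01T00:00:00Z").is_ok()); // RFC 3339
        assert!(parse_expiration_date("3000-01-01T00:00:00").is_ok()); // datetime without offset
        assert!(parse_expiration_date("3000-01-01 00:00:00").is_ok()); // space-separated datetime
        assert!(parse_expiration_date("3000-01-01").is_ok()); // date only, midnight assumed
        assert!(parse_expiration_date("2000-01-01").is_err()); // parses, but already expired
        assert!(parse_expiration_date("tomorrow").is_err()); // not a supported format
    }
}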
-#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence)]
+#[derive(
+    Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue,
+)]
#[repr(u8)]
pub enum Action {
#[serde(rename = "*")]
+#[deserr(rename = "*")]
All = 0,
#[serde(rename = "search")]
+#[deserr(rename = "search")]
Search,
#[serde(rename = "documents.*")]
+#[deserr(rename = "documents.*")]
DocumentsAll,
#[serde(rename = "documents.add")]
+#[deserr(rename = "documents.add")]
DocumentsAdd,
#[serde(rename = "documents.get")]
+#[deserr(rename = "documents.get")]
DocumentsGet,
#[serde(rename = "documents.delete")]
+#[deserr(rename = "documents.delete")]
DocumentsDelete,
#[serde(rename = "indexes.*")]
+#[deserr(rename = "indexes.*")]
IndexesAll,
#[serde(rename = "indexes.create")]
+#[deserr(rename = "indexes.create")]
IndexesAdd,
#[serde(rename = "indexes.get")]
+#[deserr(rename = "indexes.get")]
IndexesGet,
#[serde(rename = "indexes.update")]
+#[deserr(rename = "indexes.update")]
IndexesUpdate,
#[serde(rename = "indexes.delete")]
+#[deserr(rename = "indexes.delete")]
IndexesDelete,
#[serde(rename = "indexes.swap")]
+#[deserr(rename = "indexes.swap")]
IndexesSwap,
#[serde(rename = "tasks.*")]
+#[deserr(rename = "tasks.*")]
TasksAll,
#[serde(rename = "tasks.cancel")]
+#[deserr(rename = "tasks.cancel")]
TasksCancel,
#[serde(rename = "tasks.delete")]
+#[deserr(rename = "tasks.delete")]
TasksDelete,
#[serde(rename = "tasks.get")]
+#[deserr(rename = "tasks.get")]
TasksGet,
#[serde(rename = "settings.*")]
+#[deserr(rename = "settings.*")]
SettingsAll,
#[serde(rename = "settings.get")]
+#[deserr(rename = "settings.get")]
SettingsGet,
#[serde(rename = "settings.update")]
+#[deserr(rename = "settings.update")]
SettingsUpdate,
#[serde(rename = "stats.*")]
+#[deserr(rename = "stats.*")]
StatsAll,
#[serde(rename = "stats.get")]
+#[deserr(rename = "stats.get")]
StatsGet,
#[serde(rename = "metrics.*")]
+#[deserr(rename = "metrics.*")]
MetricsAll,
#[serde(rename = "metrics.get")]
+#[deserr(rename = "metrics.get")]
MetricsGet,
#[serde(rename = "dumps.*")]
+#[deserr(rename = "dumps.*")]
DumpsAll,
#[serde(rename = "dumps.create")]
+#[deserr(rename = "dumps.create")]
DumpsCreate,
#[serde(rename = "version")]
+#[deserr(rename = "version")]
Version,
#[serde(rename = "keys.create")]
+#[deserr(rename = "keys.create")]
KeysAdd,
#[serde(rename = "keys.get")]
+#[deserr(rename = "keys.get")]
KeysGet,
#[serde(rename = "keys.update")]
+#[deserr(rename = "keys.update")]
KeysUpdate,
#[serde(rename = "keys.delete")]
+#[deserr(rename = "keys.delete")]
KeysDelete,
}
@ -341,56 +366,3 @@ pub mod actions {
pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
pub const KEYS_DELETE: u8 = KeysDelete.repr();
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("`expiresAt` field is mandatory.")]
MissingApiKeyExpiresAt,
#[error("`indexes` field is mandatory.")]
MissingApiKeyIndexes,
#[error("`actions` field is mandatory.")]
MissingApiKeyActions,
#[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
InvalidApiKeyActions(Value),
#[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
InvalidApiKeyIndexes(Value),
#[error("{0}")]
InvalidApiKeyIndexUid(IndexUidFormatError),
#[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
InvalidApiKeyExpiresAt(Value),
#[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
InvalidApiKeyDescription(Value),
#[error(
"`name` field value `{0}` is invalid. It should be a string or specified as a null value."
)]
InvalidApiKeyName(Value),
#[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
InvalidApiKeyUid(Value),
#[error("The `{0}` field cannot be modified for the given resource.")]
ImmutableField(String),
}
impl From<IndexUidFormatError> for Error {
fn from(e: IndexUidFormatError) -> Self {
Self::InvalidApiKeyIndexUid(e)
}
}
impl ErrorCode for Error {
fn error_code(&self) -> Code {
match self {
Self::MissingApiKeyExpiresAt => Code::MissingApiKeyExpiresAt,
Self::MissingApiKeyIndexes => Code::MissingApiKeyIndexes,
Self::MissingApiKeyActions => Code::MissingApiKeyActions,
Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions,
Self::InvalidApiKeyIndexes(_) | Self::InvalidApiKeyIndexUid(_) => {
Code::InvalidApiKeyIndexes
}
Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
Self::ImmutableField(_) => Code::ImmutableField,
}
}
}


@ -1,11 +1,18 @@
use std::collections::{BTreeMap, BTreeSet};
+use std::convert::Infallible;
+use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
+use std::str::FromStr;
-use deserr::{DeserializeError, DeserializeFromValue};
+use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
-use milli::{Index, DEFAULT_VALUES_PER_FACET};
+use milli::update::Setting;
-use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
+use serde::{Deserialize, Serialize, Serializer};
+use crate::error::deserr_codes::*;
+use crate::error::{unwrap_any, DeserrError};
/// The maximimum number of results that the engine
/// will be able to return in one search call.
@ -27,112 +34,6 @@ where
.serialize(s)
}
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum Setting<T> {
Set(T),
Reset,
NotSet,
}
impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}
impl<T> From<Setting<T>> for milli::update::Setting<T> {
fn from(value: Setting<T>) -> Self {
match value {
Setting::Set(x) => milli::update::Setting::Set(x),
Setting::Reset => milli::update::Setting::Reset,
Setting::NotSet => milli::update::Setting::NotSet,
}
}
}
impl<T> From<milli::update::Setting<T>> for Setting<T> {
fn from(value: milli::update::Setting<T>) -> Self {
match value {
milli::update::Setting::Set(x) => Setting::Set(x),
milli::update::Setting::Reset => Setting::Reset,
milli::update::Setting::NotSet => Setting::NotSet,
}
}
}
impl<T> Setting<T> {
pub fn set(self) -> Option<T> {
match self {
Self::Set(value) => Some(value),
_ => None,
}
}
pub const fn as_ref(&self) -> Setting<&T> {
match *self {
Self::Set(ref value) => Setting::Set(value),
Self::Reset => Setting::Reset,
Self::NotSet => Setting::NotSet,
}
}
pub const fn is_not_set(&self) -> bool {
matches!(self, Self::NotSet)
}
/// If `Self` is `Reset`, then map self to `Set` with the provided `val`.
pub fn or_reset(self, val: T) -> Self {
match self {
Self::Reset => Self::Set(val),
otherwise => otherwise,
}
}
}
impl<T: Serialize> Serialize for Setting<T> {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Set(value) => Some(value),
// Usually not_set isn't serialized by setting skip_serializing_if field attribute
Self::NotSet | Self::Reset => None,
}
.serialize(serializer)
}
}
impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Deserialize::deserialize(deserializer).map(|x| match x {
Some(x) => Self::Set(x),
None => Self::Reset, // Reset is forced by sending null value
})
}
}
impl<T, E> DeserializeFromValue<E> for Setting<T>
where
T: DeserializeFromValue<E>,
E: DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::Null => Ok(Setting::Reset),
_ => T::deserialize_from_value(value, location).map(Setting::Set),
}
}
fn default() -> Option<Self> {
Some(Self::NotSet)
}
}
#[derive(Clone, Default, Debug, Serialize, PartialEq, Eq)]
pub struct Checked;
@ -151,78 +52,90 @@ where
}
}
-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
+fn validate_min_word_size_for_typo_setting<E: DeserializeError>(
+    s: MinWordSizeTyposSetting,
+    location: ValuePointerRef,
+) -> Result<MinWordSizeTyposSetting, E> {
+    if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
+        if one > two {
+            return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
+        }
+    }
+    Ok(s)
+}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
+#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrError<InvalidMinWordLengthForTypo>)]
pub struct MinWordSizeTyposSetting {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub one_typo: Setting<u8>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub two_typos: Setting<u8>,
}
-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
+#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrError<InvalidMinWordLengthForTypo>>)]
pub struct TypoSettings {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub enabled: Setting<bool>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    #[deserr(error = DeserrError<InvalidMinWordLengthForTypo>)]
    pub min_word_size_for_typos: Setting<MinWordSizeTyposSetting>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub disable_on_words: Setting<BTreeSet<String>>,
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub disable_on_attributes: Setting<BTreeSet<String>>,
}
-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct FacetingSettings {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub max_values_per_facet: Setting<usize>,
}
-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct PaginationSettings {
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    pub max_total_hits: Setting<usize>,
}
impl MergeWithError<milli::CriterionError> for DeserrError<InvalidSettingsRankingRules> {
fn merge(
_self_: Option<Self>,
other: milli::CriterionError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Self::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
/// Holds all the settings for an index. `T` can either be `Checked` if they represents settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the later case, a
/// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
-#[serde(deny_unknown_fields)]
-#[serde(rename_all = "camelCase")]
-#[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
-#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
+#[serde(
+    deny_unknown_fields,
+    rename_all = "camelCase",
+    bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
+)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct Settings<T> {
    #[serde(
        default,
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsDisplayedAttributes>)]
    pub displayed_attributes: Setting<Vec<String>>,
    #[serde(
@ -230,38 +143,39 @@ pub struct Settings<T> {
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsSearchableAttributes>)]
    pub searchable_attributes: Setting<Vec<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsFilterableAttributes>)]
    pub filterable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsSortableAttributes>)]
    pub sortable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsRankingRules>)]
-    pub ranking_rules: Setting<Vec<String>>,
+    pub ranking_rules: Setting<Vec<RankingRuleView>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsStopWords>)]
    pub stop_words: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsSynonyms>)]
    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsDistinctAttribute>)]
    pub distinct_attribute: Setting<String>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsTypoTolerance>)]
    pub typo_tolerance: Setting<TypoSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsFaceting>)]
    pub faceting: Setting<FacetingSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
+    #[deserr(error = DeserrError<InvalidSettingsPagination>)]
    pub pagination: Setting<PaginationSettings>,
    #[serde(skip)]
+    #[deserr(skip)]
    pub _kind: PhantomData<T>,
}
@ -396,7 +310,9 @@ pub fn apply_settings_to_builder(
}
}
match settings.ranking_rules {
-        Setting::Set(ref criteria) => builder.set_criteria(criteria.clone()),
+        Setting::Set(ref criteria) => {
+            builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
+        }
Setting::Reset => builder.reset_criteria(),
Setting::NotSet => (),
}
@ -510,7 +426,7 @@ pub fn settings(
let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();
-    let criteria = index.criteria(rtxn)?.into_iter().map(|c| c.to_string()).collect();
+    let criteria = index.criteria(rtxn)?;
let stop_words = index
.stop_words(rtxn)?
@ -571,7 +487,7 @@ pub fn settings(
},
filterable_attributes: Setting::Set(filterable_attributes),
sortable_attributes: Setting::Set(sortable_attributes),
-        ranking_rules: Setting::Set(criteria),
+        ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
stop_words: Setting::Set(stop_words),
distinct_attribute: match distinct_field {
Some(field) => Setting::Set(field),
@ -585,16 +501,106 @@ pub fn settings(
})
}
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(&String) = FromStr::from_str -> CriterionError)]
pub enum RankingRuleView {
/// Sorted by decreasing number of matched query terms.
/// Query words at the front of an attribute is considered better than if it was at the back.
Words,
/// Sorted by increasing number of typos.
Typo,
/// Sorted by increasing distance between matched query terms.
Proximity,
/// Documents with quey words contained in more important
/// attributes are considered better.
Attribute,
/// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable
/// attributes can be used in place of this criterion at query time.
Sort,
/// Sorted by the similarity of the matched words with the query words.
Exactness,
/// Sorted by the increasing value of the field specified.
Asc(String),
/// Sorted by the decreasing value of the field specified.
Desc(String),
}
impl Serialize for RankingRuleView {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&format!("{}", Criterion::from(self.clone())))
}
}
impl<'de> Deserialize<'de> for RankingRuleView {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = RankingRuleView;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "the name of a valid ranking rule (string)")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let criterion = Criterion::from_str(v).map_err(|_| {
E::invalid_value(serde::de::Unexpected::Str(v), &"a valid ranking rule")
})?;
Ok(RankingRuleView::from(criterion))
}
}
deserializer.deserialize_str(Visitor)
}
}
impl FromStr for RankingRuleView {
type Err = <Criterion as FromStr>::Err;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(RankingRuleView::from(Criterion::from_str(s)?))
}
}
impl fmt::Display for RankingRuleView {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fmt::Display::fmt(&Criterion::from(self.clone()), f)
}
}
impl From<Criterion> for RankingRuleView {
fn from(value: Criterion) -> Self {
match value {
Criterion::Words => RankingRuleView::Words,
Criterion::Typo => RankingRuleView::Typo,
Criterion::Proximity => RankingRuleView::Proximity,
Criterion::Attribute => RankingRuleView::Attribute,
Criterion::Sort => RankingRuleView::Sort,
Criterion::Exactness => RankingRuleView::Exactness,
Criterion::Asc(x) => RankingRuleView::Asc(x),
Criterion::Desc(x) => RankingRuleView::Desc(x),
}
}
}
impl From<RankingRuleView> for Criterion {
fn from(value: RankingRuleView) -> Self {
match value {
RankingRuleView::Words => Criterion::Words,
RankingRuleView::Typo => Criterion::Typo,
RankingRuleView::Proximity => Criterion::Proximity,
RankingRuleView::Attribute => Criterion::Attribute,
RankingRuleView::Sort => Criterion::Sort,
RankingRuleView::Exactness => Criterion::Exactness,
RankingRuleView::Asc(x) => Criterion::Asc(x),
RankingRuleView::Desc(x) => Criterion::Desc(x),
}
}
}
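Illustrative only (not part of this commit): `RankingRuleView` parses the same canonical rule names as milli's `Criterion`, so existing settings payloads keep working. A hypothetical round-trip sketch:

// Hypothetical sketch: canonical rule names parse into the new view type.
#[cfg(test)]
#[test]
fn ranking_rule_view_roundtrip_sketch() {
    let rules: Vec<RankingRuleView> = ["words", "typo", "proximity", "attribute", "sort", "exactness"]
        .into_iter()
        .map(|s| RankingRuleView::from_str(s).expect("valid ranking rule"))
        .collect();
    // Converting back to milli's `Criterion` is lossless.
    let criteria: Vec<Criterion> = rules.into_iter().map(Criterion::from).collect();
    assert_eq!(criteria.len(), 6);
}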
#[cfg(test)]
pub(crate) mod test {
-    use proptest::prelude::*;
use super::*;
-    pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
-        prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
-    }
#[test]
fn test_setting_check() {
// test no changes


@ -3,9 +3,12 @@ use std::marker::PhantomData;
use std::ops::Deref;
use std::str::FromStr;
+use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use crate::error::unwrap_any;
/// A type that tries to match either a star (*) or
/// any other thing that implements `FromStr`.
#[derive(Debug, Clone)]
@ -14,6 +17,35 @@ pub enum StarOr<T> {
Other(T),
}
impl<E: DeserializeError, T> DeserializeFromValue<E> for StarOr<T>
where
T: FromStr,
E: MergeWithError<T::Err>,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::String(v) => match v.as_str() {
"*" => Ok(StarOr::Star),
v => match FromStr::from_str(v) {
Ok(x) => Ok(StarOr::Other(x)),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
},
},
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
accepted: &[ValueKind::String],
},
location,
))),
}
}
}
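Illustrative only (not part of this commit): a literal "*" deserializes to `StarOr::Star`, anything else goes through the inner type's `FromStr`, and the inner error is merged into the deserr error `E`. A hypothetical sketch using the parallel `FromStr` impl kept below, assuming it treats "*" the same way:

// Hypothetical sketch; `u32` stands in for any `FromStr` inner type.
#[cfg(test)]
#[test]
fn star_or_parsing_sketch() {
    use std::str::FromStr;
    assert!(matches!(StarOr::<u32>::from_str("*"), Ok(StarOr::Star)));
    assert!(matches!(StarOr::<u32>::from_str("42"), Ok(StarOr::Other(42))));
}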
impl<T: FromStr> FromStr for StarOr<T> {
type Err = T::Err;


@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
bytes = "1.2.1"
clap = { version = "4.0.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.6"
-deserr = { version = "0.1.2", features = ["serde-json"] }
+deserr = { version = "0.1.4", features = ["serde-json"] }
dump = { path = "../dump" }
either = "1.8.0"
env_logger = "0.9.1"


@ -17,7 +17,7 @@ impl ErrorCode for AuthenticationError {
fn error_code(&self) -> Code {
match self {
AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
-            AuthenticationError::InvalidToken => Code::InvalidToken,
+            AuthenticationError::InvalidToken => Code::InvalidApiKey,
AuthenticationError::IrretrievableState => Code::Internal,
AuthenticationError::MissingMasterKey => Code::MissingMasterKey,
}


@ -32,7 +32,7 @@ impl<T, E> ValidatedJson<T, E> {
impl<T, E> FromRequest for ValidatedJson<T, E>
where
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
T: DeserializeFromValue<E>,
{
type Error = actix_web::Error;
@ -55,7 +55,7 @@ pub struct ValidatedJsonExtractFut<T, E> {
impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
T: DeserializeFromValue<E>,
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
type Output = Result<ValidatedJson<T, E>, actix_web::Error>;


@ -22,7 +22,7 @@ impl<T, E> QueryParameter<T, E> {
impl<T, E> QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
@ -58,7 +58,7 @@ impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
impl<T, E> FromRequest for QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
-    E: DeserializeError + ErrorCode + 'static,
+    E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
type Error = actix_web::Error;
type Future = Ready<Result<Self, actix_web::Error>>;


@ -1,20 +1,21 @@
-use std::convert::Infallible;
-use std::num::ParseIntError;
-use std::{fmt, str};
+use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::{DeserializeError, IntoValue, MergeWithError, ValuePointerRef};
+use deserr::DeserializeFromValue;
use meilisearch_auth::error::AuthControllerError;
use meilisearch_auth::AuthController;
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
+use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::keys::{Action, Key};
+use meilisearch_types::error::{Code, DeserrError, ResponseError, TakeErrorMessage};
+use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
use serde::{Deserialize, Serialize};
-use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;
+use super::indexes::search::parse_usize_take_error_message;
+use super::PAGINATION_DEFAULT_LIMIT;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
+use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;
@ -35,7 +36,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
pub async fn create_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
-    body: web::Json<Value>,
+    body: ValidatedJson<CreateApiKey, DeserrError>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let v = body.into_inner();
@ -49,72 +50,28 @@ pub async fn create_api_key(
Ok(HttpResponse::Created().json(res))
}
-#[derive(Debug)]
-pub struct PaginationDeserrError {
-    error: String,
-    code: Code,
-}
+#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct ListApiKeys {
+    #[serde(default)]
+    #[deserr(error = DeserrError<InvalidApiKeyOffset>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
+    pub offset: usize,
+    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    #[deserr(error = DeserrError<InvalidApiKeyLimit>, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
+    pub limit: usize,
+}
+impl ListApiKeys {
+    fn as_pagination(self) -> Pagination {
+        Pagination { offset: self.offset, limit: self.limit }
+    }
+}
-impl std::fmt::Display for PaginationDeserrError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.error)
-    }
-}
impl std::error::Error for PaginationDeserrError {}
impl ErrorCode for PaginationDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<PaginationDeserrError> for PaginationDeserrError {
fn merge(
_self_: Option<Self>,
other: PaginationDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl DeserializeError for PaginationDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("offset") => Code::InvalidApiKeyLimit,
Some("limit") => Code::InvalidApiKeyOffset,
_ => Code::BadRequest,
};
Err(PaginationDeserrError { error, code })
}
}
impl MergeWithError<ParseIntError> for PaginationDeserrError {
fn merge(
_self_: Option<Self>,
other: ParseIntError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
PaginationDeserrError::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
} }
} }
pub async fn list_api_keys(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
-    paginate: QueryParameter<Pagination, PaginationDeserrError>,
+    list_api_keys: QueryParameter<ListApiKeys, DeserrError>,
) -> Result<HttpResponse, ResponseError> {
-    let paginate = paginate.into_inner();
+    let paginate = list_api_keys.into_inner().as_pagination();
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let page_view = paginate
@ -149,15 +106,15 @@ pub async fn get_api_key(
pub async fn patch_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
-    body: web::Json<Value>,
+    body: ValidatedJson<PatchApiKey, DeserrError>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = path.into_inner().key;
-    let body = body.into_inner();
+    let patch_api_key = body.into_inner();
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
-        let key = auth_controller.update_key(uid, body)?;
+        let key = auth_controller.update_key(uid, patch_api_key)?;
Ok(KeyView::from_key(key, &auth_controller))
})


@ -1,19 +1,17 @@
-use std::convert::Infallible;
-use std::fmt;
use std::io::ErrorKind;
use std::num::ParseIntError;
-use std::str::FromStr;
use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
use bstr::ByteSlice;
-use deserr::{DeserializeError, DeserializeFromValue, IntoValue, MergeWithError, ValuePointerRef};
+use deserr::DeserializeFromValue;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
-use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
+use meilisearch_types::error::deserr_codes::*;
+use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::update::IndexDocumentsMethod;
@ -29,6 +27,7 @@ use tempfile::tempfile;
use tokio::fs::File;
use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
+use super::search::parse_usize_take_error_message;
use crate::analytics::{Analytics, DocumentDeletionKind};
use crate::error::MeilisearchHttpError;
use crate::error::PayloadError::ReceivePayload;
@ -83,61 +82,16 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
}
#[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct GetDocument {
+    #[deserr(error = DeserrError<InvalidDocumentFields>)]
fields: Option<CS<StarOr<String>>>,
}
#[derive(Debug)]
pub struct GetDocumentDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for GetDocumentDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for GetDocumentDeserrError {}
impl ErrorCode for GetDocumentDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<GetDocumentDeserrError> for GetDocumentDeserrError {
fn merge(
_self_: Option<Self>,
other: GetDocumentDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl DeserializeError for GetDocumentDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("fields") => Code::InvalidDocumentFields,
_ => Code::BadRequest,
};
Err(GetDocumentDeserrError { error, code })
}
}
pub async fn get_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
path: web::Path<DocumentParam>,
-    params: QueryParameter<GetDocument, GetDocumentDeserrError>,
+    params: QueryParameter<GetDocument, DeserrError>,
) -> Result<HttpResponse, ResponseError> {
let GetDocument { fields } = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);
@ -165,81 +119,20 @@ pub async fn delete_document(
}
#[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct BrowseQuery {
-    #[deserr(default, from(&String) = FromStr::from_str -> ParseIntError)]
+    #[deserr(error = DeserrError<InvalidDocumentFields>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<ParseIntError>)]
offset: usize,
-    #[deserr(default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = FromStr::from_str -> ParseIntError)]
+    #[deserr(error = DeserrError<InvalidDocumentLimit>, default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<ParseIntError>)]
limit: usize,
+    #[deserr(error = DeserrError<InvalidDocumentLimit>)]
fields: Option<CS<StarOr<String>>>,
}
#[derive(Debug)]
pub struct BrowseQueryDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for BrowseQueryDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for BrowseQueryDeserrError {}
impl ErrorCode for BrowseQueryDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<BrowseQueryDeserrError> for BrowseQueryDeserrError {
fn merge(
_self_: Option<Self>,
other: BrowseQueryDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl DeserializeError for BrowseQueryDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("fields") => Code::InvalidDocumentFields,
Some("offset") => Code::InvalidDocumentOffset,
Some("limit") => Code::InvalidDocumentLimit,
_ => Code::BadRequest,
};
Err(BrowseQueryDeserrError { error, code })
}
}
impl MergeWithError<ParseIntError> for BrowseQueryDeserrError {
fn merge(
_self_: Option<Self>,
other: ParseIntError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
BrowseQueryDeserrError::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
pub async fn get_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
-    params: QueryParameter<BrowseQuery, BrowseQueryDeserrError>,
+    params: QueryParameter<BrowseQuery, DeserrError>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let BrowseQuery { limit, offset, fields } = params.into_inner();
@ -255,61 +148,16 @@ pub async fn get_all_documents(
}
#[derive(Deserialize, Debug, DeserializeFromValue)]
-#[deserr(rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateDocumentsQuery {
+    #[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
pub primary_key: Option<String>,
}
#[derive(Debug)]
pub struct UpdateDocumentsQueryDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for UpdateDocumentsQueryDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for UpdateDocumentsQueryDeserrError {}
impl ErrorCode for UpdateDocumentsQueryDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<UpdateDocumentsQueryDeserrError> for UpdateDocumentsQueryDeserrError {
fn merge(
_self_: Option<Self>,
other: UpdateDocumentsQueryDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl DeserializeError for UpdateDocumentsQueryDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("primaryKey") => Code::InvalidIndexPrimaryKey,
_ => Code::BadRequest,
};
Err(UpdateDocumentsQueryDeserrError { error, code })
}
}
pub async fn add_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
-    params: QueryParameter<UpdateDocumentsQuery, UpdateDocumentsQueryDeserrError>,
+    params: QueryParameter<UpdateDocumentsQuery, DeserrError>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
@ -337,7 +185,7 @@ pub async fn add_documents(
pub async fn update_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
path: web::Path<String>,
-    params: QueryParameter<UpdateDocumentsQuery, UpdateDocumentsQueryDeserrError>,
+    params: QueryParameter<UpdateDocumentsQuery, DeserrError>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,

View File

@ -1,14 +1,10 @@
use std::convert::Infallible;
use std::num::ParseIntError;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse}; use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{ use deserr::DeserializeFromValue;
DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
};
use index_scheduler::IndexScheduler; use index_scheduler::IndexScheduler;
use log::debug; use log::debug;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError}; use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
use meilisearch_types::index_uid::IndexUid; use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::{self, FieldDistribution, Index}; use meilisearch_types::milli::{self, FieldDistribution, Index};
use meilisearch_types::tasks::KindWithContent; use meilisearch_types::tasks::KindWithContent;
@ -16,7 +12,8 @@ use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use time::OffsetDateTime; use time::OffsetDateTime;
use super::{Pagination, SummarizedTaskView}; use self::search::parse_usize_take_error_message;
use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::analytics::Analytics; use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*; use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData}; use crate::extractors::authentication::{AuthenticationError, GuardedData};
@ -72,9 +69,26 @@ impl IndexView {
} }
} }
#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct ListIndexes {
#[serde(default)]
#[deserr(error = DeserrError<InvalidIndexOffset>, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
pub offset: usize,
#[serde(default = "PAGINATION_DEFAULT_LIMIT")]
#[deserr(error = DeserrError<InvalidIndexLimit>, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
pub limit: usize,
}
impl ListIndexes {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset, limit: self.limit }
}
}
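
ListIndexes::as_pagination above hands offset/limit over to the pre-existing Pagination helper; at its core that is a skip/take over the collected index views. A rough sketch with a stand-in Pagination (the real auto_paginate_sized helper does more than this):

// Stand-in Pagination; only the skip/take core is shown.
struct Pagination {
    offset: usize,
    limit: usize,
}

impl Pagination {
    fn paginate<T>(&self, items: impl IntoIterator<Item = T>) -> Vec<T> {
        items.into_iter().skip(self.offset).take(self.limit).collect()
    }
}

fn main() {
    let indexes = vec!["movies", "books", "songs", "games"];
    let page = Pagination { offset: 1, limit: 2 }.paginate(indexes);
    assert_eq!(page, vec!["books", "songs"]);
}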
pub async fn list_indexes( pub async fn list_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
paginate: QueryParameter<Pagination, ListIndexesDeserrError>, paginate: QueryParameter<ListIndexes, DeserrError>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let search_rules = &index_scheduler.filters().search_rules; let search_rules = &index_scheduler.filters().search_rules;
let indexes: Vec<_> = index_scheduler.indexes()?; let indexes: Vec<_> = index_scheduler.indexes()?;
@ -84,82 +98,24 @@ pub async fn list_indexes(
.map(|(name, index)| IndexView::new(name, &index)) .map(|(name, index)| IndexView::new(name, &index))
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let ret = paginate.auto_paginate_sized(indexes.into_iter()); let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
debug!("returns: {:?}", ret); debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret)) Ok(HttpResponse::Ok().json(ret))
} }
#[derive(Debug)]
pub struct ListIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for ListIndexesDeserrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for ListIndexesDeserrError {}
impl ErrorCode for ListIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<ListIndexesDeserrError> for ListIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: ListIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for ListIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let code = match location.last_field() {
Some("offset") => Code::InvalidIndexLimit,
Some("limit") => Code::InvalidIndexOffset,
_ => Code::BadRequest,
};
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(ListIndexesDeserrError { error, code })
}
}
impl MergeWithError<ParseIntError> for ListIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: ParseIntError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
ListIndexesDeserrError::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
#[derive(DeserializeFromValue, Debug)] #[derive(DeserializeFromValue, Debug)]
#[deserr(rename_all = camelCase, deny_unknown_fields)] #[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct IndexCreateRequest { pub struct IndexCreateRequest {
#[deserr(error = DeserrError<InvalidIndexUid>, missing_field_error = DeserrError::missing_index_uid)]
uid: String, uid: String,
#[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
primary_key: Option<String>, primary_key: Option<String>,
} }
pub async fn create_index( pub async fn create_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
body: ValidatedJson<IndexCreateRequest, CreateIndexesDeserrError>, body: ValidatedJson<IndexCreateRequest, DeserrError>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
@ -184,58 +140,10 @@ pub async fn create_index(
} }
} }
#[derive(Debug)]
pub struct CreateIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for CreateIndexesDeserrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for CreateIndexesDeserrError {}
impl ErrorCode for CreateIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<CreateIndexesDeserrError> for CreateIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: CreateIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for CreateIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let code = match location.last_field() {
Some("uid") => Code::InvalidIndexUid,
Some("primaryKey") => Code::InvalidIndexPrimaryKey,
None if matches!(error, ErrorKind::MissingField { field } if field == "uid") => {
Code::MissingIndexUid
}
_ => Code::BadRequest,
};
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(CreateIndexesDeserrError { error, code })
}
}
#[derive(DeserializeFromValue, Debug)] #[derive(DeserializeFromValue, Debug)]
#[deserr(rename_all = camelCase, deny_unknown_fields)] #[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateIndexRequest { pub struct UpdateIndexRequest {
#[deserr(error = DeserrError<InvalidIndexPrimaryKey>)]
primary_key: Option<String>, primary_key: Option<String>,
} }
@ -254,7 +162,7 @@ pub async fn get_index(
pub async fn update_index( pub async fn update_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
path: web::Path<String>, path: web::Path<String>,
body: ValidatedJson<UpdateIndexRequest, UpdateIndexesDeserrError>, body: ValidatedJson<UpdateIndexRequest, DeserrError>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
@ -278,51 +186,6 @@ pub async fn update_index(
Ok(HttpResponse::Accepted().json(task)) Ok(HttpResponse::Accepted().json(task))
} }
#[derive(Debug)]
pub struct UpdateIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for UpdateIndexesDeserrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for UpdateIndexesDeserrError {}
impl ErrorCode for UpdateIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<UpdateIndexesDeserrError> for UpdateIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: UpdateIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for UpdateIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let code = match location.last_field() {
Some("primaryKey") => Code::InvalidIndexPrimaryKey,
_ => Code::BadRequest,
};
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(UpdateIndexesDeserrError { error, code })
}
}
pub async fn delete_index( pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>, index_uid: web::Path<String>,

View File

@ -5,7 +5,8 @@ use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler; use index_scheduler::IndexScheduler;
use log::debug; use log::debug;
use meilisearch_auth::IndexSearchRules; use meilisearch_auth::IndexSearchRules;
use meilisearch_types::error::ResponseError; use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage};
use serde_cs::vec::CS; use serde_cs::vec::CS;
use serde_json::Value; use serde_json::Value;
@ -15,11 +16,11 @@ use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson; use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter; use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler; use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::from_string_to_option; use crate::routes::from_string_to_option_take_error_message;
use crate::search::{ use crate::search::{
perform_search, MatchingStrategy, SearchDeserError, SearchQuery, DEFAULT_CROP_LENGTH, perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEARCH_OFFSET,
}; };
pub fn configure(cfg: &mut web::ServiceConfig) { pub fn configure(cfg: &mut web::ServiceConfig) {
@ -30,35 +31,54 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
); );
} }
pub fn parse_usize_take_error_message(
s: &str,
) -> Result<usize, TakeErrorMessage<std::num::ParseIntError>> {
usize::from_str(s).map_err(TakeErrorMessage)
}
pub fn parse_bool_take_error_message(
s: &str,
) -> Result<bool, TakeErrorMessage<std::str::ParseBoolError>> {
s.parse().map_err(TakeErrorMessage)
}
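
The two helpers above exist so that deserr's from(&String) conversions keep only the message of the underlying parse error. A sketch under the assumption that TakeErrorMessage is essentially a newtype whose Display forwards the wrapped error's message (the real type lives in meilisearch_types::error):

use std::fmt;
use std::num::ParseIntError;
use std::str::FromStr;

// Assumed shape of the wrapper: forward the inner error's message, nothing more.
#[derive(Debug)]
struct TakeErrorMessage<E>(E);

impl<E: fmt::Display> fmt::Display for TakeErrorMessage<E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

// Same shape as the helper added above: parse a query-string value into a usize.
fn parse_usize_take_error_message(s: &str) -> Result<usize, TakeErrorMessage<ParseIntError>> {
    usize::from_str(s).map_err(TakeErrorMessage)
}

fn main() {
    assert_eq!(parse_usize_take_error_message("20").unwrap(), 20);
    let err = parse_usize_take_error_message("doggo").unwrap_err();
    println!("invalid `limit`: {err}"); // prints the ParseIntError message only
}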
#[derive(Debug, deserr::DeserializeFromValue)] #[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)] #[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQueryGet { pub struct SearchQueryGet {
#[deserr(error = DeserrError<InvalidSearchQ>)]
q: Option<String>, q: Option<String>,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), from(&String) = FromStr::from_str -> std::num::ParseIntError)] #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
offset: usize, offset: usize,
#[deserr(default = DEFAULT_SEARCH_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)] #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
limit: usize, limit: usize,
#[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)] #[deserr(error = DeserrError<InvalidSearchPage>, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
page: Option<usize>, page: Option<usize>,
#[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)] #[deserr(error = DeserrError<InvalidSearchHitsPerPage>, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
hits_per_page: Option<usize>, hits_per_page: Option<usize>,
#[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
attributes_to_retrieve: Option<CS<String>>, attributes_to_retrieve: Option<CS<String>>,
#[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
attributes_to_crop: Option<CS<String>>, attributes_to_crop: Option<CS<String>>,
#[deserr(default = DEFAULT_CROP_LENGTH(), from(&String) = FromStr::from_str -> std::num::ParseIntError)] #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage<std::num::ParseIntError>)]
crop_length: usize, crop_length: usize,
#[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
attributes_to_highlight: Option<CS<String>>, attributes_to_highlight: Option<CS<String>>,
#[deserr(error = DeserrError<InvalidSearchFilter>)]
filter: Option<String>, filter: Option<String>,
#[deserr(error = DeserrError<InvalidSearchSort>)]
sort: Option<String>, sort: Option<String>,
#[deserr(default, from(&String) = FromStr::from_str -> std::str::ParseBoolError)] #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default, from(&String) = parse_bool_take_error_message -> TakeErrorMessage<std::str::ParseBoolError>)]
show_matches_position: bool, show_matches_position: bool,
#[deserr(error = DeserrError<InvalidSearchFacets>)]
facets: Option<CS<String>>, facets: Option<CS<String>>,
#[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())] #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
highlight_pre_tag: String, highlight_pre_tag: String,
#[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())] #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
highlight_post_tag: String, highlight_post_tag: String,
#[deserr(default = DEFAULT_CROP_MARKER())] #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
crop_marker: String, crop_marker: String,
#[deserr(default)] #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
matching_strategy: MatchingStrategy, matching_strategy: MatchingStrategy,
} }
@ -142,7 +162,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
pub async fn search_with_url_query( pub async fn search_with_url_query(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>, index_uid: web::Path<String>,
params: QueryParameter<SearchQueryGet, SearchDeserError>, params: QueryParameter<SearchQueryGet, DeserrError>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
@ -174,7 +194,7 @@ pub async fn search_with_url_query(
pub async fn search_with_post( pub async fn search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>, index_uid: web::Path<String>,
params: ValidatedJson<SearchQuery, SearchDeserError>, params: ValidatedJson<SearchQuery, DeserrError>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {

View File

@ -1,13 +1,10 @@
use std::fmt;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse}; use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{IntoValue, ValuePointerRef};
use index_scheduler::IndexScheduler; use index_scheduler::IndexScheduler;
use log::debug; use log::debug;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError}; use meilisearch_types::error::{DeserrError, ResponseError};
use meilisearch_types::index_uid::IndexUid; use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{settings, Settings, Unchecked}; use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent; use meilisearch_types::tasks::KindWithContent;
use serde_json::json; use serde_json::json;
@ -19,7 +16,7 @@ use crate::routes::SummarizedTaskView;
#[macro_export] #[macro_export]
macro_rules! make_setting_route { macro_rules! make_setting_route {
($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => { ($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
pub mod $attr { pub mod $attr {
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse, Resource}; use actix_web::{web, HttpRequest, HttpResponse, Resource};
@ -68,7 +65,7 @@ macro_rules! make_setting_route {
Data<IndexScheduler>, Data<IndexScheduler>,
>, >,
index_uid: actix_web::web::Path<String>, index_uid: actix_web::web::Path<String>,
body: actix_web::web::Json<Option<$type>>, body: $crate::routes::indexes::ValidatedJson<Option<$type>, $err_ty>,
req: HttpRequest, req: HttpRequest,
$analytics_var: web::Data<dyn Analytics>, $analytics_var: web::Data<dyn Analytics>,
) -> std::result::Result<HttpResponse, ResponseError> { ) -> std::result::Result<HttpResponse, ResponseError> {
@ -133,6 +130,9 @@ make_setting_route!(
"/filterable-attributes", "/filterable-attributes",
put, put,
std::collections::BTreeSet<String>, std::collections::BTreeSet<String>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsFilterableAttributes,
>,
filterable_attributes, filterable_attributes,
"filterableAttributes", "filterableAttributes",
analytics, analytics,
@ -156,6 +156,9 @@ make_setting_route!(
"/sortable-attributes", "/sortable-attributes",
put, put,
std::collections::BTreeSet<String>, std::collections::BTreeSet<String>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsSortableAttributes,
>,
sortable_attributes, sortable_attributes,
"sortableAttributes", "sortableAttributes",
analytics, analytics,
@ -179,6 +182,9 @@ make_setting_route!(
"/displayed-attributes", "/displayed-attributes",
put, put,
Vec<String>, Vec<String>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsDisplayedAttributes,
>,
displayed_attributes, displayed_attributes,
"displayedAttributes", "displayedAttributes",
analytics, analytics,
@ -202,6 +208,9 @@ make_setting_route!(
"/typo-tolerance", "/typo-tolerance",
patch, patch,
meilisearch_types::settings::TypoSettings, meilisearch_types::settings::TypoSettings,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsTypoTolerance,
>,
typo_tolerance, typo_tolerance,
"typoTolerance", "typoTolerance",
analytics, analytics,
@ -212,7 +221,7 @@ make_setting_route!(
"TypoTolerance Updated".to_string(), "TypoTolerance Updated".to_string(),
json!({ json!({
"typo_tolerance": { "typo_tolerance": {
"enabled": setting.as_ref().map(|s| !matches!(s.enabled.into(), Setting::Set(false))), "enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
"disable_on_attributes": setting "disable_on_attributes": setting
.as_ref() .as_ref()
.and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())), .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
@ -244,6 +253,9 @@ make_setting_route!(
"/searchable-attributes", "/searchable-attributes",
put, put,
Vec<String>, Vec<String>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsSearchableAttributes,
>,
searchable_attributes, searchable_attributes,
"searchableAttributes", "searchableAttributes",
analytics, analytics,
@ -267,6 +279,9 @@ make_setting_route!(
"/stop-words", "/stop-words",
put, put,
std::collections::BTreeSet<String>, std::collections::BTreeSet<String>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsStopWords,
>,
stop_words, stop_words,
"stopWords", "stopWords",
analytics, analytics,
@ -289,6 +304,9 @@ make_setting_route!(
"/synonyms", "/synonyms",
put, put,
std::collections::BTreeMap<String, Vec<String>>, std::collections::BTreeMap<String, Vec<String>>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsSynonyms,
>,
synonyms, synonyms,
"synonyms", "synonyms",
analytics, analytics,
@ -311,6 +329,9 @@ make_setting_route!(
"/distinct-attribute", "/distinct-attribute",
put, put,
String, String,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsDistinctAttribute,
>,
distinct_attribute, distinct_attribute,
"distinctAttribute", "distinctAttribute",
analytics, analytics,
@ -331,24 +352,27 @@ make_setting_route!(
make_setting_route!( make_setting_route!(
"/ranking-rules", "/ranking-rules",
put, put,
Vec<String>, Vec<meilisearch_types::settings::RankingRuleView>,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsRankingRules,
>,
ranking_rules, ranking_rules,
"rankingRules", "rankingRules",
analytics, analytics,
|setting: &Option<Vec<String>>, req: &HttpRequest| { |setting: &Option<Vec<meilisearch_types::settings::RankingRuleView>>, req: &HttpRequest| {
use serde_json::json; use serde_json::json;
analytics.publish( analytics.publish(
"RankingRules Updated".to_string(), "RankingRules Updated".to_string(),
json!({ json!({
"ranking_rules": { "ranking_rules": {
"words_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "words")), "words_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))),
"typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "typo")), "typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))),
"proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "proximity")), "proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Proximity))),
"attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "attribute")), "attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Attribute))),
"sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "sort")), "sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))),
"exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "exactness")), "exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Exactness))),
"values": setting.as_ref().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::<Vec<_>>().join(", ")), "values": setting.as_ref().map(|rr| rr.iter().filter(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Asc(_) | meilisearch_types::settings::RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
} }
}), }),
Some(req), Some(req),
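
The analytics closure above now inspects RankingRuleView variants instead of comparing raw strings. A pared-down sketch; this RankingRuleView is a stand-in with only a few of the real variants, and Debug formatting stands in for the Display used by the actual code:

// Stand-in enum: the real RankingRuleView has more variants and a Display impl.
#[derive(Debug, PartialEq)]
enum RankingRuleView {
    Words,
    Typo,
    Asc(String),
    Desc(String),
}

// Position of the built-in `words` rule, found with `matches!` rather than `s == "words"`.
fn words_position(rules: &[RankingRuleView]) -> Option<usize> {
    rules.iter().position(|r| matches!(r, RankingRuleView::Words))
}

// Rules selected by variant pattern instead of by inspecting the raw string.
fn custom_rules(rules: &[RankingRuleView]) -> Vec<String> {
    rules
        .iter()
        .filter(|r| matches!(r, RankingRuleView::Asc(_) | RankingRuleView::Desc(_)))
        .map(|r| format!("{r:?}"))
        .collect()
}

fn main() {
    let rules = vec![
        RankingRuleView::Typo,
        RankingRuleView::Words,
        RankingRuleView::Asc("price".to_string()),
        RankingRuleView::Desc("rank".to_string()),
    ];
    assert_eq!(words_position(&rules), Some(1));
    assert_eq!(
        custom_rules(&rules),
        vec![r#"Asc("price")"#.to_string(), r#"Desc("rank")"#.to_string()]
    );
}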
@ -360,6 +384,9 @@ make_setting_route!(
"/faceting", "/faceting",
patch, patch,
meilisearch_types::settings::FacetingSettings, meilisearch_types::settings::FacetingSettings,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsFaceting,
>,
faceting, faceting,
"faceting", "faceting",
analytics, analytics,
@ -382,6 +409,9 @@ make_setting_route!(
"/pagination", "/pagination",
patch, patch,
meilisearch_types::settings::PaginationSettings, meilisearch_types::settings::PaginationSettings,
meilisearch_types::error::DeserrError<
meilisearch_types::error::deserr_codes::InvalidSettingsPagination,
>,
pagination, pagination,
"pagination", "pagination",
analytics, analytics,
@ -428,66 +458,10 @@ generate_configure!(
faceting faceting
); );
#[derive(Debug)]
pub struct SettingsDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for SettingsDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for SettingsDeserrError {}
impl ErrorCode for SettingsDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl deserr::MergeWithError<SettingsDeserrError> for SettingsDeserrError {
fn merge(
_self_: Option<Self>,
other: SettingsDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for SettingsDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.first_field() {
Some("displayedAttributes") => Code::InvalidSettingsDisplayedAttributes,
Some("searchableAttributes") => Code::InvalidSettingsSearchableAttributes,
Some("filterableAttributes") => Code::InvalidSettingsFilterableAttributes,
Some("sortableAttributes") => Code::InvalidSettingsSortableAttributes,
Some("rankingRules") => Code::InvalidSettingsRankingRules,
Some("stopWords") => Code::InvalidSettingsStopWords,
Some("synonyms") => Code::InvalidSettingsSynonyms,
Some("distinctAttribute") => Code::InvalidSettingsDistinctAttribute,
Some("typoTolerance") => Code::InvalidSettingsTypoTolerance,
Some("faceting") => Code::InvalidSettingsFaceting,
Some("pagination") => Code::InvalidSettingsPagination,
_ => Code::BadRequest,
};
Err(SettingsDeserrError { error, code })
}
}
pub async fn update_all( pub async fn update_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>, index_uid: web::Path<String>,
body: ValidatedJson<Settings<Unchecked>, SettingsDeserrError>, body: ValidatedJson<Settings<Unchecked>, DeserrError>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
@ -497,13 +471,13 @@ pub async fn update_all(
"Settings Updated".to_string(), "Settings Updated".to_string(),
json!({ json!({
"ranking_rules": { "ranking_rules": {
"words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "words")), "words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Words))),
"typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "typo")), "typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Typo))),
"proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "proximity")), "proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Proximity))),
"attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "attribute")), "attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Attribute))),
"sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "sort")), "sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Sort))),
"exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "exactness")), "exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Exactness))),
"values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::<Vec<_>>().join(", ")), "values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !matches!(s, RankingRuleView::Asc(_) | RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
}, },
"searchable_attributes": { "searchable_attributes": {
"total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()), "total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),

View File

@ -3,10 +3,9 @@ use std::str::FromStr;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse}; use actix_web::{web, HttpRequest, HttpResponse};
use deserr::DeserializeFromValue;
use index_scheduler::{IndexScheduler, Query}; use index_scheduler::{IndexScheduler, Query};
use log::debug; use log::debug;
use meilisearch_types::error::ResponseError; use meilisearch_types::error::{ResponseError, TakeErrorMessage};
use meilisearch_types::settings::{Settings, Unchecked}; use meilisearch_types::settings::{Settings, Unchecked};
use meilisearch_types::star_or::StarOr; use meilisearch_types::star_or::StarOr;
use meilisearch_types::tasks::{Kind, Status, Task, TaskId}; use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
@ -57,6 +56,14 @@ where
{ {
Ok(Some(input.parse()?)) Ok(Some(input.parse()?))
} }
pub fn from_string_to_option_take_error_message<T, E>(
input: &str,
) -> Result<Option<T>, TakeErrorMessage<E>>
where
T: FromStr<Err = E>,
{
Ok(Some(input.parse().map_err(TakeErrorMessage)?))
}
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
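
PAGINATION_DEFAULT_LIMIT stays a function-pointer constant so that one item serves both attribute styles used above: named as a path in #[serde(default = "PAGINATION_DEFAULT_LIMIT")] and called as PAGINATION_DEFAULT_LIMIT() in the deserr default. A small stand-alone sketch of the pattern (the Pagination struct here is a stand-in, not the one defined just below):

// A non-capturing closure coerced to a fn pointer: callable from any default hook.
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

#[derive(Debug)]
struct Pagination {
    offset: usize,
    limit: usize,
}

impl Default for Pagination {
    fn default() -> Self {
        Pagination { offset: 0, limit: PAGINATION_DEFAULT_LIMIT() }
    }
}

fn main() {
    let p = Pagination::default();
    assert_eq!(p.offset, 0);
    assert_eq!(p.limit, 20);
    println!("{p:?}");
}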
@ -83,16 +90,8 @@ impl From<Task> for SummarizedTaskView {
} }
} }
} }
#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Pagination { pub struct Pagination {
#[serde(default)]
#[deserr(default, from(&String) = FromStr::from_str -> std::num::ParseIntError)]
pub offset: usize, pub offset: usize,
#[serde(default = "PAGINATION_DEFAULT_LIMIT")]
#[deserr(default = PAGINATION_DEFAULT_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
pub limit: usize, pub limit: usize,
} }

View File

@ -1,10 +1,9 @@
use std::fmt;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse}; use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{DeserializeFromValue, IntoValue, ValuePointerRef}; use deserr::DeserializeFromValue;
use index_scheduler::IndexScheduler; use index_scheduler::IndexScheduler;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError}; use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
use meilisearch_types::error::{DeserrError, ResponseError};
use meilisearch_types::tasks::{IndexSwap, KindWithContent}; use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use serde_json::json; use serde_json::json;
@ -21,14 +20,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
} }
#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)] #[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
#[deserr(rename_all = camelCase, deny_unknown_fields)] #[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct SwapIndexesPayload { pub struct SwapIndexesPayload {
#[deserr(error = DeserrError<InvalidSwapIndexes>)]
indexes: Vec<String>, indexes: Vec<String>,
} }
pub async fn swap_indexes( pub async fn swap_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>, index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
params: ValidatedJson<Vec<SwapIndexesPayload>, SwapIndexesDeserrError>, params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrError>,
req: HttpRequest, req: HttpRequest,
analytics: web::Data<dyn Analytics>, analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
@ -62,49 +62,3 @@ pub async fn swap_indexes(
let task: SummarizedTaskView = task.into(); let task: SummarizedTaskView = task.into();
Ok(HttpResponse::Accepted().json(task)) Ok(HttpResponse::Accepted().json(task))
} }
#[derive(Debug)]
pub struct SwapIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for SwapIndexesDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for SwapIndexesDeserrError {}
impl ErrorCode for SwapIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl deserr::MergeWithError<SwapIndexesDeserrError> for SwapIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: SwapIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for SwapIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("indexes") => Code::InvalidSwapIndexes,
_ => Code::BadRequest,
};
Err(SwapIndexesDeserrError { error, code })
}
}

View File

@ -1,16 +1,12 @@
use std::cmp::min; use std::cmp::min;
use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::convert::Infallible; use std::str::FromStr;
use std::fmt;
use std::num::ParseIntError;
use std::str::{FromStr, ParseBoolError};
use std::time::Instant; use std::time::Instant;
use deserr::{ use deserr::DeserializeFromValue;
DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
};
use either::Either; use either::Either;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode}; use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::DeserrError;
use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
use meilisearch_types::{milli, Document}; use meilisearch_types::{milli, Document};
use milli::tokenizer::TokenizerBuilder; use milli::tokenizer::TokenizerBuilder;
@ -34,32 +30,41 @@ pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string(); pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
#[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)] #[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)] #[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQuery { pub struct SearchQuery {
#[deserr(error = DeserrError<InvalidSearchQ>)]
pub q: Option<String>, pub q: Option<String>,
#[deserr(default = DEFAULT_SEARCH_OFFSET())] #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET())]
pub offset: usize, pub offset: usize,
#[deserr(default = DEFAULT_SEARCH_LIMIT())] #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT())]
pub limit: usize, pub limit: usize,
#[deserr(error = DeserrError<InvalidSearchPage>)]
pub page: Option<usize>, pub page: Option<usize>,
#[deserr(error = DeserrError<InvalidSearchHitsPerPage>)]
pub hits_per_page: Option<usize>, pub hits_per_page: Option<usize>,
#[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
pub attributes_to_retrieve: Option<BTreeSet<String>>, pub attributes_to_retrieve: Option<BTreeSet<String>>,
#[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
pub attributes_to_crop: Option<Vec<String>>, pub attributes_to_crop: Option<Vec<String>>,
#[deserr(default = DEFAULT_CROP_LENGTH())] #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
pub crop_length: usize, pub crop_length: usize,
#[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
pub attributes_to_highlight: Option<HashSet<String>>, pub attributes_to_highlight: Option<HashSet<String>>,
#[deserr(default)] #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default)]
pub show_matches_position: bool, pub show_matches_position: bool,
#[deserr(error = DeserrError<InvalidSearchFilter>)]
pub filter: Option<Value>, pub filter: Option<Value>,
#[deserr(error = DeserrError<InvalidSearchSort>)]
pub sort: Option<Vec<String>>, pub sort: Option<Vec<String>>,
#[deserr(error = DeserrError<InvalidSearchFacets>)]
pub facets: Option<Vec<String>>, pub facets: Option<Vec<String>>,
#[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())] #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
pub highlight_pre_tag: String, pub highlight_pre_tag: String,
#[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())] #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
pub highlight_post_tag: String, pub highlight_post_tag: String,
#[deserr(default = DEFAULT_CROP_MARKER())] #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
pub crop_marker: String, pub crop_marker: String,
#[deserr(default)] #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
pub matching_strategy: MatchingStrategy, pub matching_strategy: MatchingStrategy,
} }
@ -94,96 +99,6 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
} }
} }
#[derive(Debug)]
pub struct SearchDeserError {
error: String,
code: Code,
}
impl std::fmt::Display for SearchDeserError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for SearchDeserError {}
impl ErrorCode for SearchDeserError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<SearchDeserError> for SearchDeserError {
fn merge(
_self_: Option<Self>,
other: SearchDeserError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl DeserializeError for SearchDeserError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("q") => Code::InvalidSearchQ,
Some("offset") => Code::InvalidSearchOffset,
Some("limit") => Code::InvalidSearchLimit,
Some("page") => Code::InvalidSearchPage,
Some("hitsPerPage") => Code::InvalidSearchHitsPerPage,
Some("attributesToRetrieve") => Code::InvalidSearchAttributesToRetrieve,
Some("attributesToCrop") => Code::InvalidSearchAttributesToCrop,
Some("cropLength") => Code::InvalidSearchCropLength,
Some("attributesToHighlight") => Code::InvalidSearchAttributesToHighlight,
Some("showMatchesPosition") => Code::InvalidSearchShowMatchesPosition,
Some("filter") => Code::InvalidSearchFilter,
Some("sort") => Code::InvalidSearchSort,
Some("facets") => Code::InvalidSearchFacets,
Some("highlightPreTag") => Code::InvalidSearchHighlightPreTag,
Some("highlightPostTag") => Code::InvalidSearchHighlightPostTag,
Some("cropMarker") => Code::InvalidSearchCropMarker,
Some("matchingStrategy") => Code::InvalidSearchMatchingStrategy,
_ => Code::BadRequest,
};
Err(SearchDeserError { error, code })
}
}
impl MergeWithError<ParseBoolError> for SearchDeserError {
fn merge(
_self_: Option<Self>,
other: ParseBoolError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
SearchDeserError::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
impl MergeWithError<ParseIntError> for SearchDeserError {
fn merge(
_self_: Option<Self>,
other: ParseIntError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
SearchDeserError::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
#[derive(Debug, Clone, Serialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, PartialEq, Eq)]
pub struct SearchHit { pub struct SearchHit {
#[serde(flatten)] #[serde(flatten)]

File diff suppressed because it is too large

View File

@ -197,6 +197,76 @@ impl Index<'_> {
self.service.patch_encoded(url, settings, self.encoder).await self.service.patch_encoded(url, settings, self.encoder).await
} }
pub async fn update_settings_displayed_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url =
format!("/indexes/{}/settings/displayed-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_searchable_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url =
format!("/indexes/{}/settings/searchable-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_filterable_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url =
format!("/indexes/{}/settings/filterable-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_sortable_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/sortable-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_ranking_rules(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/ranking-rules", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_stop_words(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/stop-words", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_synonyms(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/synonyms", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_distinct_attribute(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/distinct-attribute", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_typo_tolerance(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/typo-tolerance", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_faceting(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/faceting", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_pagination(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/pagination", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}
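
Each helper added above simply PUTs or PATCHes its payload to one settings sub-route of the index. A sketch of the URL shape they target (url-encoding of the index uid, done by the test suite's urlencode helper, is left out here):

// Builds the per-setting route used by the helpers above (uid not url-encoded here).
fn settings_route(uid: &str, route: &str) -> String {
    format!("/indexes/{uid}/settings/{route}")
}

fn main() {
    assert_eq!(
        settings_route("movies", "ranking-rules"),
        "/indexes/movies/settings/ranking-rules"
    );
    assert_eq!(
        settings_route("movies", "typo-tolerance"),
        "/indexes/movies/settings/typo-tolerance"
    );
}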
pub async fn delete_settings(&self) -> (Value, StatusCode) { pub async fn delete_settings(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref())); let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
self.service.delete(url).await self.service.delete(url).await

View File

@ -926,7 +926,7 @@ async fn error_primary_key_inference() {
"indexedDocuments": 1 "indexedDocuments": 1
}, },
"error": { "error": {
"message": "The primary key inference process failed because the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", "message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.",
"code": "index_primary_key_no_candidate_found", "code": "index_primary_key_no_candidate_found",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found" "link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found"
@ -966,7 +966,7 @@ async fn error_primary_key_inference() {
"indexedDocuments": 1 "indexedDocuments": 1
}, },
"error": { "error": {
"message": "The primary key inference process failed because the engine found 3 fields ending with `id` in their name, such as 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.", "message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.",
"code": "index_primary_key_multiple_candidates_found", "code": "index_primary_key_multiple_candidates_found",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found" "link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found"

View File

@ -384,7 +384,7 @@ async fn search_bad_matching_strategy() {
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
"message": "Incorrect tag value at `.matchingStrategy`.", "message": "Json deserialize error: unknown value `doggo`, expected one of `last`, `all` at `.matchingStrategy`.",
"code": "invalid_search_matching_strategy", "code": "invalid_search_matching_strategy",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy" "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
@ -395,7 +395,7 @@ async fn search_bad_matching_strategy() {
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
"message": "Incorrect tag value at `.matchingStrategy`.", "message": "Json deserialize error: unknown value `doggo`, expected one of `last`, `all` at `.matchingStrategy`.",
"code": "invalid_search_matching_strategy", "code": "invalid_search_matching_strategy",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy" "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"

View File

@ -18,6 +18,17 @@ async fn settings_bad_displayed_attributes() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes" "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
} }
"###); "###);
let (response, code) = index.update_settings_displayed_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
"code": "invalid_settings_displayed_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -35,6 +46,17 @@ async fn settings_bad_searchable_attributes() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes" "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
} }
"###); "###);
let (response, code) = index.update_settings_searchable_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
"code": "invalid_settings_searchable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -52,6 +74,17 @@ async fn settings_bad_filterable_attributes() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes" "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
} }
"###); "###);
let (response, code) = index.update_settings_filterable_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
"code": "invalid_settings_filterable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -69,6 +102,17 @@ async fn settings_bad_sortable_attributes() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes" "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
} }
"###); "###);
let (response, code) = index.update_settings_sortable_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
"code": "invalid_settings_sortable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -86,6 +130,17 @@ async fn settings_bad_ranking_rules() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules" "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
} }
"###); "###);
let (response, code) = index.update_settings_ranking_rules(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -103,6 +158,17 @@ async fn settings_bad_stop_words() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words" "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
} }
"###); "###);
let (response, code) = index.update_settings_stop_words(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
"code": "invalid_settings_stop_words",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -120,6 +186,17 @@ async fn settings_bad_synonyms() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms" "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
} }
"###); "###);
let (response, code) = index.update_settings_synonyms(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
"code": "invalid_settings_synonyms",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -137,6 +214,17 @@ async fn settings_bad_distinct_attribute() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute" "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
} }
"###); "###);
let (response, code) = index.update_settings_distinct_attribute(json!(["doggo"])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: Sequence `[\"doggo\"]`, expected a String at ``.",
"code": "invalid_settings_distinct_attribute",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -154,6 +242,17 @@ async fn settings_bad_typo_tolerance() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance" "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
} }
"###); "###);
let (response, code) = index.update_settings_typo_tolerance(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
"code": "invalid_settings_typo_tolerance",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -171,6 +270,17 @@ async fn settings_bad_faceting() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-faceting" "link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
} }
"###); "###);
let (response, code) = index.update_settings_faceting(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
"code": "invalid_settings_faceting",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
}
"###);
} }
#[actix_rt::test] #[actix_rt::test]
@ -188,4 +298,15 @@ async fn settings_bad_pagination() {
"link": "https://docs.meilisearch.com/errors#invalid-settings-pagination" "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
} }
"###); "###);
let (response, code) = index.update_settings_pagination(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "invalid type: String `\"doggo\"`, expected a Map at ``.",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
}
"###);
} }

View File

@ -179,15 +179,15 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
let server = Server::new().await; let server = Server::new().await;
let index = server.index("test##! "); let index = server.index("test##! ");
let (response, code) = index.update_settings(json!({})).await; let (response, code) = index.update_settings(json!({})).await;
assert_eq!(code, 400); meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
let expected = json!({ {
"message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid", "code": "invalid_index_uid",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"}); "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
assert_eq!(response, expected); "###);
} }
macro_rules! test_setting_routes { macro_rules! test_setting_routes {
@ -278,22 +278,16 @@ async fn error_set_invalid_ranking_rules() {
let index = server.index("test"); let index = server.index("test");
index.create(None).await; index.create(None).await;
let (_response, _code) = let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await; meili_snap::snapshot!(code, @"400 Bad Request");
index.wait_task(1).await; meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
let (response, code) = index.get_task(1).await; {
"message": "`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules. at `.rankingRules[0]`.",
assert_eq!(code, 200); "code": "invalid_settings_ranking_rules",
assert_eq!(response["status"], "failed"); "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
let expected_error = json!({ }
"message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#, "###);
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
});
assert_eq!(response["error"], expected_error);
} }
#[actix_rt::test] #[actix_rt::test]
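The rewrite of error_set_invalid_ranking_rules above is where the behavioural change of this refactor is easiest to see: the old test had to enqueue the settings update, wait for the task, and inspect the failed task's error, whereas the payload is now rejected while the request is being deserialized. A minimal sketch of that contrast, using only calls visible in the hunk (imports assumed as in the previous sketch):

use meili_snap::snapshot;
use serde_json::json;

use crate::common::Server; // assumed path, as above

#[actix_rt::test]
async fn ranking_rules_validation_sketch() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(None).await;

    // Old flow (removed above): the invalid rule only surfaced once the enqueued
    // settingsUpdate task had run and failed:
    //   index.update_settings(json!({ "rankingRules": ["manyTheFish"] })).await;
    //   index.wait_task(1).await;
    //   let (response, _code) = index.get_task(1).await;
    //   assert_eq!(response["status"], "failed");

    // New flow: deserr validates the payload during deserialization, so the route
    // answers right away and the error message names the JSON path of the bad
    // value (`.rankingRules[0]`).
    let (response, code) =
        index.update_settings(json!({ "rankingRules": ["manyTheFish"] })).await;
    snapshot!(code, @"400 Bad Request");
    assert_eq!(response["code"], "invalid_settings_ranking_rules");
}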


@ -1,4 +1,4 @@
use meili_snap::insta::{self, assert_json_snapshot}; use meili_snap::insta::assert_json_snapshot;
use serde_json::json; use serde_json::json;
use time::format_description::well_known::Rfc3339; use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime; use time::OffsetDateTime;
@ -179,7 +179,7 @@ async fn get_task_filter_error() {
let (response, code) = server.tasks_filter(json!( { "lol": "pied" })).await; let (response, code) = server.tasks_filter(json!( { "lol": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Query deserialize error: unknown field `lol`", "message": "Query deserialize error: unknown field `lol`",
"code": "bad_request", "code": "bad_request",
@ -190,7 +190,7 @@ async fn get_task_filter_error() {
let (response, code) = server.tasks_filter(json!( { "uids": "pied" })).await; let (response, code) = server.tasks_filter(json!( { "uids": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Task uid `pied` is invalid. It should only contain numeric characters.", "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids", "code": "invalid_task_uids",
@ -201,7 +201,7 @@ async fn get_task_filter_error() {
let (response, code) = server.tasks_filter(json!( { "from": "pied" })).await; let (response, code) = server.tasks_filter(json!( { "from": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Query deserialize error: invalid digit found in string", "message": "Query deserialize error: invalid digit found in string",
"code": "bad_request", "code": "bad_request",
@ -212,7 +212,7 @@ async fn get_task_filter_error() {
let (response, code) = server.tasks_filter(json!( { "beforeStartedAt": "pied" })).await; let (response, code) = server.tasks_filter(json!( { "beforeStartedAt": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at", "code": "invalid_task_before_started_at",
@ -228,7 +228,7 @@ async fn delete_task_filter_error() {
let (response, code) = server.delete_tasks(json!(null)).await; let (response, code) = server.delete_tasks(json!(null)).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", "message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
"code": "missing_task_filters", "code": "missing_task_filters",
@ -239,7 +239,7 @@ async fn delete_task_filter_error() {
let (response, code) = server.delete_tasks(json!({ "lol": "pied" })).await; let (response, code) = server.delete_tasks(json!({ "lol": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Query deserialize error: unknown field `lol`", "message": "Query deserialize error: unknown field `lol`",
"code": "bad_request", "code": "bad_request",
@ -250,7 +250,7 @@ async fn delete_task_filter_error() {
let (response, code) = server.delete_tasks(json!({ "uids": "pied" })).await; let (response, code) = server.delete_tasks(json!({ "uids": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Task uid `pied` is invalid. It should only contain numeric characters.", "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids", "code": "invalid_task_uids",
@ -266,7 +266,7 @@ async fn cancel_task_filter_error() {
let (response, code) = server.cancel_tasks(json!(null)).await; let (response, code) = server.cancel_tasks(json!(null)).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", "message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
"code": "missing_task_filters", "code": "missing_task_filters",
@ -277,7 +277,7 @@ async fn cancel_task_filter_error() {
let (response, code) = server.cancel_tasks(json!({ "lol": "pied" })).await; let (response, code) = server.cancel_tasks(json!({ "lol": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Query deserialize error: unknown field `lol`", "message": "Query deserialize error: unknown field `lol`",
"code": "bad_request", "code": "bad_request",
@ -288,7 +288,7 @@ async fn cancel_task_filter_error() {
let (response, code) = server.cancel_tasks(json!({ "uids": "pied" })).await; let (response, code) = server.cancel_tasks(json!({ "uids": "pied" })).await;
assert_eq!(code, 400, "{}", response); assert_eq!(code, 400, "{}", response);
insta::assert_json_snapshot!(response, @r###" meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{ {
"message": "Task uid `pied` is invalid. It should only contain numeric characters.", "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids", "code": "invalid_task_uids",
@ -517,46 +517,26 @@ async fn test_summarized_settings_update() {
let server = Server::new().await; let server = Server::new().await;
let index = server.index("test"); let index = server.index("test");
// here we should find my payload even in the failed task. // here we should find my payload even in the failed task.
index.update_settings(json!({ "rankingRules": ["custom"] })).await; let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "`custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules. at `.rankingRules[0]`.",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
}
"###);
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(0).await; index.wait_task(0).await;
let (task, _) = index.get_task(0).await; let (task, _) = index.get_task(0).await;
dbg!(&task);
assert_json_snapshot!(task, assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###" @r###"
{ {
"uid": 0, "uid": 0,
"indexUid": "test", "indexUid": "test",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"rankingRules": [
"custom"
]
},
"error": {
"message": "`custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": 1,
"indexUid": "test",
"status": "succeeded", "status": "succeeded",
"type": "settingsUpdate", "type": "settingsUpdate",
"canceledBy": null, "canceledBy": null,