From 1cce613399805c4e8677c52ce3addafbcae51670 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 6 Dec 2022 16:01:09 +0100 Subject: [PATCH 001/186] Fixup dumps-destination -> dump-directory section header in help link --- config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.toml b/config.toml index 6b25b2cff..903ce1446 100644 --- a/config.toml +++ b/config.toml @@ -45,7 +45,7 @@ log_level = "INFO" dump_dir = "dumps/" # Sets the directory where Meilisearch will create dump files. -# https://docs.meilisearch.com/learn/configuration/instance_options.html#dumps-destination +# https://docs.meilisearch.com/learn/configuration/instance_options.html#dump-directory # import_dump = "./path/to/my/file.dump" # Imports the dump file located at the specified path. Path must point to a .dump file. From 436ae4e46649ec1b56f0708e1fb6394b5828c68b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Thu, 12 Jan 2023 13:55:53 +0100 Subject: [PATCH 002/186] Improve error messages generated by deserr Split Json and Query Parameter error types --- Cargo.lock | 2 +- meilisearch-types/Cargo.toml | 1 + meilisearch-types/src/error.rs | 431 +++++++++++++++++++- meilisearch-types/src/keys.rs | 30 +- meilisearch-types/src/lib.rs | 2 + meilisearch-types/src/settings.rs | 34 +- meilisearch-types/src/tasks.rs | 4 +- meilisearch/Cargo.toml | 1 - meilisearch/src/routes/api_key.rs | 16 +- meilisearch/src/routes/indexes/documents.rs | 33 +- meilisearch/src/routes/indexes/mod.rs | 30 +- meilisearch/src/routes/indexes/search.rs | 50 +-- meilisearch/src/routes/indexes/settings.rs | 26 +- meilisearch/src/routes/mod.rs | 26 +- meilisearch/src/routes/swap_indexes.rs | 9 +- meilisearch/src/routes/tasks.rs | 146 +++---- meilisearch/src/search.rs | 40 +- meilisearch/tests/auth/api_keys.rs | 32 +- meilisearch/tests/common/index.rs | 9 +- meilisearch/tests/common/server.rs | 14 +- meilisearch/tests/search/errors.rs | 58 +-- meilisearch/tests/settings/errors.rs | 44 +- meilisearch/tests/settings/get_settings.rs | 2 +- meilisearch/tests/tasks/errors.rs | 168 ++++---- meilisearch/tests/tasks/mod.rs | 42 +- 25 files changed, 802 insertions(+), 448 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f4954ca86..da1ec3011 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2348,7 +2348,6 @@ dependencies = [ "rustls-pemfile", "segment", "serde", - "serde-cs", "serde_json", "serde_urlencoded", "sha-1", @@ -2413,6 +2412,7 @@ dependencies = [ "proptest-derive", "roaring", "serde", + "serde-cs", "serde_json", "tar", "tempfile", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index bd596ba2d..4c0b1ca93 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -21,6 +21,7 @@ proptest = { version = "1.0.0", optional = true } proptest-derive = { version = "0.3.0", optional = true } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } +serde-cs = "0.2.4" serde_json = "1.0.85" tar = "0.4.38" tempfile = "3.3.0" diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index bc29f9e82..2be6ffff4 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -1,14 +1,18 @@ use std::convert::Infallible; use std::marker::PhantomData; +use std::str::FromStr; use std::{fmt, io}; use actix_web::http::StatusCode; use actix_web::{self as aweb, HttpResponseBuilder}; use aweb::rt::task::JoinError; use convert_case::Casing; -use deserr::{DeserializeError, IntoValue, 
MergeWithError, ValuePointerRef}; +use deserr::{DeserializeError, ErrorKind, IntoValue, MergeWithError, ValueKind, ValuePointerRef}; use milli::heed::{Error as HeedError, MdbError}; use serde::{Deserialize, Serialize}; +use serde_cs::vec::CS; + +use crate::star_or::StarOr; use self::deserr_codes::MissingIndexUid; @@ -422,41 +426,49 @@ mod strategy { } } -pub struct DeserrError { +pub struct DeserrJson; +pub struct DeserrQueryParam; + +pub type DeserrJsonError = DeserrError; +pub type DeserrQueryParamError = DeserrError; + +pub struct DeserrError { pub msg: String, pub code: Code, - _phantom: PhantomData, + _phantom: PhantomData<(Format, C)>, } -impl std::fmt::Debug for DeserrError { +impl std::fmt::Debug for DeserrError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish() } } -impl std::fmt::Display for DeserrError { +impl std::fmt::Display for DeserrError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.msg) } } -impl std::error::Error for DeserrError {} -impl ErrorCode for DeserrError { +impl std::error::Error for DeserrError {} +impl ErrorCode for DeserrError { fn error_code(&self) -> Code { self.code } } -impl MergeWithError> for DeserrError { +impl + MergeWithError> for DeserrError +{ fn merge( _self_: Option, - other: DeserrError, + other: DeserrError, _merge_location: ValuePointerRef, ) -> Result { Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData }) } } -impl DeserrError { +impl DeserrJsonError { pub fn missing_index_uid(field: &str, location: ValuePointerRef) -> Self { let x = unwrap_any(Self::error::( None, @@ -467,21 +479,364 @@ impl DeserrError { } } -impl deserr::DeserializeError for DeserrError { +// if the error happened in the root, then an empty string is returned. +pub fn location_json_description(location: ValuePointerRef, article: &str) -> String { + fn rec(location: ValuePointerRef) -> String { + match location { + ValuePointerRef::Origin => String::new(), + ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key, + ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), + } + } + match location { + ValuePointerRef::Origin => String::new(), + _ => { + format!("{article} `{}`", rec(location)) + } + } +} + +fn value_kinds_description_json(kinds: &[ValueKind]) -> String { + fn order(kind: &ValueKind) -> u8 { + match kind { + ValueKind::Null => 0, + ValueKind::Boolean => 1, + ValueKind::Integer => 2, + ValueKind::NegativeInteger => 3, + ValueKind::Float => 4, + ValueKind::String => 5, + ValueKind::Sequence => 6, + ValueKind::Map => 7, + } + } + + fn single_description(kind: &ValueKind) -> &'static str { + match kind { + ValueKind::Null => "null", + ValueKind::Boolean => "a boolean", + ValueKind::Integer => "a positive integer", + ValueKind::NegativeInteger => "an integer", + ValueKind::Float => "a number", + ValueKind::String => "a string", + ValueKind::Sequence => "an array", + ValueKind::Map => "an object", + } + } + + fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) { + let (msg_part, rest): (_, &[ValueKind]) = match kinds { + [] => (String::new(), &[]), + [ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => { + ("a number".to_owned(), rest) + } + [ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] 
=> { + ("a number".to_owned(), rest) + } + [ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => { + ("an integer".to_owned(), rest) + } + [a] => (single_description(a).to_owned(), &[]), + [a, rest @ ..] => (single_description(a).to_owned(), rest), + }; + + if rest.is_empty() { + if *count_items == 0 { + message.push_str(&msg_part); + } else if *count_items == 1 { + message.push_str(&format!(" or {msg_part}")); + } else { + message.push_str(&format!(", or {msg_part}")); + } + } else { + if *count_items == 0 { + message.push_str(&msg_part); + } else { + message.push_str(&format!(", {msg_part}")); + } + + *count_items += 1; + description_rec(rest, count_items, message); + } + } + + let mut kinds = kinds.to_owned(); + kinds.sort_by_key(order); + kinds.dedup(); + + if kinds.is_empty() { + "a different value".to_owned() + } else { + let mut message = String::new(); + description_rec(kinds.as_slice(), &mut 0, &mut message); + message + } +} + +fn value_description_with_kind_json(v: &serde_json::Value) -> String { + match v.kind() { + ValueKind::Null => "null".to_owned(), + kind => { + format!( + "{}: `{}`", + value_kinds_description_json(&[kind]), + serde_json::to_string(v).unwrap() + ) + } + } +} + +impl deserr::DeserializeError for DeserrJsonError { fn error( _self_: Option, error: deserr::ErrorKind, location: ValuePointerRef, ) -> Result { - let msg = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0; + let mut message = String::new(); - Err(DeserrError { msg, code: C::default().error_code(), _phantom: PhantomData }) + message.push_str(&match error { + ErrorKind::IncorrectValueKind { actual, accepted } => { + let expected = value_kinds_description_json(accepted); + // if we're not able to get the value as a string then we print nothing. + let received = value_description_with_kind_json(&serde_json::Value::from(actual)); + + let location = location_json_description(location, " at"); + + format!("Invalid value type{location}: expected {expected}, but found {received}") + } + ErrorKind::MissingField { field } => { + // serde_json original message: + // Json deserialize error: missing field `lol` at line 1 column 2 + let location = location_json_description(location, " inside"); + format!("Missing field `{field}`{location}") + } + ErrorKind::UnknownKey { key, accepted } => { + let location = location_json_description(location, " inside"); + format!( + "Unknown field `{}`{location}: expected one of {}", + key, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", ") + ) + } + ErrorKind::UnknownValue { value, accepted } => { + let location = location_json_description(location, " at"); + format!( + "Unknown value `{}`{location}: expected one of {}", + value, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", "), + ) + } + ErrorKind::Unexpected { msg } => { + let location = location_json_description(location, " at"); + // serde_json original message: + // The json payload provided is malformed. `trailing characters at line 1 column 19`. + format!("Invalid value{location}: {msg}") + } + }); + + Err(DeserrJsonError { + msg: message, + code: C::default().error_code(), + _phantom: PhantomData, + }) } } +// if the error happened in the root, then an empty string is returned. 
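+// A sketch of the contrast with `location_json_description` above, inferred
+// from the two `rec` helpers rather than stated anywhere in this patch: for a
+// pointer Key { key: "b", prev: Key { key: "a", prev: Origin } }, the JSON
+// variant renders `.a.b` while this query-parameter variant renders `a.b`,
+// dropping the leading dot on the root key. Both render the origin itself as
+// an empty string.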
+pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String { + fn rec(location: ValuePointerRef) -> String { + match location { + ValuePointerRef::Origin => String::new(), + ValuePointerRef::Key { key, prev } => { + if matches!(prev, ValuePointerRef::Origin) { + key.to_owned() + } else { + rec(*prev) + "." + key + } + } + ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), + } + } + match location { + ValuePointerRef::Origin => String::new(), + _ => { + format!("{article} `{}`", rec(location)) + } + } +} + +impl deserr::DeserializeError for DeserrQueryParamError { + fn error( + _self_: Option, + error: deserr::ErrorKind, + location: ValuePointerRef, + ) -> Result { + let mut message = String::new(); + + message.push_str(&match error { + ErrorKind::IncorrectValueKind { actual, accepted } => { + let expected = value_kinds_description_query_param(accepted); + // if we're not able to get the value as a string then we print nothing. + let received = value_description_with_kind_query_param(actual); + + let location = location_query_param_description(location, " for parameter"); + + format!("Invalid value type{location}: expected {expected}, but found {received}") + } + ErrorKind::MissingField { field } => { + // serde_json original message: + // Json deserialize error: missing field `lol` at line 1 column 2 + let location = location_query_param_description(location, " inside"); + format!("Missing parameter `{field}`{location}") + } + ErrorKind::UnknownKey { key, accepted } => { + let location = location_query_param_description(location, " inside"); + format!( + "Unknown parameter `{}`{location}: expected one of {}", + key, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", ") + ) + } + ErrorKind::UnknownValue { value, accepted } => { + let location = location_query_param_description(location, " for parameter"); + format!( + "Unknown value `{}`{location}: expected one of {}", + value, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", "), + ) + } + ErrorKind::Unexpected { msg } => { + let location = location_query_param_description(location, " in parameter"); + // serde_json original message: + // The json payload provided is malformed. `trailing characters at line 1 column 19`. 
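+                // With the " in parameter" article above, this renders
+                // messages of the shape asserted by the snapshot tests in
+                // meilisearch/src/routes/tasks.rs later in this patch, e.g.
+                // Invalid value in parameter `afterFinishedAt`: `2021` is an
+                // invalid date-time.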
+ format!("Invalid value{location}: {msg}") + } + }); + + Err(DeserrQueryParamError { + msg: message, + code: C::default().error_code(), + _phantom: PhantomData, + }) + } +} + +fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String { + "a string".to_owned() +} + +fn value_description_with_kind_query_param(actual: deserr::Value) -> String { + match actual { + deserr::Value::Null => "null".to_owned(), + deserr::Value::Boolean(x) => format!("a boolean: `{x}`"), + deserr::Value::Integer(x) => format!("an integer: `{x}`"), + deserr::Value::NegativeInteger(x) => { + format!("an integer: `{x}`") + } + deserr::Value::Float(x) => { + format!("a number: `{x}`") + } + deserr::Value::String(x) => { + format!("a string: `{x}`") + } + deserr::Value::Sequence(_) => "multiple values".to_owned(), + deserr::Value::Map(_) => "multiple parameters".to_owned(), + } +} + +#[derive(Debug)] +pub struct DetailedParseIntError(String); +impl fmt::Display for DetailedParseIntError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "could not parse `{}` as a positive integer", self.0) + } +} +impl std::error::Error for DetailedParseIntError {} + +pub fn parse_u32_query_param(x: String) -> Result> { + x.parse::().map_err(|_e| TakeErrorMessage(DetailedParseIntError(x.to_owned()))) +} +pub fn parse_usize_query_param( + x: String, +) -> Result> { + x.parse::().map_err(|_e| TakeErrorMessage(DetailedParseIntError(x.to_owned()))) +} +pub fn parse_option_usize_query_param( + s: Option, +) -> Result, TakeErrorMessage> { + if let Some(s) = s { + parse_usize_query_param(s).map(Some) + } else { + Ok(None) + } +} +pub fn parse_option_u32_query_param( + s: Option, +) -> Result, TakeErrorMessage> { + if let Some(s) = s { + parse_u32_query_param(s).map(Some) + } else { + Ok(None) + } +} +pub fn parse_option_vec_u32_query_param( + s: Option>, +) -> Result>, TakeErrorMessage> { + if let Some(s) = s { + s.into_iter() + .map(parse_u32_query_param) + .collect::, TakeErrorMessage>>() + .map(Some) + } else { + Ok(None) + } +} +pub fn parse_option_cs_star_or( + s: Option>>, +) -> Result>, TakeErrorMessage> { + if let Some(s) = s.and_then(fold_star_or) as Option> { + s.into_iter() + .map(|s| T::from_str(&s)) + .collect::, T::Err>>() + .map_err(TakeErrorMessage) + .map(Some) + } else { + Ok(None) + } +} + +/// Extracts the raw values from the `StarOr` types and +/// return None if a `StarOr::Star` is encountered. +pub fn fold_star_or(content: impl IntoIterator>) -> Option +where + O: FromIterator, +{ + content + .into_iter() + .map(|value| match value { + StarOr::Star => None, + StarOr::Other(val) => Some(val), + }) + .collect() +} pub struct TakeErrorMessage(pub T); -impl MergeWithError> for DeserrError +impl MergeWithError> for DeserrJsonError where T: std::error::Error, { @@ -490,7 +845,24 @@ where other: TakeErrorMessage, merge_location: ValuePointerRef, ) -> Result { - DeserrError::error::( + DeserrJsonError::error::( + None, + deserr::ErrorKind::Unexpected { msg: other.0.to_string() }, + merge_location, + ) + } +} + +impl MergeWithError> for DeserrQueryParamError +where + T: std::error::Error, +{ + fn merge( + _self_: Option, + other: TakeErrorMessage, + merge_location: ValuePointerRef, + ) -> Result { + DeserrQueryParamError::error::( None, deserr::ErrorKind::Unexpected { msg: other.0.to_string() }, merge_location, @@ -510,3 +882,32 @@ macro_rules! 
internal_error { )* } } + +#[cfg(test)] +mod tests { + use deserr::ValueKind; + + use crate::error::value_kinds_description_json; + + #[test] + fn test_value_kinds_description_json() { + insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value"); + + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"an integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object"); + + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"an integer or an array"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); + } +} diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index b41bb06b6..53776e489 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -11,19 +11,19 @@ use time::{Date, OffsetDateTime, PrimitiveDateTime}; use uuid::Uuid; use crate::error::deserr_codes::*; -use crate::error::{unwrap_any, Code, DeserrError, ErrorCode, TakeErrorMessage}; +use crate::error::{unwrap_any, Code, DeserrJsonError, ErrorCode, TakeErrorMessage}; use crate::index_uid::{IndexUid, IndexUidFormatError}; use crate::star_or::StarOr; pub type KeyId = Uuid; -impl MergeWithError for DeserrError { +impl MergeWithError for DeserrJsonError { fn merge( _self_: Option, other: IndexUidFormatError, merge_location: deserr::ValuePointerRef, ) -> std::result::Result { - DeserrError::error::( + DeserrJsonError::error::( None, deserr::ErrorKind::Unexpected { msg: other.to_string() }, merge_location, @@ -36,19 +36,19 @@ fn parse_uuid_from_str(s: &str) -> Result> { } #[derive(Debug, DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct CreateApiKey { - #[deserr(error = DeserrError)] + 
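    // As in the settings routes later in this patch, each field below ties its
    // deserialization failures to a field-specific code from
    // `error::deserr_codes` through its `#[deserr(error = ...)]` attribute, so
    // a bad value reports an api-key-specific error rather than a generic bad
    // request.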
#[deserr(error = DeserrJsonError)]
    pub description: Option,
-    #[deserr(error = DeserrError)]
+    #[deserr(error = DeserrJsonError)]
    pub name: Option,
-    #[deserr(default = Uuid::new_v4(), error = DeserrError, from(&String) = parse_uuid_from_str -> TakeErrorMessage)]
+    #[deserr(default = Uuid::new_v4(), error = DeserrJsonError, from(&String) = parse_uuid_from_str -> TakeErrorMessage)]
    pub uid: KeyId,
-    #[deserr(error = DeserrError)]
+    #[deserr(error = DeserrJsonError)]
    pub actions: Vec,
-    #[deserr(error = DeserrError)]
+    #[deserr(error = DeserrJsonError)]
    pub indexes: Vec>,
-    #[deserr(error = DeserrError, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage)]
+    #[deserr(error = DeserrJsonError, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage)]
    pub expires_at: Option,
}
impl CreateApiKey {
@@ -72,8 +72,8 @@ fn deny_immutable_fields_api_key(
    field: &str,
    accepted: &[&str],
    location: ValuePointerRef,
-) -> DeserrError {
-    let mut error = unwrap_any(DeserrError::::error::(
+) -> DeserrJsonError {
+    let mut error = unwrap_any(DeserrJsonError::::error::(
        None,
        deserr::ErrorKind::UnknownKey { key: field, accepted },
        location,
@@ -92,11 +92,11 @@ fn deny_immutable_fields_api_key(
}
#[derive(Debug, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
pub struct PatchApiKey {
-    #[deserr(error = DeserrError)]
+    #[deserr(error = DeserrJsonError)]
    pub description: Option,
-    #[deserr(error = DeserrError)]
+    #[deserr(error = DeserrJsonError)]
    pub name: Option,
}
diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs
index c7f7ca7f5..f8fc47abd 100644
--- a/meilisearch-types/src/lib.rs
+++ b/meilisearch-types/src/lib.rs
@@ -8,8 +8,10 @@ pub mod star_or;
pub mod tasks;
pub mod versioning;
+pub use deserr;
pub use milli;
pub use milli::{heed, Index};
+pub use serde_cs;
use uuid::Uuid;
pub use versioning::VERSION_FILE_NAME;
diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs
index 3169920a6..0a79f865e 100644
--- a/meilisearch-types/src/settings.rs
+++ b/meilisearch-types/src/settings.rs
@@ -12,7 +12,7 @@ use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use crate::error::deserr_codes::*;
-use crate::error::{unwrap_any, DeserrError};
+use crate::error::{unwrap_any, DeserrJsonError};
/// The maximum number of results that the engine
/// will be able to return in one search call.
@@ -66,7 +66,7 @@ fn validate_min_word_size_for_typo_setting( #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] #[serde(deny_unknown_fields, rename_all = "camelCase")] -#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrError)] +#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError)] pub struct MinWordSizeTyposSetting { #[serde(default, skip_serializing_if = "Setting::is_not_set")] pub one_typo: Setting, @@ -76,12 +76,12 @@ pub struct MinWordSizeTyposSetting { #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] #[serde(deny_unknown_fields, rename_all = "camelCase")] -#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError>)] +#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError>)] pub struct TypoSettings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] pub enabled: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub min_word_size_for_typos: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] pub disable_on_words: Setting>, @@ -105,7 +105,7 @@ pub struct PaginationSettings { pub max_total_hits: Setting, } -impl MergeWithError for DeserrError { +impl MergeWithError for DeserrJsonError { fn merge( _self_: Option, other: milli::CriterionError, @@ -128,14 +128,14 @@ impl MergeWithError for DeserrError { #[serde( default, serialize_with = "serialize_with_wildcard", skip_serializing_if = "Setting::is_not_set" )] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub displayed_attributes: Setting>, #[serde( @@ -143,35 +143,35 @@ pub struct Settings { serialize_with = "serialize_with_wildcard", skip_serializing_if = "Setting::is_not_set" )] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub searchable_attributes: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub filterable_attributes: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub sortable_attributes: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub ranking_rules: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub stop_words: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub synonyms: Setting>>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub distinct_attribute: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub typo_tolerance: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub faceting: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrError)] + #[deserr(error = 
DeserrJsonError)] pub pagination: Setting, #[serde(skip)] diff --git a/meilisearch-types/src/tasks.rs b/meilisearch-types/src/tasks.rs index ceddbd51c..fd2d31e06 100644 --- a/meilisearch-types/src/tasks.rs +++ b/meilisearch-types/src/tasks.rs @@ -348,7 +348,7 @@ impl FromStr for Status { } else { Err(ResponseError::from_msg( format!( - "`{}` is not a status. Available status are {}.", + "`{}` is not a valid task status. Available statuses are {}.", status, enum_iterator::all::() .map(|s| format!("`{s}`")) @@ -440,7 +440,7 @@ impl FromStr for Kind { } else { Err(ResponseError::from_msg( format!( - "`{}` is not a type. Available types are {}.", + "`{}` is not a valid task type. Available types are {}.", kind, enum_iterator::all::() .map(|k| format!( diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index a42e5cc7b..be852c02e 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -55,7 +55,6 @@ rustls = "0.20.6" rustls-pemfile = "1.0.1" segment = { version = "0.2.1", optional = true } serde = { version = "1.0.145", features = ["derive"] } -serde-cs = "0.2.4" serde_json = { version = "1.0.85", features = ["preserve_order"] } sha2 = "0.10.6" siphasher = "0.3.10" diff --git a/meilisearch/src/routes/api_key.rs b/meilisearch/src/routes/api_key.rs index 76912bbaa..ce4ab0696 100644 --- a/meilisearch/src/routes/api_key.rs +++ b/meilisearch/src/routes/api_key.rs @@ -4,8 +4,8 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::DeserializeFromValue; use meilisearch_auth::error::AuthControllerError; use meilisearch_auth::AuthController; -use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::{Code, DeserrError, ResponseError, TakeErrorMessage}; +use meilisearch_types::error::{deserr_codes::*, DeserrQueryParamError}; +use meilisearch_types::error::{Code, DeserrJsonError, ResponseError, TakeErrorMessage}; use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; @@ -36,7 +36,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { pub async fn create_api_key( auth_controller: GuardedData, AuthController>, - body: ValidatedJson, + body: ValidatedJson, _req: HttpRequest, ) -> Result { let v = body.into_inner(); @@ -51,14 +51,14 @@ pub async fn create_api_key( } #[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ListApiKeys { #[serde(default)] - #[deserr(error = DeserrError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub offset: usize, #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - #[deserr(error = DeserrError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub limit: usize, } impl ListApiKeys { @@ -69,7 +69,7 @@ impl ListApiKeys { pub async fn list_api_keys( auth_controller: GuardedData, AuthController>, - list_api_keys: QueryParameter, + list_api_keys: QueryParameter, ) -> Result { let paginate = list_api_keys.into_inner().as_pagination(); let page_view = 
tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { @@ -106,7 +106,7 @@ pub async fn get_api_key( pub async fn patch_api_key( auth_controller: GuardedData, AuthController>, - body: ValidatedJson, + body: ValidatedJson, path: web::Path, ) -> Result { let key = path.into_inner().key; diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 3a54bba6a..c09b12244 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -10,18 +10,18 @@ use futures::StreamExt; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType}; -use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage}; +use meilisearch_types::error::{deserr_codes::*, fold_star_or, DeserrQueryParamError}; +use meilisearch_types::error::{DeserrJsonError, ResponseError, TakeErrorMessage}; use meilisearch_types::heed::RoTxn; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::update::IndexDocumentsMethod; +use meilisearch_types::serde_cs::vec::CS; use meilisearch_types::star_or::StarOr; use meilisearch_types::tasks::KindWithContent; use meilisearch_types::{milli, Document, Index}; use mime::Mime; use once_cell::sync::Lazy; use serde::Deserialize; -use serde_cs::vec::CS; use serde_json::Value; use tempfile::tempfile; use tokio::fs::File; @@ -36,7 +36,7 @@ use crate::extractors::authentication::GuardedData; use crate::extractors::payload::Payload; use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; -use crate::routes::{fold_star_or, PaginationView, SummarizedTaskView}; +use crate::routes::{PaginationView, SummarizedTaskView}; static ACCEPTED_CONTENT_TYPE: Lazy> = Lazy::new(|| { vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()] @@ -82,16 +82,17 @@ pub fn configure(cfg: &mut web::ServiceConfig) { } #[derive(Deserialize, Debug, DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct GetDocument { - #[deserr(error = DeserrError)] + // TODO: strongly typed argument here + #[deserr(error = DeserrQueryParamError)] fields: Option>>, } pub async fn get_document( index_scheduler: GuardedData, Data>, path: web::Path, - params: QueryParameter, + params: QueryParameter, ) -> Result { let GetDocument { fields } = params.into_inner(); let attributes_to_retrieve = fields.and_then(fold_star_or); @@ -119,20 +120,20 @@ pub async fn delete_document( } #[derive(Deserialize, Debug, DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct BrowseQuery { - #[deserr(error = DeserrError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] offset: usize, - #[deserr(error = DeserrError, default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> 
TakeErrorMessage)] limit: usize, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] fields: Option>>, } pub async fn get_all_documents( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: QueryParameter, ) -> Result { debug!("called with params: {:?}", params); let BrowseQuery { limit, offset, fields } = params.into_inner(); @@ -148,16 +149,16 @@ pub async fn get_all_documents( } #[derive(Deserialize, Debug, DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct UpdateDocumentsQuery { - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] pub primary_key: Option, } pub async fn add_documents( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: QueryParameter, body: Payload, req: HttpRequest, analytics: web::Data, @@ -185,7 +186,7 @@ pub async fn add_documents( pub async fn update_documents( index_scheduler: GuardedData, Data>, path: web::Path, - params: QueryParameter, + params: QueryParameter, body: Payload, req: HttpRequest, analytics: web::Data, diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 7a3a97c1f..061eefaf6 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -5,8 +5,8 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef}; use index_scheduler::IndexScheduler; use log::debug; -use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::{unwrap_any, Code, DeserrError, ResponseError, TakeErrorMessage}; +use meilisearch_types::error::{deserr_codes::*, unwrap_any, Code, DeserrQueryParamError}; +use meilisearch_types::error::{DeserrJsonError, ResponseError, TakeErrorMessage}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::{self, FieldDistribution, Index}; use meilisearch_types::tasks::KindWithContent; @@ -72,14 +72,14 @@ impl IndexView { } #[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ListIndexes { #[serde(default)] - #[deserr(error = DeserrError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub offset: usize, #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - #[deserr(error = DeserrError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub limit: usize, } impl ListIndexes { @@ -90,7 +90,7 @@ impl ListIndexes { pub async fn list_indexes( index_scheduler: GuardedData, Data>, - paginate: QueryParameter, + paginate: QueryParameter, ) -> Result { let search_rules = &index_scheduler.filters().search_rules; let indexes: Vec<_> = index_scheduler.indexes()?; @@ -107,17 +107,17 @@ pub async fn list_indexes( } #[derive(DeserializeFromValue, Debug)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] 
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct IndexCreateRequest { - #[deserr(error = DeserrError, missing_field_error = DeserrError::missing_index_uid)] + #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_index_uid)] uid: String, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] primary_key: Option, } pub async fn create_index( index_scheduler: GuardedData, Data>, - body: ValidatedJson, + body: ValidatedJson, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -146,8 +146,8 @@ fn deny_immutable_fields_index( field: &str, accepted: &[&str], location: ValuePointerRef, -) -> DeserrError { - let mut error = unwrap_any(DeserrError::::error::( +) -> DeserrJsonError { + let mut error = unwrap_any(DeserrJsonError::::error::( None, deserr::ErrorKind::UnknownKey { key: field, accepted }, location, @@ -162,9 +162,9 @@ fn deny_immutable_fields_index( error } #[derive(DeserializeFromValue, Debug)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)] +#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)] pub struct UpdateIndexRequest { - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] primary_key: Option, } @@ -183,7 +183,7 @@ pub async fn get_index( pub async fn update_index( index_scheduler: GuardedData, Data>, path: web::Path, - body: ValidatedJson, + body: ValidatedJson, req: HttpRequest, analytics: web::Data, ) -> Result { diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 6296772e0..8819ac8cf 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -5,9 +5,12 @@ use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_auth::IndexSearchRules; -use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage}; -use serde_cs::vec::CS; +use meilisearch_types::error::{ + deserr_codes::*, parse_option_usize_query_param, parse_usize_query_param, + DeserrQueryParamError, DetailedParseIntError, +}; +use meilisearch_types::error::{DeserrJsonError, ResponseError, TakeErrorMessage}; +use meilisearch_types::serde_cs::vec::CS; use serde_json::Value; use crate::analytics::{Analytics, SearchAggregator}; @@ -16,7 +19,6 @@ use crate::extractors::authentication::GuardedData; use crate::extractors::json::ValidatedJson; use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; -use crate::routes::from_string_to_option_take_error_message; use crate::search::{ perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, @@ -44,41 +46,41 @@ pub fn parse_bool_take_error_message( } #[derive(Debug, deserr::DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct SearchQueryGet { - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] q: Option, - #[deserr(error = DeserrError, default = DEFAULT_SEARCH_OFFSET(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_SEARCH_OFFSET(), 
from(String) = parse_usize_query_param -> TakeErrorMessage)] offset: usize, - #[deserr(error = DeserrError, default = DEFAULT_SEARCH_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_SEARCH_LIMIT(), from(String) = parse_usize_query_param -> TakeErrorMessage)] limit: usize, - #[deserr(error = DeserrError, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] page: Option, - #[deserr(error = DeserrError, from(&String) = from_string_to_option_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] hits_per_page: Option, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] attributes_to_retrieve: Option>, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] attributes_to_crop: Option>, - #[deserr(error = DeserrError, default = DEFAULT_CROP_LENGTH(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_CROP_LENGTH(), from(String) = parse_usize_query_param -> TakeErrorMessage)] crop_length: usize, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] attributes_to_highlight: Option>, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] filter: Option, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] sort: Option, - #[deserr(error = DeserrError, default, from(&String) = parse_bool_take_error_message -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_bool_take_error_message -> TakeErrorMessage)] show_matches_position: bool, - #[deserr(error = DeserrError)] + #[deserr(error = DeserrQueryParamError)] facets: Option>, - #[deserr(error = DeserrError, default = DEFAULT_HIGHLIGHT_PRE_TAG())] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_HIGHLIGHT_PRE_TAG())] highlight_pre_tag: String, - #[deserr(error = DeserrError, default = DEFAULT_HIGHLIGHT_POST_TAG())] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_HIGHLIGHT_POST_TAG())] highlight_post_tag: String, - #[deserr(error = DeserrError, default = DEFAULT_CROP_MARKER())] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_CROP_MARKER())] crop_marker: String, - #[deserr(error = DeserrError, default)] + #[deserr(error = DeserrQueryParamError, default)] matching_strategy: MatchingStrategy, } @@ -162,7 +164,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec { pub async fn search_with_url_query( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: QueryParameter, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -194,7 +196,7 @@ pub async fn search_with_url_query( pub async fn search_with_post( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: ValidatedJson, + params: ValidatedJson, req: HttpRequest, analytics: web::Data, ) -> Result { diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 13c280d63..404835833 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -2,7 +2,7 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::IndexScheduler; use log::debug; 
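// Each `make_setting_route!` invocation below applies the same substitution:
// the old catch-all `DeserrError` becomes a `DeserrJsonError` parameterized
// with the matching `deserr_codes::InvalidSettings*` code for that sub-route.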
-use meilisearch_types::error::{DeserrError, ResponseError}; +use meilisearch_types::error::{DeserrJsonError, ResponseError}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked}; use meilisearch_types::tasks::KindWithContent; @@ -130,7 +130,7 @@ make_setting_route!( "/filterable-attributes", put, std::collections::BTreeSet, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsFilterableAttributes, >, filterable_attributes, @@ -156,7 +156,7 @@ make_setting_route!( "/sortable-attributes", put, std::collections::BTreeSet, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSortableAttributes, >, sortable_attributes, @@ -182,7 +182,7 @@ make_setting_route!( "/displayed-attributes", put, Vec, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsDisplayedAttributes, >, displayed_attributes, @@ -208,7 +208,7 @@ make_setting_route!( "/typo-tolerance", patch, meilisearch_types::settings::TypoSettings, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsTypoTolerance, >, typo_tolerance, @@ -253,7 +253,7 @@ make_setting_route!( "/searchable-attributes", put, Vec, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSearchableAttributes, >, searchable_attributes, @@ -279,7 +279,7 @@ make_setting_route!( "/stop-words", put, std::collections::BTreeSet, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsStopWords, >, stop_words, @@ -304,7 +304,7 @@ make_setting_route!( "/synonyms", put, std::collections::BTreeMap>, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSynonyms, >, synonyms, @@ -329,7 +329,7 @@ make_setting_route!( "/distinct-attribute", put, String, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsDistinctAttribute, >, distinct_attribute, @@ -353,7 +353,7 @@ make_setting_route!( "/ranking-rules", put, Vec, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsRankingRules, >, ranking_rules, @@ -384,7 +384,7 @@ make_setting_route!( "/faceting", patch, meilisearch_types::settings::FacetingSettings, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsFaceting, >, faceting, @@ -409,7 +409,7 @@ make_setting_route!( "/pagination", patch, meilisearch_types::settings::PaginationSettings, - meilisearch_types::error::DeserrError< + meilisearch_types::error::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsPagination, >, pagination, @@ -461,7 +461,7 @@ generate_configure!( pub async fn update_all( index_scheduler: GuardedData, Data>, index_uid: web::Path, - body: ValidatedJson, DeserrError>, + body: ValidatedJson, DeserrJsonError>, req: HttpRequest, analytics: web::Data, ) -> Result { diff --git a/meilisearch/src/routes/mod.rs 
b/meilisearch/src/routes/mod.rs index 2e619540a..e681910a2 100644 --- a/meilisearch/src/routes/mod.rs +++ b/meilisearch/src/routes/mod.rs @@ -5,9 +5,8 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::{IndexScheduler, Query}; use log::debug; -use meilisearch_types::error::{ResponseError, TakeErrorMessage}; +use meilisearch_types::error::ResponseError; use meilisearch_types::settings::{Settings, Unchecked}; -use meilisearch_types::star_or::StarOr; use meilisearch_types::tasks::{Kind, Status, Task, TaskId}; use serde::{Deserialize, Serialize}; use serde_json::json; @@ -35,35 +34,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service(web::scope("/swap-indexes").configure(swap_indexes::configure)); } -/// Extracts the raw values from the `StarOr` types and -/// return None if a `StarOr::Star` is encountered. -pub fn fold_star_or(content: impl IntoIterator>) -> Option -where - O: FromIterator, -{ - content - .into_iter() - .map(|value| match value { - StarOr::Star => None, - StarOr::Other(val) => Some(val), - }) - .collect() -} - pub fn from_string_to_option(input: &str) -> Result, E> where T: FromStr, { Ok(Some(input.parse()?)) } -pub fn from_string_to_option_take_error_message( - input: &str, -) -> Result, TakeErrorMessage> -where - T: FromStr, -{ - Ok(Some(input.parse().map_err(TakeErrorMessage)?)) -} const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; diff --git a/meilisearch/src/routes/swap_indexes.rs b/meilisearch/src/routes/swap_indexes.rs index c5b371cd9..57015f1f1 100644 --- a/meilisearch/src/routes/swap_indexes.rs +++ b/meilisearch/src/routes/swap_indexes.rs @@ -3,7 +3,7 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::DeserializeFromValue; use index_scheduler::IndexScheduler; use meilisearch_types::error::deserr_codes::InvalidSwapIndexes; -use meilisearch_types::error::{DeserrError, ResponseError}; +use meilisearch_types::error::{DeserrJsonError, ResponseError}; use meilisearch_types::tasks::{IndexSwap, KindWithContent}; use serde_json::json; @@ -20,15 +20,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) { } #[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SwapIndexesPayload { - #[deserr(error = DeserrError)] + #[deserr(error = DeserrJsonError)] indexes: Vec, } pub async fn swap_indexes( index_scheduler: GuardedData, Data>, - params: ValidatedJson, DeserrError>, + params: ValidatedJson, DeserrJsonError>, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -44,6 +44,7 @@ pub async fn swap_indexes( let mut swaps = vec![]; for SwapIndexesPayload { indexes } in params.into_iter() { + // TODO: switch to deserr let (lhs, rhs) = match indexes.as_slice() { [lhs, rhs] => (lhs, rhs), _ => { diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index 09723623f..dbf1380e2 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -1,13 +1,15 @@ -use std::num::ParseIntError; -use std::str::FromStr; - use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use deserr::DeserializeFromValue; use index_scheduler::{IndexScheduler, Query, TaskId}; -use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::{DeserrError, ResponseError, TakeErrorMessage}; +use meilisearch_types::error::{ + deserr_codes::*, parse_option_cs_star_or, 
parse_option_u32_query_param, + parse_option_vec_u32_query_param, DeserrQueryParamError, DetailedParseIntError, + TakeErrorMessage, +}; +use meilisearch_types::error::{parse_u32_query_param, ResponseError}; use meilisearch_types::index_uid::IndexUid; +use meilisearch_types::serde_cs; use meilisearch_types::settings::{Settings, Unchecked}; use meilisearch_types::star_or::StarOr; use meilisearch_types::tasks::{ @@ -21,7 +23,7 @@ use time::macros::format_description; use time::{Date, Duration, OffsetDateTime, Time}; use tokio::task; -use super::{fold_star_or, SummarizedTaskView}; +use super::SummarizedTaskView; use crate::analytics::Analytics; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; @@ -164,108 +166,70 @@ impl From
for DetailsView { } } -fn parse_option_cs( - s: Option>, -) -> Result>, TakeErrorMessage> { - if let Some(s) = s { - s.into_iter() - .map(|s| T::from_str(&s)) - .collect::, T::Err>>() - .map_err(TakeErrorMessage) - .map(Some) - } else { - Ok(None) - } -} -fn parse_option_cs_star_or( - s: Option>>, -) -> Result>, TakeErrorMessage> { - if let Some(s) = s.and_then(fold_star_or) as Option> { - s.into_iter() - .map(|s| T::from_str(&s)) - .collect::, T::Err>>() - .map_err(TakeErrorMessage) - .map(Some) - } else { - Ok(None) - } -} -fn parse_option_str(s: Option) -> Result, TakeErrorMessage> { - if let Some(s) = s { - T::from_str(&s).map_err(TakeErrorMessage).map(Some) - } else { - Ok(None) - } -} - -fn parse_str(s: String) -> Result> { - T::from_str(&s).map_err(TakeErrorMessage) -} - #[derive(Debug, DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TasksFilterQuery { - #[deserr(error = DeserrError, default = DEFAULT_LIMIT(), from(String) = parse_str:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = DEFAULT_LIMIT(), from(String) = parse_u32_query_param -> TakeErrorMessage)] pub limit: u32, - #[deserr(error = DeserrError, from(Option) = parse_option_str:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option) = parse_option_u32_query_param -> TakeErrorMessage)] pub from: Option, - #[deserr(error = DeserrError, from(Option>) = parse_option_cs:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub uids: Option>, - #[deserr(error = DeserrError, from(Option>) = parse_option_cs:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub canceled_by: Option>, - #[deserr(error = DeserrError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub types: Option>, - #[deserr(error = DeserrError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub statuses: Option>, - #[deserr(error = DeserrError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub index_uids: Option>, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_enqueued_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_enqueued_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_started_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = 
deserialize_date_before -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_started_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_finished_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_finished_at: Option, } #[derive(Deserialize, Debug, DeserializeFromValue)] -#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)] +#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TaskDeletionOrCancelationQuery { - #[deserr(error = DeserrError, from(Option>) = parse_option_cs:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub uids: Option>, - #[deserr(error = DeserrError, from(Option>) = parse_option_cs:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub canceled_by: Option>, - #[deserr(error = DeserrError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub types: Option>, - #[deserr(error = DeserrError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub statuses: Option>, - #[deserr(error = DeserrError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub index_uids: Option>, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_enqueued_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_enqueued_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_started_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_started_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_finished_at: Option, - #[deserr(error = DeserrError, default = None, from(Option) = 
deserialize_date_before -> TakeErrorMessage)] + #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_finished_at: Option, } async fn cancel_tasks( index_scheduler: GuardedData, Data>, - params: QueryParameter, + params: QueryParameter, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -337,7 +301,7 @@ async fn cancel_tasks( async fn delete_tasks( index_scheduler: GuardedData, Data>, - params: QueryParameter, + params: QueryParameter, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -418,7 +382,7 @@ pub struct AllTasks { async fn get_tasks( index_scheduler: GuardedData, Data>, - params: QueryParameter, + params: QueryParameter, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -584,16 +548,16 @@ impl std::error::Error for InvalidTaskDateError {} mod tests { use deserr::DeserializeFromValue; use meili_snap::snapshot; - use meilisearch_types::error::DeserrError; + use meilisearch_types::error::DeserrQueryParamError; use crate::extractors::query_parameters::QueryParameter; use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery}; fn deserr_query_params(j: &str) -> Result where - T: DeserializeFromValue, + T: DeserializeFromValue, { - QueryParameter::::from_query(j).map(|p| p.0) + QueryParameter::::from_query(j).map(|p| p.0) } #[test] @@ -634,33 +598,33 @@ mod tests { { let params = "afterFinishedAt=2021"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"`2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`."); + snapshot!(format!("{err}"), @"Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { let params = "beforeFinishedAt=2021"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"`2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`."); + snapshot!(format!("{err}"), @"Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { let params = "afterEnqueuedAt=2021-12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"`2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`."); + snapshot!(format!("{err}"), @"Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { let params = "beforeEnqueuedAt=2021-12-03T23"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"`2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`."); + snapshot!(format!("{err}"), @"Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { let params = "afterStartedAt=2021-12-03T23:45"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"`2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`."); + snapshot!(format!("{err}"), @"Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
         }
         {
             let params = "beforeStartedAt=2021-12-03T23:45";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.");
         }
     }
 
@@ -679,12 +643,12 @@
         {
             let params = "uids=78,hello,world";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `uids`: could not parse `hello` as a positive integer");
         }
         {
             let params = "uids=cat";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `uids`: could not parse `cat` as a positive integer");
         }
     }
 
@@ -703,7 +667,7 @@
         {
             let params = "statuses=finished";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`finished` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.");
         }
     }
     #[test]
         {
             let params = "types=createIndex";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`createIndex` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.");
         }
     }
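
The date filters exercised above accept either a plain calendar date or a full RFC 3339 timestamp. A minimal sketch of that two-format fallback using the `time` crate (features `parsing` and `macros` assumed); the real `deserialize_date_after`/`deserialize_date_before` helpers are not shown in this patch and may adjust the midnight boundary differently:

use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{Date, OffsetDateTime, Time, UtcOffset};

// Accept either a full RFC 3339 date-time or a plain YYYY-MM-DD date.
fn parse_filter_date(s: &str) -> Option<OffsetDateTime> {
    if let Ok(dt) = OffsetDateTime::parse(s, &Rfc3339) {
        return Some(dt);
    }
    // Plain dates are interpreted at midnight, UTC, in this sketch.
    let date = Date::parse(s, format_description!("[year]-[month]-[day]")).ok()?;
    Some(date.with_time(Time::MIDNIGHT).assume_offset(UtcOffset::UTC))
}

Anything that fails both parses (such as `2021-12` or `2021-12-03T23` in the tests above) is rejected with the "invalid date-time" message.
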
     #[test]
         {
             let params = "indexUids=1,hé";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).");
         }
         {
             let params = "indexUids=hé";
             let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-            snapshot!(format!("{err}"), @"`hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.");
+            snapshot!(format!("{err}"), @"Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).");
         }
     }
 
@@ -772,19 +736,19 @@ mod tests {
         // Stars in uids not allowed
         let params = "uids=*";
         let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-        snapshot!(format!("{err}"), @"invalid digit found in string at `.uids`.");
+        snapshot!(format!("{err}"), @"Invalid value in parameter `uids`: could not parse `*` as a positive integer");
     }
     {
         // From not allowed in task deletion/cancelation queries
         let params = "from=12";
         let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-        snapshot!(format!("{err}"), @"Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.");
+        snapshot!(format!("{err}"), @"Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`");
     }
     {
         // Limit not allowed in task deletion/cancelation queries
         let params = "limit=12";
         let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
-        snapshot!(format!("{err}"), @"Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.");
+        snapshot!(format!("{err}"), @"Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`");
     }
 }
}
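
The `*` handling exercised just above comes from `fold_star_or`, used by the removed `parse_option_cs_star_or` earlier in this patch: a literal `*` clears the filter for `types`/`statuses`/`indexUids`, while `uids` and `canceledBy` only accept integers. A simplified, self-contained sketch of that folding (`StarOr` here is a stand-in for the type of the same name in meilisearch-types, and the exact semantics are assumed):

// A lone `*` makes the whole filter a no-op (None = "don't filter").
enum StarOr<T> {
    Star,
    Other(T),
}

fn fold_star_or<T>(values: Vec<StarOr<T>>) -> Option<Vec<T>> {
    let mut out = Vec::new();
    for value in values {
        match value {
            StarOr::Star => return None,
            StarOr::Other(v) => out.push(v),
        }
    }
    Some(out)
}
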
diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs
index 129137859..4e2c43f18 100644
--- a/meilisearch/src/search.rs
+++ b/meilisearch/src/search.rs
@@ -3,10 +3,10 @@ use std::collections::{BTreeMap, BTreeSet, HashSet};
 use std::str::FromStr;
 use std::time::Instant;
 
-use deserr::DeserializeFromValue;
 use either::Either;
+use meilisearch_types::deserr::DeserializeFromValue;
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::DeserrError;
+use meilisearch_types::error::DeserrJsonError;
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
 use meilisearch_types::{milli, Document};
 use milli::tokenizer::TokenizerBuilder;
@@ -30,41 +30,41 @@ pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
 pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
 
 #[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)]
-#[deserr(error = DeserrError, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQuery {
-    #[deserr(error = DeserrError<InvalidSearchQ>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchQ>)]
     pub q: Option<String>,
-    #[deserr(error = DeserrError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET())]
+    #[deserr(error = DeserrJsonError<InvalidSearchOffset>, default = DEFAULT_SEARCH_OFFSET())]
     pub offset: usize,
-    #[deserr(error = DeserrError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT())]
+    #[deserr(error = DeserrJsonError<InvalidSearchLimit>, default = DEFAULT_SEARCH_LIMIT())]
     pub limit: usize,
-    #[deserr(error = DeserrError<InvalidSearchPage>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchPage>)]
     pub page: Option<usize>,
-    #[deserr(error = DeserrError<InvalidSearchHitsPerPage>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchHitsPerPage>)]
     pub hits_per_page: Option<usize>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToRetrieve>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToCrop>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
     pub attributes_to_crop: Option<Vec<String>>,
-    #[deserr(error = DeserrError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
+    #[deserr(error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
     pub crop_length: usize,
-    #[deserr(error = DeserrError<InvalidSearchAttributesToHighlight>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchAttributesToHighlight>)]
     pub attributes_to_highlight: Option<HashSet<String>>,
-    #[deserr(error = DeserrError<InvalidSearchShowMatchesPosition>, default)]
+    #[deserr(error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
     pub show_matches_position: bool,
-    #[deserr(error = DeserrError<InvalidSearchFilter>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchFilter>)]
     pub filter: Option<Value>,
-    #[deserr(error = DeserrError<InvalidSearchSort>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchSort>)]
     pub sort: Option<Vec<String>>,
-    #[deserr(error = DeserrError<InvalidSearchFacets>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchFacets>)]
     pub facets: Option<Vec<String>>,
-    #[deserr(error = DeserrError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
+    #[deserr(error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
     pub highlight_pre_tag: String,
-    #[deserr(error = DeserrError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
+    #[deserr(error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
     pub highlight_post_tag: String,
-    #[deserr(error = DeserrError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
+    #[deserr(error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
     pub crop_marker: String,
-    #[deserr(error = DeserrError<InvalidSearchMatchingStrategy>, default)]
+    #[deserr(error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
     pub matching_strategy: MatchingStrategy,
 }
diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs
index 0a14107a8..8d7cb9130 100644
--- a/meilisearch/tests/auth/api_keys.rs
+++ b/meilisearch/tests/auth/api_keys.rs
@@ -248,7 +248,7 @@ async fn error_add_api_key_missing_parameter() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: missing field `indexes` at ``",
+      "message": "Missing field `indexes`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad-request"
@@ -265,7 +265,7 @@ async fn error_add_api_key_missing_parameter() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Json deserialize error: missing field `actions` at ``",
+      "message": "Missing field `actions`",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad-request"
@@ -314,7 +314,7 @@ async fn error_add_api_key_invalid_parameters_description() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a String at `.description`.",
+      "message": "Invalid value type at `.description`: expected a string, but found an object: `{\"name\":\"products\"}`",
       "code": "invalid_api_key_description",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid-api-key-description"
@@ -337,7 +337,7 @@ async fn error_add_api_key_invalid_parameters_name() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
    {
-      "message": "invalid type: Map `{\"name\":\"products\"}`, expected a String at `.name`.",
+      "message": 
"Invalid value type at `.name`: expected a string, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_name", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-name" @@ -360,7 +360,7 @@ async fn error_add_api_key_invalid_parameters_indexes() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "invalid type: Map `{\"name\":\"products\"}`, expected a Sequence at `.indexes`.", + "message": "Invalid value type at `.indexes`: expected an array, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes" @@ -386,7 +386,7 @@ async fn error_add_api_key_invalid_index_uids() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "`invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexes[0]`.", + "message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes" @@ -411,7 +411,7 @@ async fn error_add_api_key_invalid_parameters_actions() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "invalid type: Map `{\"name\":\"products\"}`, expected a Sequence at `.actions`.", + "message": "Invalid value type at `.actions`: expected an array, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_actions", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-actions" @@ -431,7 +431,7 @@ async fn error_add_api_key_invalid_parameters_actions() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Json deserialize error: unknown value `doc.add`, expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete` at `.actions[0]`.", + "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`", "code": 
"invalid_api_key_actions", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-actions" @@ -455,7 +455,7 @@ async fn error_add_api_key_invalid_parameters_expires_at() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "invalid type: Map `{\"name\":\"products\"}`, expected a String at `.expiresAt`.", + "message": "Invalid value type at `.expiresAt`: expected a string, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_expires_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-expires-at" @@ -478,7 +478,7 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() { meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "`2010-11-13T00:00:00Z` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.\n at `.expiresAt`.", + "message": "Invalid value at `.expiresAt`: `2010-11-13T00:00:00Z` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.\n", "code": "invalid_api_key_expires_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-expires-at" @@ -503,7 +503,7 @@ async fn error_add_api_key_invalid_parameters_uid() { meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "invalid length: expected length 32 for simple format, found 13 at `.uid`.", + "message": "Invalid value at `.uid`: invalid length: expected length 32 for simple format, found 13", "code": "invalid_api_key_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-uid" @@ -1403,7 +1403,7 @@ async fn error_patch_api_key_indexes() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Json deserialize error: unknown field `indexes`, expected one of `description`, `name` at ``.", + "message": "Unknown field `indexes`: expected one of `description`, `name`", "code": "immutable_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable-api-key-indexes" @@ -1480,7 +1480,7 @@ async fn error_patch_api_key_actions() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Json deserialize error: unknown field `actions`, expected one of `description`, `name` at ``.", + "message": "Unknown field `actions`: expected one of `description`, `name`", "code": "immutable_api_key_actions", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable-api-key-actions" @@ -1549,7 +1549,7 @@ async fn error_patch_api_key_expiration_date() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Json 
deserialize error: unknown field `expiresAt`, expected one of `description`, `name` at ``.", + "message": "Unknown field `expiresAt`: expected one of `description`, `name`", "code": "immutable_api_key_expires_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable-api-key-expires-at" @@ -1670,7 +1670,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "invalid type: Integer `13`, expected a String at `.description`.", + "message": "Invalid value type at `.description`: expected a string, but found a positive integer: `13`", "code": "invalid_api_key_description", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-description" @@ -1686,7 +1686,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "invalid type: Integer `13`, expected a String at `.name`.", + "message": "Invalid value type at `.name`: expected a string, but found a positive integer: `13`", "code": "invalid_api_key_name", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-name" diff --git a/meilisearch/tests/common/index.rs b/meilisearch/tests/common/index.rs index d42f18d2c..b98ed9827 100644 --- a/meilisearch/tests/common/index.rs +++ b/meilisearch/tests/common/index.rs @@ -289,8 +289,8 @@ impl Index<'_> { eprintln!("Error with post search"); resume_unwind(e); } - - let (response, code) = self.search_get(query).await; + let query = yaup::to_string(&query).unwrap(); + let (response, code) = self.search_get(&query).await; if let Err(e) = catch_unwind(move || test(response, code)) { eprintln!("Error with get search"); resume_unwind(e); @@ -302,9 +302,8 @@ impl Index<'_> { self.service.post_encoded(url, query, self.encoder).await } - pub async fn search_get(&self, query: Value) -> (Value, StatusCode) { - let params = yaup::to_string(&query).unwrap(); - let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), params); + pub async fn search_get(&self, query: &str) -> (Value, StatusCode) { + let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), query); self.service.get(url).await } diff --git a/meilisearch/tests/common/server.rs b/meilisearch/tests/common/server.rs index c3c9b7c60..e325da0cb 100644 --- a/meilisearch/tests/common/server.rs +++ b/meilisearch/tests/common/server.rs @@ -132,8 +132,8 @@ impl Server { self.service.get("/tasks").await } - pub async fn tasks_filter(&self, filter: Value) -> (Value, StatusCode) { - self.service.get(format!("/tasks?{}", yaup::to_string(&filter).unwrap())).await + pub async fn tasks_filter(&self, filter: &str) -> (Value, StatusCode) { + self.service.get(format!("/tasks?{}", filter)).await } pub async fn get_dump_status(&self, uid: &str) -> (Value, StatusCode) { @@ -148,14 +148,12 @@ impl Server { self.service.post("/swap-indexes", value).await } - pub async fn cancel_tasks(&self, value: Value) -> (Value, StatusCode) { - self.service - .post(format!("/tasks/cancel?{}", yaup::to_string(&value).unwrap()), json!(null)) - .await + pub async fn cancel_tasks(&self, value: &str) -> (Value, StatusCode) { + 
self.service.post(format!("/tasks/cancel?{}", value), json!(null)).await } - pub async fn delete_tasks(&self, value: Value) -> (Value, StatusCode) { - self.service.delete(format!("/tasks?{}", yaup::to_string(&value).unwrap())).await + pub async fn delete_tasks(&self, value: &str) -> (Value, StatusCode) { + self.service.delete(format!("/tasks?{}", value)).await } pub async fn wait_task(&self, update_id: u64) -> Value { diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index d582a3672..99f711745 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -46,7 +46,7 @@ async fn search_bad_q() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.q`.", + "message": "Invalid value type at `.q`: expected a string, but found an array: `[\"doggo\"]`", "code": "invalid_search_q", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-q" @@ -64,18 +64,18 @@ async fn search_bad_offset() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Integer at `.offset`.", + "message": "Invalid value type at `.offset`: expected a positive integer, but found a string: `\"doggo\"`", "code": "invalid_search_offset", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-offset" } "###); - let (response, code) = index.search_get(json!({"offset": "doggo"})).await; + let (response, code) = index.search_get("offset=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.offset`.", + "message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer", "code": "invalid_search_offset", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-offset" @@ -92,18 +92,18 @@ async fn search_bad_limit() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Integer at `.limit`.", + "message": "Invalid value type at `.limit`: expected a positive integer, but found a string: `\"doggo\"`", "code": "invalid_search_limit", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-limit" } "###); - let (response, code) = index.search_get(json!({"limit": "doggo"})).await; + let (response, code) = index.search_get("limit=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.limit`.", + "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer", "code": "invalid_search_limit", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-limit" @@ -120,18 +120,18 @@ async fn search_bad_page() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Integer at `.page`.", + "message": "Invalid value type at `.page`: expected a positive integer, but found a string: `\"doggo\"`", "code": "invalid_search_page", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-page" } "###); - let (response, code) = index.search_get(json!({"page": "doggo"})).await; + let (response, code) = 
index.search_get("page=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.page`.", + "message": "Invalid value in parameter `page`: could not parse `doggo` as a positive integer", "code": "invalid_search_page", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-page" @@ -148,18 +148,18 @@ async fn search_bad_hits_per_page() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Integer at `.hitsPerPage`.", + "message": "Invalid value type at `.hitsPerPage`: expected a positive integer, but found a string: `\"doggo\"`", "code": "invalid_search_hits_per_page", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page" } "###); - let (response, code) = index.search_get(json!({"hitsPerPage": "doggo"})).await; + let (response, code) = index.search_get("hitsPerPage=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.hitsPerPage`.", + "message": "Invalid value in parameter `hitsPerPage`: could not parse `doggo` as a positive integer", "code": "invalid_search_hits_per_page", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page" @@ -176,7 +176,7 @@ async fn search_bad_attributes_to_crop() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.attributesToCrop`.", + "message": "Invalid value type at `.attributesToCrop`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_search_attributes_to_crop", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-crop" @@ -194,18 +194,18 @@ async fn search_bad_crop_length() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Integer at `.cropLength`.", + "message": "Invalid value type at `.cropLength`: expected a positive integer, but found a string: `\"doggo\"`", "code": "invalid_search_crop_length", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-crop-length" } "###); - let (response, code) = index.search_get(json!({"cropLength": "doggo"})).await; + let (response, code) = index.search_get("cropLength=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.cropLength`.", + "message": "Invalid value in parameter `cropLength`: could not parse `doggo` as a positive integer", "code": "invalid_search_crop_length", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-crop-length" @@ -222,7 +222,7 @@ async fn search_bad_attributes_to_highlight() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.attributesToHighlight`.", + "message": "Invalid value type at `.attributesToHighlight`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_search_attributes_to_highlight", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-highlight" @@ -266,7 +266,7 @@ async fn search_bad_sort() { snapshot!(code, @"400 Bad 
Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.sort`.", + "message": "Invalid value type at `.sort`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_search_sort", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-sort" @@ -284,18 +284,18 @@ async fn search_bad_show_matches_position() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Boolean at `.showMatchesPosition`.", + "message": "Invalid value type at `.showMatchesPosition`: expected a boolean, but found a string: `\"doggo\"`", "code": "invalid_search_show_matches_position", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position" } "###); - let (response, code) = index.search_get(json!({"showMatchesPosition": "doggo"})).await; + let (response, code) = index.search_get("showMatchesPosition=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "provided string was not `true` or `false` at `.showMatchesPosition`.", + "message": "Invalid value in parameter `showMatchesPosition`: provided string was not `true` or `false`", "code": "invalid_search_show_matches_position", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position" @@ -312,7 +312,7 @@ async fn search_bad_facets() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.facets`.", + "message": "Invalid value type at `.facets`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_search_facets", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-facets" @@ -330,7 +330,7 @@ async fn search_bad_highlight_pre_tag() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.highlightPreTag`.", + "message": "Invalid value type at `.highlightPreTag`: expected a string, but found an array: `[\"doggo\"]`", "code": "invalid_search_highlight_pre_tag", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-highlight-pre-tag" @@ -348,7 +348,7 @@ async fn search_bad_highlight_post_tag() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.highlightPostTag`.", + "message": "Invalid value type at `.highlightPostTag`: expected a string, but found an array: `[\"doggo\"]`", "code": "invalid_search_highlight_post_tag", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-highlight-post-tag" @@ -366,7 +366,7 @@ async fn search_bad_crop_marker() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.cropMarker`.", + "message": "Invalid value type at `.cropMarker`: expected a string, but found an array: `[\"doggo\"]`", "code": "invalid_search_crop_marker", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-crop-marker" @@ -384,18 +384,18 @@ async fn search_bad_matching_strategy() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": 
"Json deserialize error: unknown value `doggo`, expected one of `last`, `all` at `.matchingStrategy`.", + "message": "Unknown value `doggo` at `.matchingStrategy`: expected one of `last`, `all`", "code": "invalid_search_matching_strategy", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy" } "###); - let (response, code) = index.search_get(json!({"matchingStrategy": "doggo"})).await; + let (response, code) = index.search_get("matchingStrategy=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Json deserialize error: unknown value `doggo`, expected one of `last`, `all` at `.matchingStrategy`.", + "message": "Unknown value `doggo` for parameter `matchingStrategy`: expected one of `last`, `all`", "code": "invalid_search_matching_strategy", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy" diff --git a/meilisearch/tests/settings/errors.rs b/meilisearch/tests/settings/errors.rs index 77e62303a..a3deeccfb 100644 --- a/meilisearch/tests/settings/errors.rs +++ b/meilisearch/tests/settings/errors.rs @@ -12,7 +12,7 @@ async fn settings_bad_displayed_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.displayedAttributes`.", + "message": "Invalid value type at `.displayedAttributes`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_displayed_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes" @@ -23,7 +23,7 @@ async fn settings_bad_displayed_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_displayed_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes" @@ -40,7 +40,7 @@ async fn settings_bad_searchable_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.searchableAttributes`.", + "message": "Invalid value type at `.searchableAttributes`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_searchable_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes" @@ -51,7 +51,7 @@ async fn settings_bad_searchable_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_searchable_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes" @@ -68,7 +68,7 @@ async fn settings_bad_filterable_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.filterableAttributes`.", + "message": "Invalid value type at `.filterableAttributes`: expected an array, but found a string: `\"doggo\"`", "code": 
"invalid_settings_filterable_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes" @@ -79,7 +79,7 @@ async fn settings_bad_filterable_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_filterable_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes" @@ -96,7 +96,7 @@ async fn settings_bad_sortable_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.sortableAttributes`.", + "message": "Invalid value type at `.sortableAttributes`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_sortable_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes" @@ -107,7 +107,7 @@ async fn settings_bad_sortable_attributes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_sortable_attributes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes" @@ -124,7 +124,7 @@ async fn settings_bad_ranking_rules() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.rankingRules`.", + "message": "Invalid value type at `.rankingRules`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_ranking_rules", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules" @@ -135,7 +135,7 @@ async fn settings_bad_ranking_rules() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_ranking_rules", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules" @@ -152,7 +152,7 @@ async fn settings_bad_stop_words() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `.stopWords`.", + "message": "Invalid value type at `.stopWords`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_stop_words", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words" @@ -163,7 +163,7 @@ async fn settings_bad_stop_words() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "invalid_settings_stop_words", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words" @@ -180,7 +180,7 @@ async fn settings_bad_synonyms() { snapshot!(code, @"400 Bad Request"); 
snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at `.synonyms`.", + "message": "Invalid value type at `.synonyms`: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_synonyms", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms" @@ -191,7 +191,7 @@ async fn settings_bad_synonyms() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at ``.", + "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_synonyms", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms" @@ -208,7 +208,7 @@ async fn settings_bad_distinct_attribute() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at `.distinctAttribute`.", + "message": "Invalid value type at `.distinctAttribute`: expected a string, but found an array: `[\"doggo\"]`", "code": "invalid_settings_distinct_attribute", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute" @@ -219,7 +219,7 @@ async fn settings_bad_distinct_attribute() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: Sequence `[\"doggo\"]`, expected a String at ``.", + "message": "Invalid value type: expected a string, but found an array: `[\"doggo\"]`", "code": "invalid_settings_distinct_attribute", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute" @@ -236,7 +236,7 @@ async fn settings_bad_typo_tolerance() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at `.typoTolerance`.", + "message": "Invalid value type at `.typoTolerance`: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_typo_tolerance", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance" @@ -247,7 +247,7 @@ async fn settings_bad_typo_tolerance() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at ``.", + "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_typo_tolerance", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance" @@ -264,7 +264,7 @@ async fn settings_bad_faceting() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at `.faceting`.", + "message": "Invalid value type at `.faceting`: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_faceting", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-faceting" @@ -275,7 +275,7 @@ async fn settings_bad_faceting() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at ``.", + "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_faceting", "type": "invalid_request", "link": 
"https://docs.meilisearch.com/errors#invalid-settings-faceting" @@ -292,7 +292,7 @@ async fn settings_bad_pagination() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at `.pagination`.", + "message": "Invalid value type at `.pagination`: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_pagination", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination" @@ -303,7 +303,7 @@ async fn settings_bad_pagination() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at ``.", + "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`", "code": "invalid_settings_pagination", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination" diff --git a/meilisearch/tests/settings/get_settings.rs b/meilisearch/tests/settings/get_settings.rs index 3ac7d3801..f18787e19 100644 --- a/meilisearch/tests/settings/get_settings.rs +++ b/meilisearch/tests/settings/get_settings.rs @@ -282,7 +282,7 @@ async fn error_set_invalid_ranking_rules() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules. at `.rankingRules[0]`.", + "message": "Invalid value at `.rankingRules[0]`: `manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.", "code": "invalid_settings_ranking_rules", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules" diff --git a/meilisearch/tests/tasks/errors.rs b/meilisearch/tests/tasks/errors.rs index 305ab8b9c..fd4c6d489 100644 --- a/meilisearch/tests/tasks/errors.rs +++ b/meilisearch/tests/tasks/errors.rs @@ -1,5 +1,4 @@ use meili_snap::*; -use serde_json::json; use crate::common::Server; @@ -7,33 +6,44 @@ use crate::common::Server; async fn task_bad_uids() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"uids": "doggo"})).await; + let (response, code) = server.tasks_filter("uids=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.uids`.", + "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" } "###); - let (response, code) = server.cancel_tasks(json!({"uids": "doggo"})).await; + let (response, code) = server.cancel_tasks("uids=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.uids`.", + "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" } "###); - let (response, code) = server.delete_tasks(json!({"uids": "doggo"})).await; + let (response, code) = server.delete_tasks("uids=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit 
found in string at `.uids`.", + "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer", + "code": "invalid_task_uids", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-uids" + } + "###); + + let (response, code) = server.delete_tasks("uids=1,dogo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `uids`: could not parse `dogo` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" @@ -45,33 +55,33 @@ async fn task_bad_uids() { async fn task_bad_canceled_by() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"canceledBy": "doggo"})).await; + let (response, code) = server.tasks_filter("canceledBy=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.canceledBy`.", + "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer", "code": "invalid_task_canceled_by", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by" } "###); - let (response, code) = server.cancel_tasks(json!({"canceledBy": "doggo"})).await; + let (response, code) = server.cancel_tasks("canceledBy=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.canceledBy`.", + "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer", "code": "invalid_task_canceled_by", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by" } "###); - let (response, code) = server.delete_tasks(json!({"canceledBy": "doggo"})).await; + let (response, code) = server.delete_tasks("canceledBy=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.canceledBy`.", + "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer", "code": "invalid_task_canceled_by", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by" @@ -83,33 +93,33 @@ async fn task_bad_canceled_by() { async fn task_bad_types() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"types": "doggo"})).await; + let (response, code) = server.tasks_filter("types=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.", + "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. 
Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", "code": "invalid_task_types", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-types" } "###); - let (response, code) = server.cancel_tasks(json!({"types": "doggo"})).await; + let (response, code) = server.cancel_tasks("types=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.", + "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", "code": "invalid_task_types", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-types" } "###); - let (response, code) = server.delete_tasks(json!({"types": "doggo"})).await; + let (response, code) = server.delete_tasks("types=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is not a type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`. at `.types`.", + "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", "code": "invalid_task_types", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-types" @@ -121,33 +131,33 @@ async fn task_bad_types() { async fn task_bad_statuses() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"statuses": "doggo"})).await; + let (response, code) = server.tasks_filter("statuses=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.", + "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", "code": "invalid_task_statuses", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-statuses" } "###); - let (response, code) = server.cancel_tasks(json!({"statuses": "doggo"})).await; + let (response, code) = server.cancel_tasks("statuses=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.", + "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. 
Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", "code": "invalid_task_statuses", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-statuses" } "###); - let (response, code) = server.delete_tasks(json!({"statuses": "doggo"})).await; + let (response, code) = server.delete_tasks("statuses=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is not a status. Available status are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`. at `.statuses`.", + "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", "code": "invalid_task_statuses", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-statuses" @@ -159,33 +169,33 @@ async fn task_bad_statuses() { async fn task_bad_index_uids() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"indexUids": "the good doggo"})).await; + let (response, code) = server.tasks_filter("indexUids=the%20good%20doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.", + "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-index-uid" } "###); - let (response, code) = server.cancel_tasks(json!({"indexUids": "the good doggo"})).await; + let (response, code) = server.cancel_tasks("indexUids=the%20good%20doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.", + "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-index-uid" } "###); - let (response, code) = server.delete_tasks(json!({"indexUids": "the good doggo"})).await; + let (response, code) = server.delete_tasks("indexUids=the%20good%20doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexUids`.", + "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-index-uid" @@ -197,33 +207,33 @@ async fn task_bad_index_uids() { async fn task_bad_limit() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"limit": "doggo"})).await; + let (response, code) = server.tasks_filter("limit=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.limit`.", + "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer", "code": "invalid_task_limit", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-limit" } "###); - let (response, code) = server.cancel_tasks(json!({"limit": "doggo"})).await; + let (response, code) = server.cancel_tasks("limit=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" } "###); - let (response, code) = server.delete_tasks(json!({"limit": "doggo"})).await; + let (response, code) = server.delete_tasks("limit=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `limit`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" @@ -235,33 +245,33 @@ async fn task_bad_limit() { async fn task_bad_from() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"from": "doggo"})).await; + let (response, code) = server.tasks_filter("from=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid digit found in string at `.from`.", + "message": "Invalid value in parameter `from`: could not parse `doggo` as a positive integer", "code": "invalid_task_from", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-from" } "###); - let (response, code) = server.cancel_tasks(json!({"from": "doggo"})).await; + let (response, code) = server.cancel_tasks("from=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, 
`beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" } "###); - let (response, code) = server.delete_tasks(json!({"from": "doggo"})).await; + let (response, code) = server.delete_tasks("from=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `from`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" @@ -273,33 +283,33 @@ async fn task_bad_from() { async fn task_bad_after_enqueued_at() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"afterEnqueuedAt": "doggo"})).await; + let (response, code) = server.tasks_filter("afterEnqueuedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.", + "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_enqueued_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" } "###); - let (response, code) = server.cancel_tasks(json!({"afterEnqueuedAt": "doggo"})).await; + let (response, code) = server.cancel_tasks("afterEnqueuedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.", + "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_enqueued_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" } "###); - let (response, code) = server.delete_tasks(json!({"afterEnqueuedAt": "doggo"})).await; + let (response, code) = server.delete_tasks("afterEnqueuedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterEnqueuedAt`.", + "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. 
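Every date filter in this file (`afterEnqueuedAt`, `beforeStartedAt`, and the rest) shares one rule: a value is either a bare `YYYY-MM-DD` date or an RFC 3339 date-time. Below is a sketch of that two-step parse with the `time` crate (assuming its `parsing` and `macros` features), in the spirit of the `deserialize_date_after`/`deserialize_date_before` converters attached to these fields in `tasks.rs`; the real converters additionally decide whether a bare date means the start or the end of that day:

```rust
use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{Date, OffsetDateTime};

// Accept RFC 3339 first, then fall back to a bare calendar date read as
// midnight UTC, reusing the error wording from the snapshots above.
fn parse_filter_date(s: &str) -> Result<OffsetDateTime, String> {
    if let Ok(datetime) = OffsetDateTime::parse(s, &Rfc3339) {
        return Ok(datetime);
    }
    Date::parse(s, format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"))
        .map(|date| date.midnight().assume_utc())
        .map_err(|_| {
            format!("`{s}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.")
        })
}

fn main() {
    assert!(parse_filter_date("2023-01-12T15:35:03Z").is_ok());
    assert!(parse_filter_date("2023-01-12").is_ok());
    assert!(parse_filter_date("doggo").is_err());
}
```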
It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_enqueued_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" @@ -311,33 +321,33 @@ async fn task_bad_after_enqueued_at() { async fn task_bad_before_enqueued_at() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"beforeEnqueuedAt": "doggo"})).await; + let (response, code) = server.tasks_filter("beforeEnqueuedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.", + "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_enqueued_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" } "###); - let (response, code) = server.cancel_tasks(json!({"beforeEnqueuedAt": "doggo"})).await; + let (response, code) = server.cancel_tasks("beforeEnqueuedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.", + "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_enqueued_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" } "###); - let (response, code) = server.delete_tasks(json!({"beforeEnqueuedAt": "doggo"})).await; + let (response, code) = server.delete_tasks("beforeEnqueuedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeEnqueuedAt`.", + "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_enqueued_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" @@ -349,33 +359,33 @@ async fn task_bad_before_enqueued_at() { async fn task_bad_after_started_at() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"afterStartedAt": "doggo"})).await; + let (response, code) = server.tasks_filter("afterStartedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.", + "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at" } "###); - let (response, code) = server.cancel_tasks(json!({"afterStartedAt": "doggo"})).await; + let (response, code) = server.cancel_tasks("afterStartedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.", + "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at" } "###); - let (response, code) = server.delete_tasks(json!({"afterStartedAt": "doggo"})).await; + let (response, code) = server.delete_tasks("afterStartedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterStartedAt`.", + "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at" @@ -387,33 +397,33 @@ async fn task_bad_after_started_at() { async fn task_bad_before_started_at() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"beforeStartedAt": "doggo"})).await; + let (response, code) = server.tasks_filter("beforeStartedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.", + "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at" } "###); - let (response, code) = server.cancel_tasks(json!({"beforeStartedAt": "doggo"})).await; + let (response, code) = server.cancel_tasks("beforeStartedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.", + "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at" } "###); - let (response, code) = server.delete_tasks(json!({"beforeStartedAt": "doggo"})).await; + let (response, code) = server.delete_tasks("beforeStartedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.", + "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at" @@ -425,33 +435,33 @@ async fn task_bad_before_started_at() { async fn task_bad_after_finished_at() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"afterFinishedAt": "doggo"})).await; + let (response, code) = server.tasks_filter("afterFinishedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.", + "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_finished_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" } "###); - let (response, code) = server.cancel_tasks(json!({"afterFinishedAt": "doggo"})).await; + let (response, code) = server.cancel_tasks("afterFinishedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.", + "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_finished_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" } "###); - let (response, code) = server.delete_tasks(json!({"afterFinishedAt": "doggo"})).await; + let (response, code) = server.delete_tasks("afterFinishedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.afterFinishedAt`.", + "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_finished_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" @@ -463,33 +473,33 @@ async fn task_bad_after_finished_at() { async fn task_bad_before_finished_at() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!({"beforeFinishedAt": "doggo"})).await; + let (response, code) = server.tasks_filter("beforeFinishedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.", + "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_finished_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" } "###); - let (response, code) = server.cancel_tasks(json!({"beforeFinishedAt": "doggo"})).await; + let (response, code) = server.cancel_tasks("beforeFinishedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.", + "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_finished_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" } "###); - let (response, code) = server.delete_tasks(json!({"beforeFinishedAt": "doggo"})).await; + let (response, code) = server.delete_tasks("beforeFinishedAt=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "`doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeFinishedAt`.", + "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_finished_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 46775e05f..7fadf0a10 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -179,44 +179,44 @@ async fn list_tasks_status_and_type_filtered() { async fn get_task_filter_error() { let server = Server::new().await; - let (response, code) = server.tasks_filter(json!( { "lol": "pied" })).await; + let (response, code) = server.tasks_filter("lol=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `lol`, expected one of `limit`, `from`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `lol`: expected one of `limit`, `from`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" } "###); - let (response, code) = server.tasks_filter(json!( { "uids": "pied" })).await; + let (response, code) = server.tasks_filter("uids=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "invalid digit found in string at `.uids`.", + "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" } "###); - let (response, code) = server.tasks_filter(json!( { "from": "pied" })).await; + let (response, code) = server.tasks_filter("from=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "invalid digit found in string at `.from`.", + "message": "Invalid value in parameter `from`: could not parse `pied` as a positive integer", "code": "invalid_task_from", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-from" } "###); - let (response, code) = server.tasks_filter(json!( { "beforeStartedAt": "pied" })).await; + let (response, code) = server.tasks_filter("beforeStartedAt=pied").await; assert_eq!(code, 400, 
"{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "`pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format. at `.beforeStartedAt`.", + "message": "Invalid value in parameter `beforeStartedAt`: `pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_started_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at" @@ -228,7 +228,7 @@ async fn get_task_filter_error() { async fn delete_task_filter_error() { let server = Server::new().await; - let (response, code) = server.delete_tasks(json!(null)).await; + let (response, code) = server.delete_tasks("").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { @@ -239,22 +239,22 @@ async fn delete_task_filter_error() { } "###); - let (response, code) = server.delete_tasks(json!({ "lol": "pied" })).await; + let (response, code) = server.delete_tasks("lol=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `lol`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `lol`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" } "###); - let (response, code) = server.delete_tasks(json!({ "uids": "pied" })).await; + let (response, code) = server.delete_tasks("uids=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "invalid digit found in string at `.uids`.", + "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" @@ -266,7 +266,7 @@ async fn delete_task_filter_error() { async fn cancel_task_filter_error() { let server = Server::new().await; - let (response, code) = server.cancel_tasks(json!(null)).await; + let (response, code) = server.cancel_tasks("").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { @@ -277,22 +277,22 @@ async fn cancel_task_filter_error() { } "###); - let (response, code) = server.cancel_tasks(json!({ "lol": "pied" })).await; + let (response, code) = server.cancel_tasks("lol=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "Json deserialize error: unknown field `lol`, expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt` at ``.", + "message": "Unknown parameter `lol`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", "link": 
"https://docs.meilisearch.com/errors#bad-request" } "###); - let (response, code) = server.cancel_tasks(json!({ "uids": "pied" })).await; + let (response, code) = server.cancel_tasks("uids=pied").await; assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "invalid digit found in string at `.uids`.", + "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" @@ -523,7 +523,7 @@ async fn test_summarized_settings_update() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "`custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules. at `.rankingRules[0]`.", + "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.", "code": "invalid_settings_ranking_rules", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules" @@ -899,7 +899,7 @@ async fn test_summarized_task_cancelation() { // to avoid being flaky we're only going to cancel an already finished task :( index.create(None).await; index.wait_task(0).await; - server.cancel_tasks(json!({ "uids": [0] })).await; + server.cancel_tasks("uids=0").await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; assert_json_snapshot!(task, @@ -932,7 +932,7 @@ async fn test_summarized_task_deletion() { // to avoid being flaky we're only going to delete an already finished task :( index.create(None).await; index.wait_task(0).await; - server.delete_tasks(json!({ "uids": [0] })).await; + server.delete_tasks("uids=0").await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; assert_json_snapshot!(task, From 766dd830ae9434fa0da1bfd7e5fdec3973a3ef82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Thu, 12 Jan 2023 15:35:03 +0100 Subject: [PATCH 003/186] Update deserr to latest version + add new error codes for missing fields - missing_api_key_indexes - missing_api_key_actions - missing_api_key_expires_at - missing_swap_indexes_indexes --- Cargo.lock | 16 ++----- meilisearch-types/Cargo.toml | 4 +- meilisearch-types/src/error.rs | 49 ++++++++++++++++++-- meilisearch-types/src/keys.rs | 14 +++--- meilisearch-types/src/lib.rs | 1 - meilisearch-types/src/settings.rs | 31 ++++++++----- meilisearch/Cargo.toml | 2 +- meilisearch/src/routes/api_key.rs | 4 +- meilisearch/src/routes/indexes/documents.rs | 10 ++-- meilisearch/src/routes/indexes/mod.rs | 4 +- meilisearch/src/routes/indexes/search.rs | 34 +++++++------- meilisearch/src/routes/swap_indexes.rs | 2 +- meilisearch/src/routes/tasks.rs | 48 +++++++++---------- meilisearch/src/search.rs | 36 +++++++------- meilisearch/tests/auth/api_keys.rs | 27 ++++------- meilisearch/tests/documents/add_documents.rs | 4 +- 16 files changed, 158 insertions(+), 128 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index da1ec3011..6e754abd3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1025,8 +1025,6 @@ dependencies = [ [[package]] name = "deserr" version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86290491a2b5c21a1a5083da8dae831006761258fabd5617309c3eebc5f89468" dependencies = [ 
"deserr-internal", "serde-cs", @@ -1036,8 +1034,6 @@ dependencies = [ [[package]] name = "deserr-internal" version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7131de1c27581bc376a22166c9f570be91b76cb096be2f6aecf224c27bf7c49a" dependencies = [ "convert_case 0.5.0", "proc-macro2 1.0.49", @@ -1315,8 +1311,7 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.39.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5" +version = "0.38.0" dependencies = [ "nom", "nom_locate", @@ -1334,8 +1329,7 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.39.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5" +version = "0.38.0" dependencies = [ "serde_json", ] @@ -1899,8 +1893,7 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.39.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5" +version = "0.38.0" dependencies = [ "serde_json", ] @@ -2448,8 +2441,7 @@ dependencies = [ [[package]] name = "milli" -version = "0.39.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.0#e6bea999740b153871f665abce869ffbb5aa94c5" +version = "0.38.0" dependencies = [ "bimap", "bincode", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 4c0b1ca93..257ac9c2d 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -9,14 +9,14 @@ actix-web = { version = "4.2.1", default-features = false } anyhow = "1.0.65" convert_case = "0.6.0" csv = "1.1.6" -deserr = "0.1.4" +deserr = { path = "/Users/meilisearch/Documents/deserr" } either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" file-store = { path = "../file-store" } flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.39.0", default-features = false } +milli = { path = "/Users/meilisearch/Documents/milli2/milli", default-features = false } proptest = { version = "1.0.0", optional = true } proptest-derive = { version = "0.3.0", optional = true } roaring = { version = "0.10.0", features = ["serde"] } diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 2be6ffff4..0d7f126a5 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -14,7 +14,9 @@ use serde_cs::vec::CS; use crate::star_or::StarOr; -use self::deserr_codes::MissingIndexUid; +use self::deserr_codes::{ + MissingApiKeyActions, MissingApiKeyExpiresAt, MissingApiKeyIndexes, MissingIndexUid, InvalidSwapIndexes, MissingSwapIndexesIndexes, +}; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] @@ -185,7 +187,6 @@ DumpAlreadyProcessing , invalid , CONFLICT; DumpNotFound , invalid , NOT_FOUND; DumpProcessFailed , internal , INTERNAL_SERVER_ERROR; DuplicateIndexFound , invalid , BAD_REQUEST; - ImmutableApiKeyUid , invalid , BAD_REQUEST; ImmutableApiKeyKey , invalid , BAD_REQUEST; ImmutableApiKeyActions , invalid , BAD_REQUEST; @@ -193,11 +194,9 @@ ImmutableApiKeyIndexes , invalid , BAD_REQUEST; ImmutableApiKeyExpiresAt , invalid , BAD_REQUEST; ImmutableApiKeyCreatedAt , invalid , BAD_REQUEST; ImmutableApiKeyUpdatedAt , invalid , BAD_REQUEST; - ImmutableIndexUid , invalid , BAD_REQUEST; ImmutableIndexCreatedAt , invalid , BAD_REQUEST; ImmutableIndexUpdatedAt , invalid , BAD_REQUEST; - 
IndexAlreadyExists , invalid , CONFLICT ; IndexCreationFailed , internal , INTERNAL_SERVER_ERROR; IndexNotFound , invalid , NOT_FOUND; @@ -281,6 +280,7 @@ MissingDocumentId , invalid , BAD_REQUEST ; MissingIndexUid , invalid , BAD_REQUEST ; MissingMasterKey , authentication, UNAUTHORIZED ; MissingPayload , invalid , BAD_REQUEST ; +MissingSwapIndexesIndexes , invalid , BAD_REQUEST ; MissingTaskFilters , invalid , BAD_REQUEST ; NoSpaceLeftOnDevice , system , UNPROCESSABLE_ENTITY; PayloadTooLarge , invalid , PAYLOAD_TOO_LARGE ; @@ -478,6 +478,47 @@ impl DeserrJsonError { Self { msg: x.msg, code: MissingIndexUid.error_code(), _phantom: PhantomData } } } +impl DeserrJsonError { + pub fn missing_api_key_actions(field: &str, location: ValuePointerRef) -> Self { + let x = unwrap_any(Self::error::( + None, + deserr::ErrorKind::MissingField { field }, + location, + )); + Self { msg: x.msg, code: MissingApiKeyActions.error_code(), _phantom: PhantomData } + } +} +impl DeserrJsonError { + pub fn missing_api_key_expires_at(field: &str, location: ValuePointerRef) -> Self { + let x = unwrap_any(Self::error::( + None, + deserr::ErrorKind::MissingField { field }, + location, + )); + Self { msg: x.msg, code: MissingApiKeyExpiresAt.error_code(), _phantom: PhantomData } + } +} +impl DeserrJsonError { + pub fn missing_api_key_indexes(field: &str, location: ValuePointerRef) -> Self { + let x = unwrap_any(Self::error::( + None, + deserr::ErrorKind::MissingField { field }, + location, + )); + Self { msg: x.msg, code: MissingApiKeyIndexes.error_code(), _phantom: PhantomData } + } +} + +impl DeserrJsonError { + pub fn missing_swap_indexes_indexes(field: &str, location: ValuePointerRef) -> Self { + let x = unwrap_any(Self::error::( + None, + deserr::ErrorKind::MissingField { field }, + location, + )); + Self { msg: x.msg, code: MissingSwapIndexesIndexes.error_code(), _phantom: PhantomData } + } +} // if the error happened in the root, then an empty string is returned. 
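The four near-identical helpers above exist so that a missing required field can surface a field-specific error code (`missing_api_key_actions`, `missing_api_key_indexes`, `missing_api_key_expires_at`, `missing_swap_indexes_indexes`) instead of the generic `bad_request`. They are hooked up through deserr's `missing_field_error` attribute, as on `CreateApiKey` in keys.rs below. A trimmed sketch of the pattern, not freestanding code, since it assumes the surrounding meilisearch-types items:

```rust
// When `indexes` is absent from the payload, deserr calls the named
// constructor instead of emitting its generic missing-field error, which is
// what switches the response's `code` and `link`.
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct CreateApiKeySketch {
    #[deserr(
        error = DeserrJsonError<InvalidApiKeyIndexes>,
        missing_field_error = DeserrJsonError::missing_api_key_indexes
    )]
    pub indexes: Vec<StarOr<IndexUid>>,
}
```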
pub fn location_json_description(location: ValuePointerRef, article: &str) -> String { diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index 53776e489..d736fd8c1 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -38,17 +38,17 @@ fn parse_uuid_from_str(s: &str) -> Result> { #[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct CreateApiKey { - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub description: Option, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub name: Option, #[deserr(default = Uuid::new_v4(), error = DeserrJsonError, from(&String) = parse_uuid_from_str -> TakeErrorMessage)] pub uid: KeyId, - #[deserr(error = DeserrJsonError)] + #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_api_key_actions)] pub actions: Vec, - #[deserr(error = DeserrJsonError)] + #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_api_key_indexes)] pub indexes: Vec>, - #[deserr(error = DeserrJsonError, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage)] + #[deserr(error = DeserrJsonError, from(&String) = parse_expiration_date -> TakeErrorMessage, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] pub expires_at: Option, } impl CreateApiKey { @@ -94,9 +94,9 @@ fn deny_immutable_fields_api_key( #[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)] pub struct PatchApiKey { - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub description: Option, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub name: Option, } diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs index f8fc47abd..354a25fa1 100644 --- a/meilisearch-types/src/lib.rs +++ b/meilisearch-types/src/lib.rs @@ -8,7 +8,6 @@ pub mod star_or; pub mod tasks; pub mod versioning; -pub use deserr; pub use milli; pub use milli::{heed, Index}; pub use serde_cs; diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index 0a79f865e..99f4ae9e3 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -69,8 +69,10 @@ fn validate_min_word_size_for_typo_setting( #[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError)] pub struct MinWordSizeTyposSetting { #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub one_typo: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub two_typos: Setting, } @@ -79,13 +81,16 @@ pub struct MinWordSizeTyposSetting { #[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError>)] pub struct TypoSettings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub enabled: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub min_word_size_for_typos: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub disable_on_words: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub 
disable_on_attributes: Setting>, } @@ -94,6 +99,7 @@ pub struct TypoSettings { #[deserr(rename_all = camelCase, deny_unknown_fields)] pub struct FacetingSettings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub max_values_per_facet: Setting, } @@ -102,6 +108,7 @@ pub struct FacetingSettings { #[deserr(rename_all = camelCase, deny_unknown_fields)] pub struct PaginationSettings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default)] pub max_total_hits: Setting, } @@ -135,7 +142,7 @@ pub struct Settings { serialize_with = "serialize_with_wildcard", skip_serializing_if = "Setting::is_not_set" )] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub displayed_attributes: Setting>, #[serde( @@ -143,35 +150,35 @@ pub struct Settings { serialize_with = "serialize_with_wildcard", skip_serializing_if = "Setting::is_not_set" )] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub searchable_attributes: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub filterable_attributes: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub sortable_attributes: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub ranking_rules: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub stop_words: Setting>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub synonyms: Setting>>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub distinct_attribute: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub typo_tolerance: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub faceting: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub pagination: Setting, #[serde(skip)] diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index be852c02e..2c2c2aca8 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", " bytes = "1.2.1" clap = { version = "4.0.9", features = ["derive", "env"] } crossbeam-channel = "0.5.6" -deserr = "0.1.4" +deserr = { path = "/Users/meilisearch/Documents/deserr" } dump = { path = "../dump" } either = "1.8.0" env_logger = "0.9.1" diff --git a/meilisearch/src/routes/api_key.rs b/meilisearch/src/routes/api_key.rs index ce4ab0696..917a5e285 100644 --- a/meilisearch/src/routes/api_key.rs +++ b/meilisearch/src/routes/api_key.rs @@ -55,10 +55,10 @@ pub async fn create_api_key( #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ListApiKeys { #[serde(default)] - #[deserr(error = DeserrQueryParamError, default, from(&String) = 
parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub offset: usize, #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - #[deserr(error = DeserrQueryParamError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(default = PAGINATION_DEFAULT_LIMIT(), error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub limit: usize, } impl ListApiKeys { diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index c09b12244..2c1b0f692 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -85,7 +85,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct GetDocument { // TODO: strongly typed argument here - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] fields: Option>>, } @@ -122,11 +122,11 @@ pub async fn delete_document( #[derive(Deserialize, Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct BrowseQuery { - #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] offset: usize, - #[deserr(error = DeserrQueryParamError, default = crate::routes::PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(default = crate::routes::PAGINATION_DEFAULT_LIMIT(), error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] limit: usize, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] fields: Option>>, } @@ -151,7 +151,7 @@ pub async fn get_all_documents( #[derive(Deserialize, Debug, DeserializeFromValue)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct UpdateDocumentsQuery { - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub primary_key: Option, } diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 061eefaf6..216cc448e 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -76,10 +76,10 @@ impl IndexView { #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ListIndexes { #[serde(default)] - #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub offset: usize, #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - #[deserr(error = DeserrQueryParamError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] + #[deserr(default = PAGINATION_DEFAULT_LIMIT(), error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub limit: usize, } impl ListIndexes { diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 8819ac8cf..ec9364711 100644 --- 
a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -48,39 +48,39 @@ pub fn parse_bool_take_error_message( #[derive(Debug, deserr::DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct SearchQueryGet { - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] q: Option, - #[deserr(error = DeserrQueryParamError, default = DEFAULT_SEARCH_OFFSET(), from(String) = parse_usize_query_param -> TakeErrorMessage)] + #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrQueryParamError, from(String) = parse_usize_query_param -> TakeErrorMessage)] offset: usize, - #[deserr(error = DeserrQueryParamError, default = DEFAULT_SEARCH_LIMIT(), from(String) = parse_usize_query_param -> TakeErrorMessage)] + #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrQueryParamError, from(String) = parse_usize_query_param -> TakeErrorMessage)] limit: usize, - #[deserr(error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] page: Option, - #[deserr(error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] hits_per_page: Option, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] attributes_to_retrieve: Option>, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] attributes_to_crop: Option>, - #[deserr(error = DeserrQueryParamError, default = DEFAULT_CROP_LENGTH(), from(String) = parse_usize_query_param -> TakeErrorMessage)] + #[deserr(default = DEFAULT_CROP_LENGTH(), error = DeserrQueryParamError, from(String) = parse_usize_query_param -> TakeErrorMessage)] crop_length: usize, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] attributes_to_highlight: Option>, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] filter: Option, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] sort: Option, - #[deserr(error = DeserrQueryParamError, default, from(&String) = parse_bool_take_error_message -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_bool_take_error_message -> TakeErrorMessage)] show_matches_position: bool, - #[deserr(error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] facets: Option>, - #[deserr(error = DeserrQueryParamError, default = DEFAULT_HIGHLIGHT_PRE_TAG())] + #[deserr( default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError)] highlight_pre_tag: String, - #[deserr(error = DeserrQueryParamError, default = DEFAULT_HIGHLIGHT_POST_TAG())] + #[deserr( default = DEFAULT_HIGHLIGHT_POST_TAG(), error = DeserrQueryParamError)] highlight_post_tag: String, - #[deserr(error = DeserrQueryParamError, default = DEFAULT_CROP_MARKER())] + #[deserr(default = DEFAULT_CROP_MARKER(), error = DeserrQueryParamError)] crop_marker: String, - #[deserr(error = DeserrQueryParamError, default)] + #[deserr(default, error = DeserrQueryParamError)] matching_strategy: MatchingStrategy, } diff --git a/meilisearch/src/routes/swap_indexes.rs 
b/meilisearch/src/routes/swap_indexes.rs index 57015f1f1..5d6d1e1e5 100644 --- a/meilisearch/src/routes/swap_indexes.rs +++ b/meilisearch/src/routes/swap_indexes.rs @@ -22,7 +22,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SwapIndexesPayload { - #[deserr(error = DeserrJsonError)] + #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_swap_indexes_indexes)] indexes: Vec, } diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index dbf1380e2..d9c498e4e 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -169,61 +169,61 @@ impl From
for DetailsView { #[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TasksFilterQuery { - #[deserr(error = DeserrQueryParamError, default = DEFAULT_LIMIT(), from(String) = parse_u32_query_param -> TakeErrorMessage)] + #[deserr(default = DEFAULT_LIMIT(), error = DeserrQueryParamError, from(String) = parse_u32_query_param -> TakeErrorMessage)] pub limit: u32, - #[deserr(error = DeserrQueryParamError, from(Option) = parse_option_u32_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option) = parse_option_u32_query_param -> TakeErrorMessage)] pub from: Option, - #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub uids: Option>, - #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub canceled_by: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub types: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub statuses: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub index_uids: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_enqueued_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_enqueued_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_started_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_started_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_finished_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + 
#[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_finished_at: Option, } #[derive(Deserialize, Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TaskDeletionOrCancelationQuery { - #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub uids: Option>, - #[deserr(error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] pub canceled_by: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub types: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub statuses: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] pub index_uids: Option>, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_enqueued_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_enqueued_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_started_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_started_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] pub after_finished_at: Option, - #[deserr(error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] + #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] pub before_finished_at: Option, } diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index 4e2c43f18..bfb0bf160 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -3,8 +3,8 @@ use 
std::collections::{BTreeMap, BTreeSet, HashSet}; use std::str::FromStr; use std::time::Instant; +use deserr::DeserializeFromValue; use either::Either; -use meilisearch_types::deserr::DeserializeFromValue; use meilisearch_types::error::deserr_codes::*; use meilisearch_types::error::DeserrJsonError; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; @@ -32,39 +32,39 @@ pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "".to_string(); #[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SearchQuery { - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub q: Option, - #[deserr(error = DeserrJsonError, default = DEFAULT_SEARCH_OFFSET())] + #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError)] pub offset: usize, - #[deserr(error = DeserrJsonError, default = DEFAULT_SEARCH_LIMIT())] + #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError)] pub limit: usize, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub page: Option, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub hits_per_page: Option, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub attributes_to_retrieve: Option>, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub attributes_to_crop: Option>, - #[deserr(error = DeserrJsonError, default = DEFAULT_CROP_LENGTH())] + #[deserr(default, error = DeserrJsonError, default = DEFAULT_CROP_LENGTH())] pub crop_length: usize, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub attributes_to_highlight: Option>, - #[deserr(error = DeserrJsonError, default)] + #[deserr(default, error = DeserrJsonError, default)] pub show_matches_position: bool, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub filter: Option, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub sort: Option>, - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub facets: Option>, - #[deserr(error = DeserrJsonError, default = DEFAULT_HIGHLIGHT_PRE_TAG())] + #[deserr(default, error = DeserrJsonError, default = DEFAULT_HIGHLIGHT_PRE_TAG())] pub highlight_pre_tag: String, - #[deserr(error = DeserrJsonError, default = DEFAULT_HIGHLIGHT_POST_TAG())] + #[deserr(default, error = DeserrJsonError, default = DEFAULT_HIGHLIGHT_POST_TAG())] pub highlight_post_tag: String, - #[deserr(error = DeserrJsonError, default = DEFAULT_CROP_MARKER())] + #[deserr(default, error = DeserrJsonError, default = DEFAULT_CROP_MARKER())] pub crop_marker: String, - #[deserr(error = DeserrJsonError, default)] + #[deserr(default, error = DeserrJsonError, default)] pub matching_strategy: MatchingStrategy, } diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index 8d7cb9130..03910c0a9 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -249,9 +249,9 @@ async fn error_add_api_key_missing_parameter() { meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { "message": "Missing field `indexes`", - "code": "bad_request", + "code": "missing_api_key_indexes", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#bad-request" + "link": 
"https://docs.meilisearch.com/errors#missing-api-key-indexes" } "###); @@ -266,9 +266,9 @@ async fn error_add_api_key_missing_parameter() { meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { "message": "Missing field `actions`", - "code": "bad_request", + "code": "missing_api_key_actions", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#bad-request" + "link": "https://docs.meilisearch.com/errors#missing-api-key-actions" } "###); @@ -279,22 +279,13 @@ async fn error_add_api_key_missing_parameter() { "actions": ["documents.add"], }); let (response, code) = server.add_api_key(content).await; - meili_snap::snapshot!(code, @"201 Created"); + meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { - "name": null, - "description": "Indexing API key", - "key": "[ignored]", - "uid": "[ignored]", - "actions": [ - "documents.add" - ], - "indexes": [ - "products" - ], - "expiresAt": null, - "createdAt": "[ignored]", - "updatedAt": "[ignored]" + "message": "Missing field `expiresAt`", + "code": "missing_api_key_expires_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing-api-key-expires-at" } "###); } diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 4af365a7e..c27b899c6 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -926,7 +926,7 @@ async fn error_primary_key_inference() { "indexedDocuments": 1 }, "error": { - "message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", + "message": "The primary key inference process failed because the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", "code": "index_primary_key_no_candidate_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found" @@ -966,7 +966,7 @@ async fn error_primary_key_inference() { "indexedDocuments": 1 }, "error": { - "message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.", + "message": "The primary key inference process failed because the engine found 3 fields ending with `id` in their name, such as 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.", "code": "index_primary_key_multiple_candidates_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found" From 49ddaaef49646f586ba6c5e583313e24a6fb3d83 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Thu, 12 Jan 2023 16:42:50 +0100 Subject: [PATCH 004/186] Fix missing_swap_indexes error code and handling of expires_at param... 
of create api key route --- meilisearch-types/src/error.rs | 7 ++++--- meilisearch-types/src/keys.rs | 19 +++++++++++-------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 0d7f126a5..614449ff8 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -15,7 +15,8 @@ use serde_cs::vec::CS; use crate::star_or::StarOr; use self::deserr_codes::{ - MissingApiKeyActions, MissingApiKeyExpiresAt, MissingApiKeyIndexes, MissingIndexUid, InvalidSwapIndexes, MissingSwapIndexesIndexes, + InvalidSwapIndexes, MissingApiKeyActions, MissingApiKeyExpiresAt, MissingApiKeyIndexes, + MissingIndexUid, MissingSwapIndexes, }; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] @@ -280,7 +281,7 @@ MissingDocumentId , invalid , BAD_REQUEST ; MissingIndexUid , invalid , BAD_REQUEST ; MissingMasterKey , authentication, UNAUTHORIZED ; MissingPayload , invalid , BAD_REQUEST ; -MissingSwapIndexesIndexes , invalid , BAD_REQUEST ; +MissingSwapIndexes , invalid , BAD_REQUEST ; MissingTaskFilters , invalid , BAD_REQUEST ; NoSpaceLeftOnDevice , system , UNPROCESSABLE_ENTITY; PayloadTooLarge , invalid , PAYLOAD_TOO_LARGE ; @@ -516,7 +517,7 @@ impl DeserrJsonError { deserr::ErrorKind::MissingField { field }, location, )); - Self { msg: x.msg, code: MissingSwapIndexesIndexes.error_code(), _phantom: PhantomData } + Self { msg: x.msg, code: MissingSwapIndexes.error_code(), _phantom: PhantomData } } } diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index d736fd8c1..8543651c5 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -48,7 +48,7 @@ pub struct CreateApiKey { pub actions: Vec, #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_api_key_indexes)] pub indexes: Vec>, - #[deserr(error = DeserrJsonError, from(&String) = parse_expiration_date -> TakeErrorMessage, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] + #[deserr(error = DeserrJsonError, from(Option) = parse_expiration_date -> TakeErrorMessage, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] pub expires_at: Option, } impl CreateApiKey { @@ -159,36 +159,39 @@ impl Display for ParseOffsetDateTimeError { impl std::error::Error for ParseOffsetDateTimeError {} fn parse_expiration_date( - string: &str, + string: Option, ) -> std::result::Result, TakeErrorMessage> { - let datetime = if let Ok(datetime) = OffsetDateTime::parse(string, &Rfc3339) { + let Some(string) = string else { + return Ok(None) + }; + let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) { datetime } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse( - string, + &string, format_description!( "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]" ), ) { primitive_datetime.assume_utc() } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse( - string, + &string, format_description!( "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]" ), ) { primitive_datetime.assume_utc() } else if let Ok(date) = Date::parse( - string, + &string, format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"), ) { PrimitiveDateTime::new(date, time!(00:00)).assume_utc() } else { - return Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned()))); + return Err(TakeErrorMessage(ParseOffsetDateTimeError(string))); }; if datetime > 
OffsetDateTime::now_utc() { Ok(Some(datetime)) } else { - Err(TakeErrorMessage(ParseOffsetDateTimeError(string.to_owned()))) + Err(TakeErrorMessage(ParseOffsetDateTimeError(string))) } } From 9194508a0f4d8bdde1fc35882ebdb389c2a20b78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Mon, 16 Jan 2023 16:59:26 +0100 Subject: [PATCH 005/186] Refactor query parameter deserialisation logic --- Cargo.lock | 31 +- meilisearch-auth/src/lib.rs | 9 +- meilisearch-auth/src/store.rs | 3 +- meilisearch-types/Cargo.toml | 2 +- .../src/deserr/error_messages.rs | 315 +++++++ meilisearch-types/src/deserr/mod.rs | 134 +++ meilisearch-types/src/deserr/query_params.rs | 115 +++ meilisearch-types/src/error.rs | 868 ++++-------------- meilisearch-types/src/index_uid.rs | 6 + meilisearch-types/src/keys.rs | 48 +- meilisearch-types/src/lib.rs | 2 +- meilisearch-types/src/settings.rs | 3 +- meilisearch-types/src/star_or.rs | 284 ++++-- meilisearch-types/src/tasks.rs | 73 +- meilisearch/Cargo.toml | 2 +- meilisearch/src/routes/api_key.rs | 24 +- meilisearch/src/routes/indexes/documents.rs | 38 +- meilisearch/src/routes/indexes/mod.rs | 22 +- meilisearch/src/routes/indexes/search.rs | 59 +- meilisearch/src/routes/indexes/settings.rs | 25 +- meilisearch/src/routes/mod.rs | 2 +- meilisearch/src/routes/swap_indexes.rs | 5 +- meilisearch/src/routes/tasks.rs | 476 +++++----- meilisearch/src/search.rs | 7 +- meilisearch/tests/search/errors.rs | 2 +- meilisearch/tests/tasks/errors.rs | 2 +- 26 files changed, 1377 insertions(+), 1180 deletions(-) create mode 100644 meilisearch-types/src/deserr/error_messages.rs create mode 100644 meilisearch-types/src/deserr/mod.rs create mode 100644 meilisearch-types/src/deserr/query_params.rs diff --git a/Cargo.lock b/Cargo.lock index 6e754abd3..4bbe05745 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1026,7 +1026,18 @@ dependencies = [ name = "deserr" version = "0.1.4" dependencies = [ - "deserr-internal", + "deserr-internal 0.1.4", + "serde-cs", + "serde_json", +] + +[[package]] +name = "deserr" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86290491a2b5c21a1a5083da8dae831006761258fabd5617309c3eebc5f89468" +dependencies = [ + "deserr-internal 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde-cs", "serde_json", ] @@ -1041,6 +1052,18 @@ dependencies = [ "syn 1.0.107", ] +[[package]] +name = "deserr-internal" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7131de1c27581bc376a22166c9f570be91b76cb096be2f6aecf224c27bf7c49a" +dependencies = [ + "convert_case 0.5.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "deunicode" version = "1.3.3" @@ -2300,7 +2323,7 @@ dependencies = [ "cargo_toml", "clap 4.0.32", "crossbeam-channel", - "deserr", + "deserr 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "dump", "either", "env_logger", @@ -2391,7 +2414,7 @@ dependencies = [ "anyhow", "convert_case 0.6.0", "csv", - "deserr", + "deserr 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "either", "enum-iterator", "file-store", @@ -2451,7 +2474,7 @@ dependencies = [ "concat-arrays", "crossbeam-channel", "csv", - "deserr", + "deserr 0.1.4", "either", "filter-parser", "flatten-serde-json", diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 8d4a7f2b7..072b87dad 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -3,7 +3,6 @@ pub mod error; mod store; use 
std::collections::{HashMap, HashSet}; -use std::ops::Deref; use std::path::Path; use std::sync::Arc; @@ -86,15 +85,13 @@ impl AuthController { key.indexes .into_iter() .filter_map(|index| { - search_rules.get_index_search_rules(index.deref()).map( - |index_search_rules| { - (String::from(index), Some(index_search_rules)) - }, + search_rules.get_index_search_rules(&format!("{index}")).map( + |index_search_rules| (index.to_string(), Some(index_search_rules)), ) }) .collect(), ), - None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()), + None => SearchRules::Set(key.indexes.into_iter().map(|x| x.to_string()).collect()), }; } else if let Some(search_rules) = search_rules { filters.search_rules = search_rules; diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index b3f9ed672..2574572be 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -3,7 +3,6 @@ use std::cmp::Reverse; use std::collections::HashSet; use std::convert::{TryFrom, TryInto}; use std::fs::create_dir_all; -use std::ops::Deref; use std::path::Path; use std::str; use std::sync::Arc; @@ -135,7 +134,7 @@ impl HeedAuthStore { for index in key.indexes.iter() { db.put( &mut wtxn, - &(&uid, &action, Some(index.deref().as_bytes())), + &(&uid, &action, Some(index.to_string().as_bytes())), &key.expires_at, )?; } diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 257ac9c2d..cba3fc5d9 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -9,7 +9,7 @@ actix-web = { version = "4.2.1", default-features = false } anyhow = "1.0.65" convert_case = "0.6.0" csv = "1.1.6" -deserr = { path = "/Users/meilisearch/Documents/deserr" } +deserr = "0.1.4" either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" file-store = { path = "../file-store" } diff --git a/meilisearch-types/src/deserr/error_messages.rs b/meilisearch-types/src/deserr/error_messages.rs new file mode 100644 index 000000000..b289e454d --- /dev/null +++ b/meilisearch-types/src/deserr/error_messages.rs @@ -0,0 +1,315 @@ +/*! +This module implements the error messages of deserialization errors. + +We try to: +1. Give a human-readable description of where the error originated. +2. Use the correct terms depending on the format of the request (json/query param) +3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.) + */ +use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef}; + +use super::{DeserrJsonError, DeserrQueryParamError}; +use crate::error::ErrorCode; + +/// Return a description of the given location in a Json, preceded by the given article. +/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be +/// included in the description. +pub fn location_json_description(location: ValuePointerRef, article: &str) -> String { + fn rec(location: ValuePointerRef) -> String { + match location { + ValuePointerRef::Origin => String::new(), + ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key, + ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), + } + } + match location { + ValuePointerRef::Origin => String::new(), + _ => { + format!("{article} `{}`", rec(location)) + } + } +} + +/// Return a description of the list of value kinds for a Json payload. 
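// A concrete illustration (a sketch, not part of the patch; the pointer value
// is made up): for an error raised at `.movies[0].title`, calling
// `location_json_description(pointer, " at")` renders " at `.movies[0].title`",
// while an error at the origin renders as the empty string, so root-level
// messages carry no location suffix.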
+fn value_kinds_description_json(kinds: &[ValueKind]) -> String { + // Rank each value kind so that they can be sorted (and deduplicated) + // Having a predictable order helps with pattern matching + fn order(kind: &ValueKind) -> u8 { + match kind { + ValueKind::Null => 0, + ValueKind::Boolean => 1, + ValueKind::Integer => 2, + ValueKind::NegativeInteger => 3, + ValueKind::Float => 4, + ValueKind::String => 5, + ValueKind::Sequence => 6, + ValueKind::Map => 7, + } + } + // Return a description of a single value kind, preceded by an article + fn single_description(kind: &ValueKind) -> &'static str { + match kind { + ValueKind::Null => "null", + ValueKind::Boolean => "a boolean", + ValueKind::Integer => "a positive integer", + ValueKind::NegativeInteger => "an integer", + ValueKind::Float => "a number", + ValueKind::String => "a string", + ValueKind::Sequence => "an array", + ValueKind::Map => "an object", + } + } + + fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) { + let (msg_part, rest): (_, &[ValueKind]) = match kinds { + [] => (String::new(), &[]), + [ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => { + ("a number".to_owned(), rest) + } + [ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => { + ("a number".to_owned(), rest) + } + [ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => { + ("an integer".to_owned(), rest) + } + [a] => (single_description(a).to_owned(), &[]), + [a, rest @ ..] => (single_description(a).to_owned(), rest), + }; + + if rest.is_empty() { + if *count_items == 0 { + message.push_str(&msg_part); + } else if *count_items == 1 { + message.push_str(&format!(" or {msg_part}")); + } else { + message.push_str(&format!(", or {msg_part}")); + } + } else { + if *count_items == 0 { + message.push_str(&msg_part); + } else { + message.push_str(&format!(", {msg_part}")); + } + + *count_items += 1; + description_rec(rest, count_items, message); + } + } + + let mut kinds = kinds.to_owned(); + kinds.sort_by_key(order); + kinds.dedup(); + + if kinds.is_empty() { + // Should not happen ideally + "a different value".to_owned() + } else { + let mut message = String::new(); + description_rec(kinds.as_slice(), &mut 0, &mut message); + message + } +} + +/// Return the JSON string of the value preceded by a description of its kind +fn value_description_with_kind_json(v: &serde_json::Value) -> String { + match v.kind() { + ValueKind::Null => "null".to_owned(), + kind => { + format!( + "{}: `{}`", + value_kinds_description_json(&[kind]), + serde_json::to_string(v).unwrap() + ) + } + } +} + +impl deserr::DeserializeError for DeserrJsonError { + fn error( + _self_: Option, + error: deserr::ErrorKind, + location: ValuePointerRef, + ) -> Result { + let mut message = String::new(); + + message.push_str(&match error { + ErrorKind::IncorrectValueKind { actual, accepted } => { + let expected = value_kinds_description_json(accepted); + let received = value_description_with_kind_json(&serde_json::Value::from(actual)); + + let location = location_json_description(location, " at"); + + format!("Invalid value type{location}: expected {expected}, but found {received}") + } + ErrorKind::MissingField { field } => { + let location = location_json_description(location, " inside"); + format!("Missing field `{field}`{location}") + } + ErrorKind::UnknownKey { key, accepted } => { + let location = location_json_description(location, " inside"); + format!( + "Unknown field `{}`{location}: expected one of 
{}", + key, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", ") + ) + } + ErrorKind::UnknownValue { value, accepted } => { + let location = location_json_description(location, " at"); + format!( + "Unknown value `{}`{location}: expected one of {}", + value, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", "), + ) + } + ErrorKind::Unexpected { msg } => { + let location = location_json_description(location, " at"); + format!("Invalid value{location}: {msg}") + } + }); + + Err(DeserrJsonError::new(message, C::default().error_code())) + } +} + +/// Return a description of the given location in query parameters, preceded by the +/// given article. e.g. `at key5[2]`. If the location is the origin, the given article +/// will not be included in the description. +pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String { + fn rec(location: ValuePointerRef) -> String { + match location { + ValuePointerRef::Origin => String::new(), + ValuePointerRef::Key { key, prev } => { + if matches!(prev, ValuePointerRef::Origin) { + key.to_owned() + } else { + rec(*prev) + "." + key + } + } + ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), + } + } + match location { + ValuePointerRef::Origin => String::new(), + _ => { + format!("{article} `{}`", rec(location)) + } + } +} + +impl deserr::DeserializeError for DeserrQueryParamError { + fn error( + _self_: Option, + error: deserr::ErrorKind, + location: ValuePointerRef, + ) -> Result { + let mut message = String::new(); + + message.push_str(&match error { + ErrorKind::IncorrectValueKind { actual, accepted } => { + let expected = value_kinds_description_query_param(accepted); + let received = value_description_with_kind_query_param(actual); + + let location = location_query_param_description(location, " for parameter"); + + format!("Invalid value type{location}: expected {expected}, but found {received}") + } + ErrorKind::MissingField { field } => { + let location = location_query_param_description(location, " inside"); + format!("Missing parameter `{field}`{location}") + } + ErrorKind::UnknownKey { key, accepted } => { + let location = location_query_param_description(location, " inside"); + format!( + "Unknown parameter `{}`{location}: expected one of {}", + key, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", ") + ) + } + ErrorKind::UnknownValue { value, accepted } => { + let location = location_query_param_description(location, " for parameter"); + format!( + "Unknown value `{}`{location}: expected one of {}", + value, + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", "), + ) + } + ErrorKind::Unexpected { msg } => { + let location = location_query_param_description(location, " in parameter"); + format!("Invalid value{location}: {msg}") + } + }); + + Err(DeserrQueryParamError::new(message, C::default().error_code())) + } +} + +/// Return a description of the list of value kinds for query parameters +/// Since query parameters are always treated as strings, we always return +/// "a string" for now. 
+fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String { + "a string".to_owned() +} + +fn value_description_with_kind_query_param(actual: deserr::Value) -> String { + match actual { + deserr::Value::Null => "null".to_owned(), + deserr::Value::Boolean(x) => format!("a boolean: `{x}`"), + deserr::Value::Integer(x) => format!("an integer: `{x}`"), + deserr::Value::NegativeInteger(x) => { + format!("an integer: `{x}`") + } + deserr::Value::Float(x) => { + format!("a number: `{x}`") + } + deserr::Value::String(x) => { + format!("a string: `{x}`") + } + deserr::Value::Sequence(_) => "multiple values".to_owned(), + deserr::Value::Map(_) => "multiple parameters".to_owned(), + } +} + +#[cfg(test)] +mod tests { + use deserr::ValueKind; + + use crate::deserr::error_messages::value_kinds_description_json; + + #[test] + fn test_value_kinds_description_json() { + insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value"); + + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"an integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object"); + + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"an integer or an array"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); + } +} diff --git a/meilisearch-types/src/deserr/mod.rs b/meilisearch-types/src/deserr/mod.rs new file mode 100644 index 000000000..c15b2c3a0 --- /dev/null +++ b/meilisearch-types/src/deserr/mod.rs @@ -0,0 +1,134 @@ +use std::convert::Infallible; +use std::fmt; +use std::marker::PhantomData; + +use deserr::{DeserializeError, MergeWithError, ValuePointerRef}; + +use crate::error::deserr_codes::{self, *}; +use crate::error::{ + unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError, + ParseOffsetDateTimeError, +}; 
+use crate::index_uid::IndexUidFormatError;
+use crate::tasks::{ParseTaskKindError, ParseTaskStatusError};
+
+pub mod error_messages;
+pub mod query_params;
+
+/// Marker type for the Json format
+pub struct DeserrJson;
+/// Marker type for the Query Parameter format
+pub struct DeserrQueryParam;
+
+pub type DeserrJsonError<C = BadRequest> = DeserrError<DeserrJson, C>;
+pub type DeserrQueryParamError<C = BadRequest> = DeserrError<DeserrQueryParam, C>;
+
+/// A request deserialization error.
+///
+/// The first generic parameter is a marker type describing the format of the request: either JSON ([`DeserrJson`]) or query parameter ([`DeserrQueryParam`]).
+/// The second generic parameter is the default error code for the deserialization error, in case it is not given.
+pub struct DeserrError<Format, C: Default + ErrorCode> {
+    pub msg: String,
+    pub code: Code,
+    _phantom: PhantomData<(Format, C)>,
+}
+impl<Format, C: Default + ErrorCode> DeserrError<Format, C> {
+    pub fn new(msg: String, code: Code) -> Self {
+        Self { msg, code, _phantom: PhantomData }
+    }
+}
+impl<Format, C: Default + ErrorCode> std::fmt::Debug for DeserrError<Format, C> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
+    }
+}
+
+impl<Format, C: Default + ErrorCode> std::fmt::Display for DeserrError<Format, C> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.msg)
+    }
+}
+
+impl<Format, C: Default + ErrorCode> std::error::Error for DeserrError<Format, C> {}
+impl<Format, C: Default + ErrorCode> ErrorCode for DeserrError<Format, C> {
+    fn error_code(&self) -> Code {
+        self.code
+    }
+}
+
+// For now, we don't accumulate errors. Only one deserialisation error is ever returned at a time.
+impl<Format, C: Default + ErrorCode, F: Default + ErrorCode>
+    MergeWithError<DeserrError<Format, F>> for DeserrError<Format, C>
+{
+    fn merge(
+        _self_: Option<Self>,
+        other: DeserrError<Format, F>,
+        _merge_location: ValuePointerRef,
+    ) -> Result<Self, Self> {
+        Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
+    }
+}
+
+impl<Format, C: Default + ErrorCode> MergeWithError<Infallible> for DeserrError<Format, C> {
+    fn merge(
+        _self_: Option<Self>,
+        _other: Infallible,
+        _merge_location: ValuePointerRef,
+    ) -> Result<Self, Self> {
+        unreachable!()
+    }
+}
+
+// Implement a convenience function to build a `missing_field` error
+macro_rules! make_missing_field_convenience_builder {
+    ($err_code:ident, $fn_name:ident) => {
+        impl DeserrJsonError<$err_code> {
+            pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self {
+                let x = unwrap_any(Self::error::<Infallible>(
+                    None,
+                    deserr::ErrorKind::MissingField { field },
+                    location,
+                ));
+                Self { msg: x.msg, code: $err_code.error_code(), _phantom: PhantomData }
+            }
+        }
+    };
+}
+make_missing_field_convenience_builder!(MissingIndexUid, missing_index_uid);
+make_missing_field_convenience_builder!(MissingApiKeyActions, missing_api_key_actions);
+make_missing_field_convenience_builder!(MissingApiKeyExpiresAt, missing_api_key_expires_at);
+make_missing_field_convenience_builder!(MissingApiKeyIndexes, missing_api_key_indexes);
+make_missing_field_convenience_builder!(MissingSwapIndexes, missing_swap_indexes);
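// How the pieces above fit together (an illustrative sketch, not part of the
// patch; `Payload` is a made-up name): a route pins a deserialization failure
// to a precise error code by naming a marker type from `deserr_codes` in the
// field attribute:
//
//     #[derive(DeserializeFromValue)]
//     #[deserr(error = DeserrJsonError)]
//     struct Payload {
//         #[deserr(error = DeserrJsonError<InvalidSearchQ>)]
//         q: Option<String>,
//     }
//
// A type error on `q` then surfaces as `Code::InvalidSearchQ`, rendered over
// HTTP as `invalid_search_q`, instead of the catch-all `bad_request`.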
+
+// Integrate a sub-error into a [`DeserrError`] by taking its error message but using
+// the default error code (C) from `Self`
+macro_rules! merge_with_error_impl_take_error_message {
+    ($err_type:ty) => {
+        impl<Format, C: Default + ErrorCode> MergeWithError<$err_type> for DeserrError<Format, C>
+        where
+            DeserrError<Format, C>: deserr::DeserializeError,
+        {
+            fn merge(
+                _self_: Option<Self>,
+                other: $err_type,
+                merge_location: ValuePointerRef,
+            ) -> Result<Self, Self> {
+                DeserrError::<Format, C>::error::<Infallible>(
+                    None,
+                    deserr::ErrorKind::Unexpected { msg: other.to_string() },
+                    merge_location,
+                )
+            }
+        }
+    };
+}
+
+// All these errors can be merged into a `DeserrError`
+merge_with_error_impl_take_error_message!(DeserrParseIntError);
+merge_with_error_impl_take_error_message!(DeserrParseBoolError);
+merge_with_error_impl_take_error_message!(uuid::Error);
+merge_with_error_impl_take_error_message!(InvalidTaskDateError);
+merge_with_error_impl_take_error_message!(ParseOffsetDateTimeError);
+merge_with_error_impl_take_error_message!(ParseTaskKindError);
+merge_with_error_impl_take_error_message!(ParseTaskStatusError);
+merge_with_error_impl_take_error_message!(IndexUidFormatError);
diff --git a/meilisearch-types/src/deserr/query_params.rs b/meilisearch-types/src/deserr/query_params.rs
new file mode 100644
index 000000000..28629aa1b
--- /dev/null
+++ b/meilisearch-types/src/deserr/query_params.rs
@@ -0,0 +1,115 @@
+/*!
+This module provides helper traits, types, and functions to deserialize query parameters.
+
+The source of the problem is that query parameters only give us a string to work with.
+This means `deserr` is never given a sequence or a number, and thus the default deserialization
+code for common types such as `usize` or `Vec<T>` does not work. To work around it, we create a
+wrapper type called `Param<T>`, which is deserialised using the `from_query_param` method of the
+trait `FromQueryParameter`.
+
+We also use other helper types such as `CS` (i.e. comma-separated) from `serde_cs` as well as
+`StarOr`, `OptionStarOr`, and `OptionStarOrList`.
+*/
+
+use std::convert::Infallible;
+use std::ops::Deref;
+use std::str::FromStr;
+
+use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
+
+use super::{DeserrParseBoolError, DeserrParseIntError};
+use crate::error::unwrap_any;
+use crate::index_uid::IndexUid;
+use crate::tasks::{Kind, Status};
+
+/// A wrapper type indicating that the inner value should be
+/// deserialised from a query parameter string.
+///
+/// Note that if the field is optional, it is better to use
+/// `Option<Param<T>>` instead of `Param<Option<T>>`.
+#[derive(Default, Debug, Clone, Copy)]
+pub struct Param<T>(pub T);
+
+impl<T> Deref for Param<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<T, E> DeserializeFromValue<E> for Param<T>
+where
+    E: DeserializeError + MergeWithError<T::Err>,
+    T: FromQueryParameter,
+{
+    fn deserialize_from_value<V: deserr::IntoValue>(
+        value: deserr::Value<V>,
+        location: deserr::ValuePointerRef,
+    ) -> Result<Self, E> {
+        match value {
+            deserr::Value::String(s) => match T::from_query_param(&s) {
+                Ok(x) => Ok(Param(x)),
+                Err(e) => Err(unwrap_any(E::merge(None, e, location))),
+            },
+            _ => Err(unwrap_any(E::error(
+                None,
+                deserr::ErrorKind::IncorrectValueKind {
+                    actual: value,
+                    accepted: &[ValueKind::String],
+                },
+                location,
+            ))),
+        }
+    }
+}
+
+/// Parse a value from a query parameter string.
+///
+/// This trait is functionally equivalent to `FromStr`.
+/// Having a separate trait allows us to return better
+/// deserialization error messages.
+pub trait FromQueryParameter: Sized {
+    type Err;
+    fn from_query_param(p: &str) -> Result<Self, Self::Err>;
+}
+
+/// Implement `FromQueryParameter` for the given type using its `FromStr`
+/// trait implementation.
+macro_rules!
impl_from_query_param_from_str { + ($type:ty) => { + impl FromQueryParameter for $type { + type Err = <$type as FromStr>::Err; + fn from_query_param(p: &str) -> Result { + p.parse() + } + } + }; +} +impl_from_query_param_from_str!(Kind); +impl_from_query_param_from_str!(Status); +impl_from_query_param_from_str!(IndexUid); + +/// Implement `FromQueryParameter` for the given type using its `FromStr` +/// trait implementation, replacing the returned error with a struct +/// that wraps the original query parameter. +macro_rules! impl_from_query_param_wrap_original_value_in_error { + ($type:ty, $err_type:path) => { + impl FromQueryParameter for $type { + type Err = $err_type; + fn from_query_param(p: &str) -> Result { + p.parse().map_err(|_| $err_type(p.to_owned())) + } + } + }; +} +impl_from_query_param_wrap_original_value_in_error!(usize, DeserrParseIntError); +impl_from_query_param_wrap_original_value_in_error!(u32, DeserrParseIntError); +impl_from_query_param_wrap_original_value_in_error!(bool, DeserrParseBoolError); + +impl FromQueryParameter for String { + type Err = Infallible; + fn from_query_param(p: &str) -> Result { + Ok(p.to_owned()) + } +} diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 614449ff8..2fb55ee31 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -1,30 +1,17 @@ -use std::convert::Infallible; -use std::marker::PhantomData; -use std::str::FromStr; use std::{fmt, io}; use actix_web::http::StatusCode; use actix_web::{self as aweb, HttpResponseBuilder}; use aweb::rt::task::JoinError; use convert_case::Casing; -use deserr::{DeserializeError, ErrorKind, IntoValue, MergeWithError, ValueKind, ValuePointerRef}; use milli::heed::{Error as HeedError, MdbError}; use serde::{Deserialize, Serialize}; -use serde_cs::vec::CS; - -use crate::star_or::StarOr; - -use self::deserr_codes::{ - InvalidSwapIndexes, MissingApiKeyActions, MissingApiKeyExpiresAt, MissingApiKeyIndexes, - MissingIndexUid, MissingSwapIndexes, -}; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] #[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] pub struct ResponseError { #[serde(skip)] - #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))] code: StatusCode, message: String, #[serde(rename = "code")] @@ -43,7 +30,7 @@ impl ResponseError { Self { code: code.http(), message, - error_code: code.err_code().error_name, + error_code: code.name(), error_type: code.type_(), error_link: code.url(), } @@ -104,9 +91,9 @@ pub trait ErrorCode { #[allow(clippy::enum_variant_names)] enum ErrorType { - InternalError, - InvalidRequestError, - AuthenticationError, + Internal, + InvalidRequest, + Auth, System, } @@ -115,14 +102,24 @@ impl fmt::Display for ErrorType { use ErrorType::*; match self { - InternalError => write!(f, "internal"), - InvalidRequestError => write!(f, "invalid_request"), - AuthenticationError => write!(f, "auth"), + Internal => write!(f, "internal"), + InvalidRequest => write!(f, "invalid_request"), + Auth => write!(f, "auth"), System => write!(f, "system"), } } } +/// Implement all the error codes. +/// +/// 1. Make an enum `Code` where each error code is a variant +/// 2. Implement the `http`, `name`, and `type_` method on the enum +/// 3. Make a unit type for each error code in the module `deserr_codes`. +/// +/// The unit type's purpose is to be used as a marker type parameter, e.g. +/// `DeserrJsonError`. 
It implements `Default` and `ErrorCode`, +/// so we can get a value of the `Code` enum with the correct variant by calling +/// `MyErrorCode::default().error_code()`. macro_rules! make_error_codes { ($($code_ident:ident, $err_type:ident, $status:ident);*) => { #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -130,29 +127,31 @@ macro_rules! make_error_codes { $($code_ident),* } impl Code { - /// associate a `Code` variant to the actual ErrCode - fn err_code(&self) -> ErrCode { - match self { - $( - Code::$code_ident => { - ErrCode::$err_type( stringify!($code_ident).to_case(convert_case::Case::Snake), StatusCode::$status) - } - )* - } - } /// return the HTTP status code associated with the `Code` fn http(&self) -> StatusCode { - self.err_code().status_code + match self { + $( + Code::$code_ident => StatusCode::$status + ),* + } } /// return error name, used as error code fn name(&self) -> String { - self.err_code().error_name.to_string() + match self { + $( + Code::$code_ident => stringify!($code_ident).to_case(convert_case::Case::Snake) + ),* + } } /// return the error type fn type_(&self) -> String { - self.err_code().error_type.to_string() + match self { + $( + Code::$code_ident => ErrorType::$err_type.to_string() + ),* + } } /// return the doc url associated with the error @@ -177,144 +176,121 @@ macro_rules! make_error_codes { } } } + +// An exhaustive list of all the error codes used by meilisearch. make_error_codes! { -ApiKeyAlreadyExists , invalid , CONFLICT ; -ApiKeyNotFound , invalid , NOT_FOUND ; -BadParameter , invalid , BAD_REQUEST; -BadRequest , invalid , BAD_REQUEST; -DatabaseSizeLimitReached , internal , INTERNAL_SERVER_ERROR; -DocumentNotFound , invalid , NOT_FOUND; -DumpAlreadyProcessing , invalid , CONFLICT; -DumpNotFound , invalid , NOT_FOUND; -DumpProcessFailed , internal , INTERNAL_SERVER_ERROR; -DuplicateIndexFound , invalid , BAD_REQUEST; -ImmutableApiKeyUid , invalid , BAD_REQUEST; -ImmutableApiKeyKey , invalid , BAD_REQUEST; -ImmutableApiKeyActions , invalid , BAD_REQUEST; -ImmutableApiKeyIndexes , invalid , BAD_REQUEST; -ImmutableApiKeyExpiresAt , invalid , BAD_REQUEST; -ImmutableApiKeyCreatedAt , invalid , BAD_REQUEST; -ImmutableApiKeyUpdatedAt , invalid , BAD_REQUEST; -ImmutableIndexUid , invalid , BAD_REQUEST; -ImmutableIndexCreatedAt , invalid , BAD_REQUEST; -ImmutableIndexUpdatedAt , invalid , BAD_REQUEST; -IndexAlreadyExists , invalid , CONFLICT ; -IndexCreationFailed , internal , INTERNAL_SERVER_ERROR; -IndexNotFound , invalid , NOT_FOUND; -IndexPrimaryKeyAlreadyExists , invalid , BAD_REQUEST ; -IndexPrimaryKeyNoCandidateFound , invalid , BAD_REQUEST ; -IndexPrimaryKeyMultipleCandidatesFound, invalid , BAD_REQUEST; -Internal , internal , INTERNAL_SERVER_ERROR ; -InvalidApiKeyActions , invalid , BAD_REQUEST ; -InvalidApiKeyDescription , invalid , BAD_REQUEST ; -InvalidApiKeyExpiresAt , invalid , BAD_REQUEST ; -InvalidApiKeyIndexes , invalid , BAD_REQUEST ; -InvalidApiKeyLimit , invalid , BAD_REQUEST ; -InvalidApiKeyName , invalid , BAD_REQUEST ; -InvalidApiKeyOffset , invalid , BAD_REQUEST ; -InvalidApiKeyUid , invalid , BAD_REQUEST ; -InvalidApiKey , authentication, FORBIDDEN ; -InvalidContentType , invalid , UNSUPPORTED_MEDIA_TYPE ; -InvalidDocumentFields , invalid , BAD_REQUEST ; -InvalidDocumentGeoField , invalid , BAD_REQUEST ; -InvalidDocumentId , invalid , BAD_REQUEST ; -InvalidDocumentLimit , invalid , BAD_REQUEST ; -InvalidDocumentOffset , invalid , BAD_REQUEST ; -InvalidIndexLimit , invalid , BAD_REQUEST ; -InvalidIndexOffset , invalid , BAD_REQUEST 
; -InvalidIndexPrimaryKey , invalid , BAD_REQUEST ; -InvalidIndexUid , invalid , BAD_REQUEST ; -InvalidMinWordLengthForTypo , invalid , BAD_REQUEST ; -InvalidSearchAttributesToCrop , invalid , BAD_REQUEST ; -InvalidSearchAttributesToHighlight , invalid , BAD_REQUEST ; -InvalidSearchAttributesToRetrieve , invalid , BAD_REQUEST ; -InvalidSearchCropLength , invalid , BAD_REQUEST ; -InvalidSearchCropMarker , invalid , BAD_REQUEST ; -InvalidSearchFacets , invalid , BAD_REQUEST ; -InvalidSearchFilter , invalid , BAD_REQUEST ; -InvalidSearchHighlightPostTag , invalid , BAD_REQUEST ; -InvalidSearchHighlightPreTag , invalid , BAD_REQUEST ; -InvalidSearchHitsPerPage , invalid , BAD_REQUEST ; -InvalidSearchLimit , invalid , BAD_REQUEST ; -InvalidSearchMatchingStrategy , invalid , BAD_REQUEST ; -InvalidSearchOffset , invalid , BAD_REQUEST ; -InvalidSearchPage , invalid , BAD_REQUEST ; -InvalidSearchQ , invalid , BAD_REQUEST ; -InvalidSearchShowMatchesPosition , invalid , BAD_REQUEST ; -InvalidSearchSort , invalid , BAD_REQUEST ; -InvalidSettingsDisplayedAttributes , invalid , BAD_REQUEST ; -InvalidSettingsDistinctAttribute , invalid , BAD_REQUEST ; -InvalidSettingsFaceting , invalid , BAD_REQUEST ; -InvalidSettingsFilterableAttributes , invalid , BAD_REQUEST ; -InvalidSettingsPagination , invalid , BAD_REQUEST ; -InvalidSettingsRankingRules , invalid , BAD_REQUEST ; -InvalidSettingsSearchableAttributes , invalid , BAD_REQUEST ; -InvalidSettingsSortableAttributes , invalid , BAD_REQUEST ; -InvalidSettingsStopWords , invalid , BAD_REQUEST ; -InvalidSettingsSynonyms , invalid , BAD_REQUEST ; -InvalidSettingsTypoTolerance , invalid , BAD_REQUEST ; -InvalidState , internal , INTERNAL_SERVER_ERROR ; -InvalidStoreFile , internal , INTERNAL_SERVER_ERROR ; -InvalidSwapDuplicateIndexFound , invalid , BAD_REQUEST ; -InvalidSwapIndexes , invalid , BAD_REQUEST ; -InvalidTaskAfterEnqueuedAt , invalid , BAD_REQUEST ; -InvalidTaskAfterFinishedAt , invalid , BAD_REQUEST ; -InvalidTaskAfterStartedAt , invalid , BAD_REQUEST ; -InvalidTaskBeforeEnqueuedAt , invalid , BAD_REQUEST ; -InvalidTaskBeforeFinishedAt , invalid , BAD_REQUEST ; -InvalidTaskBeforeStartedAt , invalid , BAD_REQUEST ; -InvalidTaskCanceledBy , invalid , BAD_REQUEST ; -InvalidTaskFrom , invalid , BAD_REQUEST ; -InvalidTaskLimit , invalid , BAD_REQUEST ; -InvalidTaskStatuses , invalid , BAD_REQUEST ; -InvalidTaskTypes , invalid , BAD_REQUEST ; -InvalidTaskUids , invalid , BAD_REQUEST ; -IoError , system , UNPROCESSABLE_ENTITY; -MalformedPayload , invalid , BAD_REQUEST ; -MaxFieldsLimitExceeded , invalid , BAD_REQUEST ; -MissingApiKeyActions , invalid , BAD_REQUEST ; -MissingApiKeyExpiresAt , invalid , BAD_REQUEST ; -MissingApiKeyIndexes , invalid , BAD_REQUEST ; -MissingAuthorizationHeader , authentication, UNAUTHORIZED ; -MissingContentType , invalid , UNSUPPORTED_MEDIA_TYPE ; -MissingDocumentId , invalid , BAD_REQUEST ; -MissingIndexUid , invalid , BAD_REQUEST ; -MissingMasterKey , authentication, UNAUTHORIZED ; -MissingPayload , invalid , BAD_REQUEST ; -MissingSwapIndexes , invalid , BAD_REQUEST ; -MissingTaskFilters , invalid , BAD_REQUEST ; -NoSpaceLeftOnDevice , system , UNPROCESSABLE_ENTITY; -PayloadTooLarge , invalid , PAYLOAD_TOO_LARGE ; -TaskNotFound , invalid , NOT_FOUND ; -TooManyOpenFiles , system , UNPROCESSABLE_ENTITY ; -UnretrievableDocument , internal , BAD_REQUEST ; -UnretrievableErrorCode , invalid , BAD_REQUEST ; -UnsupportedMediaType , invalid , UNSUPPORTED_MEDIA_TYPE -} - -/// Internal structure providing a convenient way to create 
error codes -struct ErrCode { - status_code: StatusCode, - error_type: ErrorType, - error_name: String, -} - -impl ErrCode { - fn authentication(error_name: String, status_code: StatusCode) -> ErrCode { - ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError } - } - - fn internal(error_name: String, status_code: StatusCode) -> ErrCode { - ErrCode { status_code, error_name, error_type: ErrorType::InternalError } - } - - fn invalid(error_name: String, status_code: StatusCode) -> ErrCode { - ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError } - } - - fn system(error_name: String, status_code: StatusCode) -> ErrCode { - ErrCode { status_code, error_name, error_type: ErrorType::System } - } +ApiKeyAlreadyExists , InvalidRequest , CONFLICT ; +ApiKeyNotFound , InvalidRequest , NOT_FOUND ; +BadParameter , InvalidRequest , BAD_REQUEST; +BadRequest , InvalidRequest , BAD_REQUEST; +DatabaseSizeLimitReached , Internal , INTERNAL_SERVER_ERROR; +DocumentNotFound , InvalidRequest , NOT_FOUND; +DumpAlreadyProcessing , InvalidRequest , CONFLICT; +DumpNotFound , InvalidRequest , NOT_FOUND; +DumpProcessFailed , Internal , INTERNAL_SERVER_ERROR; +DuplicateIndexFound , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyActions , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyCreatedAt , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyExpiresAt , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyIndexes , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyKey , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyUid , InvalidRequest , BAD_REQUEST; +ImmutableApiKeyUpdatedAt , InvalidRequest , BAD_REQUEST; +ImmutableIndexCreatedAt , InvalidRequest , BAD_REQUEST; +ImmutableIndexUid , InvalidRequest , BAD_REQUEST; +ImmutableIndexUpdatedAt , InvalidRequest , BAD_REQUEST; +IndexAlreadyExists , InvalidRequest , CONFLICT ; +IndexCreationFailed , Internal , INTERNAL_SERVER_ERROR; +IndexNotFound , InvalidRequest , NOT_FOUND; +IndexPrimaryKeyAlreadyExists , InvalidRequest , BAD_REQUEST ; +IndexPrimaryKeyMultipleCandidatesFound, InvalidRequest , BAD_REQUEST; +IndexPrimaryKeyNoCandidateFound , InvalidRequest , BAD_REQUEST ; +Internal , Internal , INTERNAL_SERVER_ERROR ; +InvalidApiKey , Auth , FORBIDDEN ; +InvalidApiKeyActions , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyDescription , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyIndexes , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyLimit , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyName , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyOffset , InvalidRequest , BAD_REQUEST ; +InvalidApiKeyUid , InvalidRequest , BAD_REQUEST ; +InvalidContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ; +InvalidDocumentFields , InvalidRequest , BAD_REQUEST ; +InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ; +InvalidDocumentId , InvalidRequest , BAD_REQUEST ; +InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ; +InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ; +InvalidIndexLimit , InvalidRequest , BAD_REQUEST ; +InvalidIndexOffset , InvalidRequest , BAD_REQUEST ; +InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ; +InvalidIndexUid , InvalidRequest , BAD_REQUEST ; +InvalidMinWordLengthForTypo , InvalidRequest , BAD_REQUEST ; +InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ; +InvalidSearchAttributesToHighlight , InvalidRequest , BAD_REQUEST ; +InvalidSearchAttributesToRetrieve , InvalidRequest , BAD_REQUEST ; +InvalidSearchCropLength , InvalidRequest , BAD_REQUEST ; 
+InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ; +InvalidSearchFacets , InvalidRequest , BAD_REQUEST ; +InvalidSearchFilter , InvalidRequest , BAD_REQUEST ; +InvalidSearchHighlightPostTag , InvalidRequest , BAD_REQUEST ; +InvalidSearchHighlightPreTag , InvalidRequest , BAD_REQUEST ; +InvalidSearchHitsPerPage , InvalidRequest , BAD_REQUEST ; +InvalidSearchLimit , InvalidRequest , BAD_REQUEST ; +InvalidSearchMatchingStrategy , InvalidRequest , BAD_REQUEST ; +InvalidSearchOffset , InvalidRequest , BAD_REQUEST ; +InvalidSearchPage , InvalidRequest , BAD_REQUEST ; +InvalidSearchQ , InvalidRequest , BAD_REQUEST ; +InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ; +InvalidSearchSort , InvalidRequest , BAD_REQUEST ; +InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ; +InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ; +InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ; +InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ; +InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ; +InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ; +InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ; +InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ; +InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ; +InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ; +InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ; +InvalidState , Internal , INTERNAL_SERVER_ERROR ; +InvalidStoreFile , Internal , INTERNAL_SERVER_ERROR ; +InvalidSwapDuplicateIndexFound , InvalidRequest , BAD_REQUEST ; +InvalidSwapIndexes , InvalidRequest , BAD_REQUEST ; +InvalidTaskAfterEnqueuedAt , InvalidRequest , BAD_REQUEST ; +InvalidTaskAfterFinishedAt , InvalidRequest , BAD_REQUEST ; +InvalidTaskAfterStartedAt , InvalidRequest , BAD_REQUEST ; +InvalidTaskBeforeEnqueuedAt , InvalidRequest , BAD_REQUEST ; +InvalidTaskBeforeFinishedAt , InvalidRequest , BAD_REQUEST ; +InvalidTaskBeforeStartedAt , InvalidRequest , BAD_REQUEST ; +InvalidTaskCanceledBy , InvalidRequest , BAD_REQUEST ; +InvalidTaskFrom , InvalidRequest , BAD_REQUEST ; +InvalidTaskLimit , InvalidRequest , BAD_REQUEST ; +InvalidTaskStatuses , InvalidRequest , BAD_REQUEST ; +InvalidTaskTypes , InvalidRequest , BAD_REQUEST ; +InvalidTaskUids , InvalidRequest , BAD_REQUEST ; +IoError , System , UNPROCESSABLE_ENTITY; +MalformedPayload , InvalidRequest , BAD_REQUEST ; +MaxFieldsLimitExceeded , InvalidRequest , BAD_REQUEST ; +MissingApiKeyActions , InvalidRequest , BAD_REQUEST ; +MissingApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ; +MissingApiKeyIndexes , InvalidRequest , BAD_REQUEST ; +MissingAuthorizationHeader , Auth , UNAUTHORIZED ; +MissingContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ; +MissingDocumentId , InvalidRequest , BAD_REQUEST ; +MissingIndexUid , InvalidRequest , BAD_REQUEST ; +MissingMasterKey , Auth , UNAUTHORIZED ; +MissingPayload , InvalidRequest , BAD_REQUEST ; +MissingSwapIndexes , InvalidRequest , BAD_REQUEST ; +MissingTaskFilters , InvalidRequest , BAD_REQUEST ; +NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY; +PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ; +TaskNotFound , InvalidRequest , NOT_FOUND ; +TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ; +UnretrievableDocument , Internal , BAD_REQUEST ; +UnretrievableErrorCode , InvalidRequest , BAD_REQUEST ; +UnsupportedMediaType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE } impl ErrorCode for JoinError { @@ -409,6 +385,7 @@ impl ErrorCode for io::Error { } } +/// 
Unwrap a result, either its Ok or Err value. pub fn unwrap_any(any: Result) -> T { match any { Ok(any) => any, @@ -416,501 +393,43 @@ pub fn unwrap_any(any: Result) -> T { } } -#[cfg(feature = "test-traits")] -mod strategy { - use proptest::strategy::Strategy; - - use super::*; - - pub(super) fn status_code_strategy() -> impl Strategy { - (100..999u16).prop_map(|i| StatusCode::from_u16(i).unwrap()) - } -} - -pub struct DeserrJson; -pub struct DeserrQueryParam; - -pub type DeserrJsonError = DeserrError; -pub type DeserrQueryParamError = DeserrError; - -pub struct DeserrError { - pub msg: String, - pub code: Code, - _phantom: PhantomData<(Format, C)>, -} -impl std::fmt::Debug for DeserrError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish() - } -} - -impl std::fmt::Display for DeserrError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.msg) - } -} - -impl std::error::Error for DeserrError {} -impl ErrorCode for DeserrError { - fn error_code(&self) -> Code { - self.code - } -} - -impl - MergeWithError> for DeserrError -{ - fn merge( - _self_: Option, - other: DeserrError, - _merge_location: ValuePointerRef, - ) -> Result { - Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData }) - } -} - -impl DeserrJsonError { - pub fn missing_index_uid(field: &str, location: ValuePointerRef) -> Self { - let x = unwrap_any(Self::error::( - None, - deserr::ErrorKind::MissingField { field }, - location, - )); - Self { msg: x.msg, code: MissingIndexUid.error_code(), _phantom: PhantomData } - } -} -impl DeserrJsonError { - pub fn missing_api_key_actions(field: &str, location: ValuePointerRef) -> Self { - let x = unwrap_any(Self::error::( - None, - deserr::ErrorKind::MissingField { field }, - location, - )); - Self { msg: x.msg, code: MissingApiKeyActions.error_code(), _phantom: PhantomData } - } -} -impl DeserrJsonError { - pub fn missing_api_key_expires_at(field: &str, location: ValuePointerRef) -> Self { - let x = unwrap_any(Self::error::( - None, - deserr::ErrorKind::MissingField { field }, - location, - )); - Self { msg: x.msg, code: MissingApiKeyExpiresAt.error_code(), _phantom: PhantomData } - } -} -impl DeserrJsonError { - pub fn missing_api_key_indexes(field: &str, location: ValuePointerRef) -> Self { - let x = unwrap_any(Self::error::( - None, - deserr::ErrorKind::MissingField { field }, - location, - )); - Self { msg: x.msg, code: MissingApiKeyIndexes.error_code(), _phantom: PhantomData } - } -} - -impl DeserrJsonError { - pub fn missing_swap_indexes_indexes(field: &str, location: ValuePointerRef) -> Self { - let x = unwrap_any(Self::error::( - None, - deserr::ErrorKind::MissingField { field }, - location, - )); - Self { msg: x.msg, code: MissingSwapIndexes.error_code(), _phantom: PhantomData } - } -} - -// if the error happened in the root, then an empty string is returned. -pub fn location_json_description(location: ValuePointerRef, article: &str) -> String { - fn rec(location: ValuePointerRef) -> String { - match location { - ValuePointerRef::Origin => String::new(), - ValuePointerRef::Key { key, prev } => rec(*prev) + "." 
+ key, - ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), - } - } - match location { - ValuePointerRef::Origin => String::new(), - _ => { - format!("{article} `{}`", rec(location)) - } - } -} - -fn value_kinds_description_json(kinds: &[ValueKind]) -> String { - fn order(kind: &ValueKind) -> u8 { - match kind { - ValueKind::Null => 0, - ValueKind::Boolean => 1, - ValueKind::Integer => 2, - ValueKind::NegativeInteger => 3, - ValueKind::Float => 4, - ValueKind::String => 5, - ValueKind::Sequence => 6, - ValueKind::Map => 7, - } - } - - fn single_description(kind: &ValueKind) -> &'static str { - match kind { - ValueKind::Null => "null", - ValueKind::Boolean => "a boolean", - ValueKind::Integer => "a positive integer", - ValueKind::NegativeInteger => "an integer", - ValueKind::Float => "a number", - ValueKind::String => "a string", - ValueKind::Sequence => "an array", - ValueKind::Map => "an object", - } - } - - fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) { - let (msg_part, rest): (_, &[ValueKind]) = match kinds { - [] => (String::new(), &[]), - [ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => { - ("a number".to_owned(), rest) - } - [ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => { - ("a number".to_owned(), rest) - } - [ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => { - ("an integer".to_owned(), rest) - } - [a] => (single_description(a).to_owned(), &[]), - [a, rest @ ..] => (single_description(a).to_owned(), rest), - }; - - if rest.is_empty() { - if *count_items == 0 { - message.push_str(&msg_part); - } else if *count_items == 1 { - message.push_str(&format!(" or {msg_part}")); - } else { - message.push_str(&format!(", or {msg_part}")); - } - } else { - if *count_items == 0 { - message.push_str(&msg_part); - } else { - message.push_str(&format!(", {msg_part}")); - } - - *count_items += 1; - description_rec(rest, count_items, message); - } - } - - let mut kinds = kinds.to_owned(); - kinds.sort_by_key(order); - kinds.dedup(); - - if kinds.is_empty() { - "a different value".to_owned() - } else { - let mut message = String::new(); - description_rec(kinds.as_slice(), &mut 0, &mut message); - message - } -} - -fn value_description_with_kind_json(v: &serde_json::Value) -> String { - match v.kind() { - ValueKind::Null => "null".to_owned(), - kind => { - format!( - "{}: `{}`", - value_kinds_description_json(&[kind]), - serde_json::to_string(v).unwrap() - ) - } - } -} - -impl deserr::DeserializeError for DeserrJsonError { - fn error( - _self_: Option, - error: deserr::ErrorKind, - location: ValuePointerRef, - ) -> Result { - let mut message = String::new(); - - message.push_str(&match error { - ErrorKind::IncorrectValueKind { actual, accepted } => { - let expected = value_kinds_description_json(accepted); - // if we're not able to get the value as a string then we print nothing. 
- let received = value_description_with_kind_json(&serde_json::Value::from(actual)); - - let location = location_json_description(location, " at"); - - format!("Invalid value type{location}: expected {expected}, but found {received}") - } - ErrorKind::MissingField { field } => { - // serde_json original message: - // Json deserialize error: missing field `lol` at line 1 column 2 - let location = location_json_description(location, " inside"); - format!("Missing field `{field}`{location}") - } - ErrorKind::UnknownKey { key, accepted } => { - let location = location_json_description(location, " inside"); - format!( - "Unknown field `{}`{location}: expected one of {}", - key, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", ") - ) - } - ErrorKind::UnknownValue { value, accepted } => { - let location = location_json_description(location, " at"); - format!( - "Unknown value `{}`{location}: expected one of {}", - value, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", "), - ) - } - ErrorKind::Unexpected { msg } => { - let location = location_json_description(location, " at"); - // serde_json original message: - // The json payload provided is malformed. `trailing characters at line 1 column 19`. - format!("Invalid value{location}: {msg}") - } - }); - - Err(DeserrJsonError { - msg: message, - code: C::default().error_code(), - _phantom: PhantomData, - }) - } -} - -// if the error happened in the root, then an empty string is returned. -pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String { - fn rec(location: ValuePointerRef) -> String { - match location { - ValuePointerRef::Origin => String::new(), - ValuePointerRef::Key { key, prev } => { - if matches!(prev, ValuePointerRef::Origin) { - key.to_owned() - } else { - rec(*prev) + "." + key - } - } - ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), - } - } - match location { - ValuePointerRef::Origin => String::new(), - _ => { - format!("{article} `{}`", rec(location)) - } - } -} - -impl deserr::DeserializeError for DeserrQueryParamError { - fn error( - _self_: Option, - error: deserr::ErrorKind, - location: ValuePointerRef, - ) -> Result { - let mut message = String::new(); - - message.push_str(&match error { - ErrorKind::IncorrectValueKind { actual, accepted } => { - let expected = value_kinds_description_query_param(accepted); - // if we're not able to get the value as a string then we print nothing. 
- let received = value_description_with_kind_query_param(actual); - - let location = location_query_param_description(location, " for parameter"); - - format!("Invalid value type{location}: expected {expected}, but found {received}") - } - ErrorKind::MissingField { field } => { - // serde_json original message: - // Json deserialize error: missing field `lol` at line 1 column 2 - let location = location_query_param_description(location, " inside"); - format!("Missing parameter `{field}`{location}") - } - ErrorKind::UnknownKey { key, accepted } => { - let location = location_query_param_description(location, " inside"); - format!( - "Unknown parameter `{}`{location}: expected one of {}", - key, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", ") - ) - } - ErrorKind::UnknownValue { value, accepted } => { - let location = location_query_param_description(location, " for parameter"); - format!( - "Unknown value `{}`{location}: expected one of {}", - value, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", "), - ) - } - ErrorKind::Unexpected { msg } => { - let location = location_query_param_description(location, " in parameter"); - // serde_json original message: - // The json payload provided is malformed. `trailing characters at line 1 column 19`. - format!("Invalid value{location}: {msg}") - } - }); - - Err(DeserrQueryParamError { - msg: message, - code: C::default().error_code(), - _phantom: PhantomData, - }) - } -} - -fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String { - "a string".to_owned() -} - -fn value_description_with_kind_query_param(actual: deserr::Value) -> String { - match actual { - deserr::Value::Null => "null".to_owned(), - deserr::Value::Boolean(x) => format!("a boolean: `{x}`"), - deserr::Value::Integer(x) => format!("an integer: `{x}`"), - deserr::Value::NegativeInteger(x) => { - format!("an integer: `{x}`") - } - deserr::Value::Float(x) => { - format!("a number: `{x}`") - } - deserr::Value::String(x) => { - format!("a string: `{x}`") - } - deserr::Value::Sequence(_) => "multiple values".to_owned(), - deserr::Value::Map(_) => "multiple parameters".to_owned(), - } -} - +/// Deserialization when `deserr` cannot parse an API key date. #[derive(Debug)] -pub struct DetailedParseIntError(String); -impl fmt::Display for DetailedParseIntError { +pub struct ParseOffsetDateTimeError(pub String); +impl fmt::Display for ParseOffsetDateTimeError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0) + } +} + +/// Deserialization when `deserr` cannot parse a task date. +#[derive(Debug)] +pub struct InvalidTaskDateError(pub String); +impl std::fmt::Display for InvalidTaskDateError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0) + } +} + +/// Deserialization error when `deserr` cannot parse a String +/// into a bool. 
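+// These parse errors back the typed query parameters used by the routes later
+// in this patch: the `Param<T>` wrapper presumably reaches them through the
+// `FromQueryParameter` impls for `bool` and the integer types in the new
+// `deserr::query_params` module (not shown in this hunk). Keeping the offending
+// raw string lets the message echo back exactly what the client sent.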
+#[derive(Debug)] +pub struct DeserrParseBoolError(pub String); +impl fmt::Display for DeserrParseBoolError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "could not parse `{}` as a boolean, expected either `true` or `false`", self.0) + } +} + +/// Deserialization error when `deserr` cannot parse a String +/// into an integer. +#[derive(Debug)] +pub struct DeserrParseIntError(pub String); +impl fmt::Display for DeserrParseIntError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "could not parse `{}` as a positive integer", self.0) } } -impl std::error::Error for DetailedParseIntError {} - -pub fn parse_u32_query_param(x: String) -> Result> { - x.parse::().map_err(|_e| TakeErrorMessage(DetailedParseIntError(x.to_owned()))) -} -pub fn parse_usize_query_param( - x: String, -) -> Result> { - x.parse::().map_err(|_e| TakeErrorMessage(DetailedParseIntError(x.to_owned()))) -} -pub fn parse_option_usize_query_param( - s: Option, -) -> Result, TakeErrorMessage> { - if let Some(s) = s { - parse_usize_query_param(s).map(Some) - } else { - Ok(None) - } -} -pub fn parse_option_u32_query_param( - s: Option, -) -> Result, TakeErrorMessage> { - if let Some(s) = s { - parse_u32_query_param(s).map(Some) - } else { - Ok(None) - } -} -pub fn parse_option_vec_u32_query_param( - s: Option>, -) -> Result>, TakeErrorMessage> { - if let Some(s) = s { - s.into_iter() - .map(parse_u32_query_param) - .collect::, TakeErrorMessage>>() - .map(Some) - } else { - Ok(None) - } -} -pub fn parse_option_cs_star_or( - s: Option>>, -) -> Result>, TakeErrorMessage> { - if let Some(s) = s.and_then(fold_star_or) as Option> { - s.into_iter() - .map(|s| T::from_str(&s)) - .collect::, T::Err>>() - .map_err(TakeErrorMessage) - .map(Some) - } else { - Ok(None) - } -} - -/// Extracts the raw values from the `StarOr` types and -/// return None if a `StarOr::Star` is encountered. -pub fn fold_star_or(content: impl IntoIterator>) -> Option -where - O: FromIterator, -{ - content - .into_iter() - .map(|value| match value { - StarOr::Star => None, - StarOr::Other(val) => Some(val), - }) - .collect() -} -pub struct TakeErrorMessage(pub T); - -impl MergeWithError> for DeserrJsonError -where - T: std::error::Error, -{ - fn merge( - _self_: Option, - other: TakeErrorMessage, - merge_location: ValuePointerRef, - ) -> Result { - DeserrJsonError::error::( - None, - deserr::ErrorKind::Unexpected { msg: other.0.to_string() }, - merge_location, - ) - } -} - -impl MergeWithError> for DeserrQueryParamError -where - T: std::error::Error, -{ - fn merge( - _self_: Option, - other: TakeErrorMessage, - merge_location: ValuePointerRef, - ) -> Result { - DeserrQueryParamError::error::( - None, - deserr::ErrorKind::Unexpected { msg: other.0.to_string() }, - merge_location, - ) - } -} #[macro_export] macro_rules! internal_error { @@ -924,32 +443,3 @@ macro_rules! 
internal_error { )* } } - -#[cfg(test)] -mod tests { - use deserr::ValueKind; - - use crate::error::value_kinds_description_json; - - #[test] - fn test_value_kinds_description_json() { - insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value"); - - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"an integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object"); - - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"an integer or an array"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); - } -} diff --git a/meilisearch-types/src/index_uid.rs b/meilisearch-types/src/index_uid.rs index 945a57e9e..30b707665 100644 --- a/meilisearch-types/src/index_uid.rs +++ b/meilisearch-types/src/index_uid.rs @@ -29,6 +29,12 @@ impl IndexUid { } } +impl fmt::Display for IndexUid { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} + impl std::ops::Deref for IndexUid { type Target = str; diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index 8543651c5..ea941b775 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -1,8 +1,8 @@ use std::convert::Infallible; -use std::fmt::Display; use std::hash::Hash; +use std::str::FromStr; -use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValuePointerRef}; +use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef}; use enum_iterator::Sequence; use serde::{Deserialize, Serialize}; use time::format_description::well_known::Rfc3339; @@ -10,31 +10,14 @@ use time::macros::{format_description, time}; use time::{Date, OffsetDateTime, PrimitiveDateTime}; use uuid::Uuid; -use crate::error::deserr_codes::*; -use crate::error::{unwrap_any, Code, DeserrJsonError, ErrorCode, 
TakeErrorMessage}; -use crate::index_uid::{IndexUid, IndexUidFormatError}; +use crate::deserr::DeserrJsonError; +use crate::error::{deserr_codes::*, ParseOffsetDateTimeError}; +use crate::error::{unwrap_any, Code}; +use crate::index_uid::IndexUid; use crate::star_or::StarOr; pub type KeyId = Uuid; -impl MergeWithError for DeserrJsonError { - fn merge( - _self_: Option, - other: IndexUidFormatError, - merge_location: deserr::ValuePointerRef, - ) -> std::result::Result { - DeserrJsonError::error::( - None, - deserr::ErrorKind::Unexpected { msg: other.to_string() }, - merge_location, - ) - } -} - -fn parse_uuid_from_str(s: &str) -> Result> { - Uuid::parse_str(s).map_err(TakeErrorMessage) -} - #[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct CreateApiKey { @@ -42,13 +25,13 @@ pub struct CreateApiKey { pub description: Option, #[deserr(default, error = DeserrJsonError)] pub name: Option, - #[deserr(default = Uuid::new_v4(), error = DeserrJsonError, from(&String) = parse_uuid_from_str -> TakeErrorMessage)] + #[deserr(default = Uuid::new_v4(), error = DeserrJsonError, from(&String) = Uuid::from_str -> uuid::Error)] pub uid: KeyId, #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_api_key_actions)] pub actions: Vec, #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_api_key_indexes)] pub indexes: Vec>, - #[deserr(error = DeserrJsonError, from(Option) = parse_expiration_date -> TakeErrorMessage, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] + #[deserr(error = DeserrJsonError, from(Option) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] pub expires_at: Option, } impl CreateApiKey { @@ -149,18 +132,9 @@ impl Key { } } -#[derive(Debug)] -pub struct ParseOffsetDateTimeError(String); -impl Display for ParseOffsetDateTimeError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 
'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0) - } -} -impl std::error::Error for ParseOffsetDateTimeError {} - fn parse_expiration_date( string: Option, -) -> std::result::Result, TakeErrorMessage> { +) -> std::result::Result, ParseOffsetDateTimeError> { let Some(string) = string else { return Ok(None) }; @@ -186,12 +160,12 @@ fn parse_expiration_date( ) { PrimitiveDateTime::new(date, time!(00:00)).assume_utc() } else { - return Err(TakeErrorMessage(ParseOffsetDateTimeError(string))); + return Err(ParseOffsetDateTimeError(string)); }; if datetime > OffsetDateTime::now_utc() { Ok(Some(datetime)) } else { - Err(TakeErrorMessage(ParseOffsetDateTimeError(string))) + Err(ParseOffsetDateTimeError(string)) } } diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs index 354a25fa1..de4084388 100644 --- a/meilisearch-types/src/lib.rs +++ b/meilisearch-types/src/lib.rs @@ -7,7 +7,7 @@ pub mod settings; pub mod star_or; pub mod tasks; pub mod versioning; - +pub mod deserr; pub use milli; pub use milli::{heed, Index}; pub use serde_cs; diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index 99f4ae9e3..8d085d0ff 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -11,8 +11,9 @@ use milli::update::Setting; use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET}; use serde::{Deserialize, Serialize, Serializer}; +use crate::deserr::DeserrJsonError; use crate::error::deserr_codes::*; -use crate::error::{unwrap_any, DeserrJsonError}; +use crate::error::{unwrap_any}; /// The maximimum number of results that the engine /// will be able to return in one search call. diff --git a/meilisearch-types/src/star_or.rs b/meilisearch-types/src/star_or.rs index f56c30b4e..e40884925 100644 --- a/meilisearch-types/src/star_or.rs +++ b/meilisearch-types/src/star_or.rs @@ -1,13 +1,9 @@ -use std::fmt::{Display, Formatter}; -use std::marker::PhantomData; -use std::ops::Deref; -use std::str::FromStr; +use std::{fmt, marker::PhantomData, str::FromStr}; use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind}; -use serde::de::Visitor; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; -use crate::error::unwrap_any; +use crate::{deserr::query_params::FromQueryParameter, error::unwrap_any}; /// A type that tries to match either a star (*) or /// any other thing that implements `FromStr`. 
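
A note on the star_or.rs hunks that follow: the `Deref<Target = str>`-based
`Serialize` and `DeserializeFromValue` impls for `StarOr<T>` are removed, and the
type now round-trips through `FromStr`/`Display` instead, which is also why this
patch gives `IndexUid` a `Display` impl above. A minimal standalone sketch of
that round-trip, with the generics spelled out (an illustration, not code from
the patch):

    use std::{fmt, str::FromStr};

    #[derive(Debug, PartialEq)]
    enum StarOr<T> {
        Star,
        Other(T),
    }

    impl<T: FromStr> FromStr for StarOr<T> {
        type Err = T::Err;
        fn from_str(s: &str) -> Result<Self, Self::Err> {
            if s == "*" {
                Ok(StarOr::Star)
            } else {
                T::from_str(s).map(StarOr::Other)
            }
        }
    }

    // Serialization no longer needs `T: Deref<Target = str>`: any `T: Display`
    // (such as `IndexUid`) can be rendered, with `Star` printed back as `*`.
    impl<T: fmt::Display> fmt::Display for StarOr<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                StarOr::Star => f.write_str("*"),
                StarOr::Other(x) => fmt::Display::fmt(x, f),
            }
        }
    }

    fn main() {
        assert_eq!("42".parse::<StarOr<u32>>().unwrap(), StarOr::Other(42));
        assert_eq!("*".parse::<StarOr<u32>>().unwrap().to_string(), "*");
    }
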
@@ -17,35 +13,6 @@ pub enum StarOr { Other(T), } -impl DeserializeFromValue for StarOr -where - T: FromStr, - E: MergeWithError, -{ - fn deserialize_from_value( - value: deserr::Value, - location: deserr::ValuePointerRef, - ) -> Result { - match value { - deserr::Value::String(v) => match v.as_str() { - "*" => Ok(StarOr::Star), - v => match FromStr::from_str(v) { - Ok(x) => Ok(StarOr::Other(x)), - Err(e) => Err(unwrap_any(E::merge(None, e, location))), - }, - }, - _ => Err(unwrap_any(E::error::( - None, - deserr::ErrorKind::IncorrectValueKind { - actual: value, - accepted: &[ValueKind::String], - }, - location, - ))), - } - } -} - impl FromStr for StarOr { type Err = T::Err; @@ -57,23 +24,11 @@ impl FromStr for StarOr { } } } - -impl> Deref for StarOr { - type Target = str; - - fn deref(&self) -> &Self::Target { +impl fmt::Display for StarOr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::Star => "*", - Self::Other(t) => t.deref(), - } - } -} - -impl> From> for String { - fn from(s: StarOr) -> Self { - match s { - StarOr::Star => "*".to_string(), - StarOr::Other(t) => t.into(), + StarOr::Star => write!(f, "*"), + StarOr::Other(x) => fmt::Display::fmt(x, f), } } } @@ -93,7 +48,7 @@ impl Eq for StarOr {} impl<'de, T, E> Deserialize<'de> for StarOr where T: FromStr, - E: Display, + E: fmt::Display, { fn deserialize(deserializer: D) -> Result where @@ -109,11 +64,11 @@ where impl<'de, T, FE> Visitor<'de> for StarOrVisitor where T: FromStr, - FE: Display, + FE: fmt::Display, { type Value = StarOr; - fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + fn expecting(&self, formatter: &mut fmt::Formatter) -> std::fmt::Result { formatter.write_str("a string") } @@ -139,7 +94,7 @@ where impl Serialize for StarOr where - T: Deref, + T: ToString, { fn serialize(&self, serializer: S) -> Result where @@ -147,7 +102,222 @@ where { match self { StarOr::Star => serializer.serialize_str("*"), - StarOr::Other(other) => serializer.serialize_str(other.deref()), + StarOr::Other(other) => serializer.serialize_str(&other.to_string()), + } + } +} + +impl DeserializeFromValue for StarOr +where + T: FromStr, + E: DeserializeError + MergeWithError, +{ + fn deserialize_from_value( + value: deserr::Value, + location: deserr::ValuePointerRef, + ) -> Result { + match value { + deserr::Value::String(v) => { + if v == "*" { + Ok(StarOr::Star) + } else { + match T::from_str(&v) { + Ok(parsed) => Ok(StarOr::Other(parsed)), + Err(e) => Err(unwrap_any(E::merge(None, e, location))), + } + } + } + _ => Err(unwrap_any(E::error::( + None, + deserr::ErrorKind::IncorrectValueKind { + actual: value, + accepted: &[ValueKind::String], + }, + location, + ))), + } + } +} + +/// A type representing the content of a query parameter that can either not exist, +/// be equal to a star (*), or another value +/// +/// It is a convenient alternative to `Option>`. 
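+// For a query parameter such as `afterEnqueuedAt`, the three cases map as:
+//   (parameter absent)          => OptionStarOr::None
+//   ?afterEnqueuedAt=*          => OptionStarOr::Star
+//   ?afterEnqueuedAt=2021-12-03 => OptionStarOr::Other("2021-12-03")
+// `merge_star_and_none` below then collapses both `None` and `Star` into a
+// plain `Option::None` when the task-filter `Query` is built. The list-valued
+// sibling `OptionStarOrList` behaves the same way, except that a `*` anywhere
+// in the comma-separated list means `Star`, and a parse error on any element
+// takes precedence over the star.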
+#[derive(Debug, Default, Clone, Copy)] +pub enum OptionStarOr { + #[default] + None, + Star, + Other(T), +} + +impl OptionStarOr { + pub fn is_some(&self) -> bool { + match self { + Self::None => false, + Self::Star => false, + Self::Other(_) => true, + } + } + pub fn merge_star_and_none(self) -> Option { + match self { + Self::None | Self::Star => None, + Self::Other(x) => Some(x), + } + } + pub fn try_map Result>(self, map_f: F) -> Result, E> { + match self { + OptionStarOr::None => Ok(OptionStarOr::None), + OptionStarOr::Star => Ok(OptionStarOr::Star), + OptionStarOr::Other(x) => map_f(x).map(OptionStarOr::Other), + } + } +} + +impl FromQueryParameter for OptionStarOr +where + T: FromQueryParameter, +{ + type Err = T::Err; + fn from_query_param(p: &str) -> Result { + match p { + "*" => Ok(OptionStarOr::Star), + s => T::from_query_param(s).map(OptionStarOr::Other), + } + } +} + +impl DeserializeFromValue for OptionStarOr +where + E: DeserializeError + MergeWithError, + T: FromQueryParameter, +{ + fn deserialize_from_value( + value: deserr::Value, + location: deserr::ValuePointerRef, + ) -> Result { + match value { + deserr::Value::String(s) => match s.as_str() { + "*" => Ok(OptionStarOr::Star), + s => match T::from_query_param(s) { + Ok(x) => Ok(OptionStarOr::Other(x)), + Err(e) => Err(unwrap_any(E::merge(None, e, location))), + }, + }, + _ => Err(unwrap_any(E::error::( + None, + deserr::ErrorKind::IncorrectValueKind { + actual: value, + accepted: &[ValueKind::String], + }, + location, + ))), + } + } +} + +/// A type representing the content of a query parameter that can either not exist, be equal to a star (*), or represent a list of other values +#[derive(Debug, Default, Clone)] +pub enum OptionStarOrList { + #[default] + None, + Star, + List(Vec), +} + +impl OptionStarOrList { + pub fn is_some(&self) -> bool { + match self { + Self::None => false, + Self::Star => false, + Self::List(_) => true, + } + } + pub fn map U>(self, map_f: F) -> OptionStarOrList { + match self { + Self::None => OptionStarOrList::None, + Self::Star => OptionStarOrList::Star, + Self::List(xs) => OptionStarOrList::List(xs.into_iter().map(map_f).collect()), + } + } + pub fn try_map Result>( + self, + map_f: F, + ) -> Result, E> { + match self { + Self::None => Ok(OptionStarOrList::None), + Self::Star => Ok(OptionStarOrList::Star), + Self::List(xs) => { + xs.into_iter().map(map_f).collect::, _>>().map(OptionStarOrList::List) + } + } + } + pub fn merge_star_and_none(self) -> Option> { + match self { + Self::None | Self::Star => None, + Self::List(xs) => Some(xs), + } + } + pub fn push(&mut self, el: T) { + match self { + Self::None => *self = Self::List(vec![el]), + Self::Star => (), + Self::List(xs) => xs.push(el), + } + } +} + +impl DeserializeFromValue for OptionStarOrList +where + E: DeserializeError + MergeWithError, + T: FromQueryParameter, +{ + fn deserialize_from_value( + value: deserr::Value, + location: deserr::ValuePointerRef, + ) -> Result { + match value { + deserr::Value::String(s) => { + let mut error = None; + let mut is_star = false; + // CS::::from_str is infaillible + let cs = serde_cs::vec::CS::::from_str(&s).unwrap(); + let len_cs = cs.0.len(); + let mut els = vec![]; + for (i, el_str) in cs.into_iter().enumerate() { + if el_str == "*" { + is_star = true; + } else { + match T::from_query_param(&el_str) { + Ok(el) => { + els.push(el); + } + Err(e) => { + let location = + if len_cs > 1 { location.push_index(i) } else { location }; + error = Some(E::merge(error, e, location)?); + } + } + } + } + 
if let Some(error) = error { + return Err(error); + } + + if is_star { + Ok(OptionStarOrList::Star) + } else { + Ok(OptionStarOrList::List(els)) + } + } + _ => Err(unwrap_any(E::error::( + None, + deserr::ErrorKind::IncorrectValueKind { + actual: value, + accepted: &[ValueKind::String], + }, + location, + ))), } } } diff --git a/meilisearch-types/src/tasks.rs b/meilisearch-types/src/tasks.rs index fd2d31e06..3b7efb97b 100644 --- a/meilisearch-types/src/tasks.rs +++ b/meilisearch-types/src/tasks.rs @@ -1,3 +1,4 @@ +use core::fmt; use std::collections::HashSet; use std::fmt::{Display, Write}; use std::str::FromStr; @@ -9,7 +10,7 @@ use serde::{Deserialize, Serialize, Serializer}; use time::{Duration, OffsetDateTime}; use uuid::Uuid; -use crate::error::{Code, ResponseError}; +use crate::error::ResponseError; use crate::keys::Key; use crate::settings::{Settings, Unchecked}; use crate::InstanceUid; @@ -332,7 +333,7 @@ impl Display for Status { } impl FromStr for Status { - type Err = ResponseError; + type Err = ParseTaskStatusError; fn from_str(status: &str) -> Result { if status.eq_ignore_ascii_case("enqueued") { @@ -346,21 +347,28 @@ impl FromStr for Status { } else if status.eq_ignore_ascii_case("canceled") { Ok(Status::Canceled) } else { - Err(ResponseError::from_msg( - format!( - "`{}` is not a valid task status. Available statuses are {}.", - status, - enum_iterator::all::() - .map(|s| format!("`{s}`")) - .collect::>() - .join(", ") - ), - Code::BadRequest, - )) + Err(ParseTaskStatusError(status.to_owned())) } } } +#[derive(Debug)] +pub struct ParseTaskStatusError(pub String); +impl fmt::Display for ParseTaskStatusError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "`{}` is not a valid task status. Available statuses are {}.", + self.0, + enum_iterator::all::() + .map(|s| format!("`{s}`")) + .collect::>() + .join(", ") + ) + } +} +impl std::error::Error for ParseTaskStatusError {} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Sequence)] #[serde(rename_all = "camelCase")] pub enum Kind { @@ -412,7 +420,7 @@ impl Display for Kind { } } impl FromStr for Kind { - type Err = ResponseError; + type Err = ParseTaskKindError; fn from_str(kind: &str) -> Result { if kind.eq_ignore_ascii_case("indexCreation") { @@ -438,25 +446,32 @@ impl FromStr for Kind { } else if kind.eq_ignore_ascii_case("snapshotCreation") { Ok(Kind::SnapshotCreation) } else { - Err(ResponseError::from_msg( - format!( - "`{}` is not a valid task type. Available types are {}.", - kind, - enum_iterator::all::() - .map(|k| format!( - "`{}`", - // by default serde is going to insert `"` around the value. - serde_json::to_string(&k).unwrap().trim_matches('"') - )) - .collect::>() - .join(", ") - ), - Code::BadRequest, - )) + Err(ParseTaskKindError(kind.to_owned())) } } } +#[derive(Debug)] +pub struct ParseTaskKindError(pub String); +impl fmt::Display for ParseTaskKindError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "`{}` is not a valid task type. Available types are {}.", + self.0, + enum_iterator::all::() + .map(|k| format!( + "`{}`", + // by default serde is going to insert `"` around the value. 
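+                    // e.g. `Kind::DocumentAdditionOrUpdate` serializes to
+                    // `"documentAdditionOrUpdate"` (camelCase), so trimming the
+                    // quotes yields the exact name users pass in the `types` filter.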
+ serde_json::to_string(&k).unwrap().trim_matches('"') + )) + .collect::>() + .join(", ") + ) + } +} +impl std::error::Error for ParseTaskKindError {} + #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub enum Details { DocumentAdditionOrUpdate { received_documents: u64, indexed_documents: Option }, diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index 2c2c2aca8..be852c02e 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", " bytes = "1.2.1" clap = { version = "4.0.9", features = ["derive", "env"] } crossbeam-channel = "0.5.6" -deserr = { path = "/Users/meilisearch/Documents/deserr" } +deserr = "0.1.4" dump = { path = "../dump" } either = "1.8.0" env_logger = "0.9.1" diff --git a/meilisearch/src/routes/api_key.rs b/meilisearch/src/routes/api_key.rs index 917a5e285..cd5bfe0c7 100644 --- a/meilisearch/src/routes/api_key.rs +++ b/meilisearch/src/routes/api_key.rs @@ -4,14 +4,15 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::DeserializeFromValue; use meilisearch_auth::error::AuthControllerError; use meilisearch_auth::AuthController; -use meilisearch_types::error::{deserr_codes::*, DeserrQueryParamError}; -use meilisearch_types::error::{Code, DeserrJsonError, ResponseError, TakeErrorMessage}; +use meilisearch_types::deserr::query_params::Param; +use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; +use meilisearch_types::error::deserr_codes::*; +use meilisearch_types::error::{Code, ResponseError}; use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use uuid::Uuid; -use super::indexes::search::parse_usize_take_error_message; use super::PAGINATION_DEFAULT_LIMIT; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; @@ -50,20 +51,17 @@ pub async fn create_api_key( Ok(HttpResponse::Created().json(res)) } -#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)] +#[derive(DeserializeFromValue, Debug, Clone, Copy)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ListApiKeys { - #[serde(default)] - #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] - pub offset: usize, - #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - #[deserr(default = PAGINATION_DEFAULT_LIMIT(), error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] - pub limit: usize, + #[deserr(default, error = DeserrQueryParamError)] + pub offset: Param, + #[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError)] + pub limit: Param, } impl ListApiKeys { fn as_pagination(self) -> Pagination { - Pagination { offset: self.offset, limit: self.limit } + Pagination { offset: self.offset.0, limit: self.limit.0 } } } @@ -172,7 +170,7 @@ impl KeyView { key: generated_key, uid: key.uid, actions: key.actions, - indexes: key.indexes.into_iter().map(String::from).collect(), + indexes: key.indexes.into_iter().map(|x| x.to_string()).collect(), expires_at: key.expires_at, created_at: key.created_at, updated_at: key.updated_at, diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 2c1b0f692..3316ee10b 100644 --- a/meilisearch/src/routes/indexes/documents.rs 
+++ b/meilisearch/src/routes/indexes/documents.rs @@ -1,5 +1,4 @@ use std::io::ErrorKind; -use std::num::ParseIntError; use actix_web::http::header::CONTENT_TYPE; use actix_web::web::Data; @@ -9,14 +8,15 @@ use deserr::DeserializeFromValue; use futures::StreamExt; use index_scheduler::IndexScheduler; use log::debug; +use meilisearch_types::deserr::query_params::Param; +use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType}; -use meilisearch_types::error::{deserr_codes::*, fold_star_or, DeserrQueryParamError}; -use meilisearch_types::error::{DeserrJsonError, ResponseError, TakeErrorMessage}; +use meilisearch_types::error::deserr_codes::*; +use meilisearch_types::error::ResponseError; use meilisearch_types::heed::RoTxn; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::update::IndexDocumentsMethod; -use meilisearch_types::serde_cs::vec::CS; -use meilisearch_types::star_or::StarOr; +use meilisearch_types::star_or::OptionStarOrList; use meilisearch_types::tasks::KindWithContent; use meilisearch_types::{milli, Document, Index}; use mime::Mime; @@ -27,7 +27,6 @@ use tempfile::tempfile; use tokio::fs::File; use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter}; -use super::search::parse_usize_take_error_message; use crate::analytics::{Analytics, DocumentDeletionKind}; use crate::error::MeilisearchHttpError; use crate::error::PayloadError::ReceivePayload; @@ -36,7 +35,7 @@ use crate::extractors::authentication::GuardedData; use crate::extractors::payload::Payload; use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; -use crate::routes::{PaginationView, SummarizedTaskView}; +use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT}; static ACCEPTED_CONTENT_TYPE: Lazy> = Lazy::new(|| { vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()] @@ -81,12 +80,11 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } -#[derive(Deserialize, Debug, DeserializeFromValue)] +#[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct GetDocument { - // TODO: strongly typed argument here #[deserr(default, error = DeserrQueryParamError)] - fields: Option>>, + fields: OptionStarOrList, } pub async fn get_document( @@ -95,7 +93,7 @@ pub async fn get_document( params: QueryParameter, ) -> Result { let GetDocument { fields } = params.into_inner(); - let attributes_to_retrieve = fields.and_then(fold_star_or); + let attributes_to_retrieve = fields.merge_star_and_none(); let index = index_scheduler.index(&path.index_uid)?; let document = retrieve_document(&index, &path.document_id, attributes_to_retrieve)?; @@ -119,15 +117,15 @@ pub async fn delete_document( Ok(HttpResponse::Accepted().json(task)) } -#[derive(Deserialize, Debug, DeserializeFromValue)] +#[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct BrowseQuery { - #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] - offset: usize, - #[deserr(default = crate::routes::PAGINATION_DEFAULT_LIMIT(), error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] - limit: usize, + #[deserr(default, error = DeserrQueryParamError)] + offset: Param, + 
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError)] + limit: Param, #[deserr(default, error = DeserrQueryParamError)] - fields: Option>>, + fields: OptionStarOrList, } pub async fn get_all_documents( @@ -137,12 +135,12 @@ pub async fn get_all_documents( ) -> Result { debug!("called with params: {:?}", params); let BrowseQuery { limit, offset, fields } = params.into_inner(); - let attributes_to_retrieve = fields.and_then(fold_star_or); + let attributes_to_retrieve = fields.merge_star_and_none(); let index = index_scheduler.index(&index_uid)?; - let (total, documents) = retrieve_documents(&index, offset, limit, attributes_to_retrieve)?; + let (total, documents) = retrieve_documents(&index, offset.0, limit.0, attributes_to_retrieve)?; - let ret = PaginationView::new(offset, limit, total as usize, documents); + let ret = PaginationView::new(offset.0, limit.0, total as usize, documents); debug!("returns: {:?}", ret); Ok(HttpResponse::Ok().json(ret)) diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 216cc448e..9e76f3be6 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -5,8 +5,10 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef}; use index_scheduler::IndexScheduler; use log::debug; -use meilisearch_types::error::{deserr_codes::*, unwrap_any, Code, DeserrQueryParamError}; -use meilisearch_types::error::{DeserrJsonError, ResponseError, TakeErrorMessage}; +use meilisearch_types::deserr::query_params::Param; +use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; +use meilisearch_types::error::ResponseError; +use meilisearch_types::error::{deserr_codes::*, unwrap_any, Code}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::{self, FieldDistribution, Index}; use meilisearch_types::tasks::KindWithContent; @@ -14,7 +16,6 @@ use serde::{Deserialize, Serialize}; use serde_json::json; use time::OffsetDateTime; -use self::search::parse_usize_take_error_message; use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT}; use crate::analytics::Analytics; use crate::extractors::authentication::policies::*; @@ -71,20 +72,17 @@ impl IndexView { } } -#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)] +#[derive(DeserializeFromValue, Debug, Clone, Copy)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ListIndexes { - #[serde(default)] - #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] - pub offset: usize, - #[serde(default = "PAGINATION_DEFAULT_LIMIT")] - #[deserr(default = PAGINATION_DEFAULT_LIMIT(), error = DeserrQueryParamError, from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] - pub limit: usize, + #[deserr(default, error = DeserrQueryParamError)] + pub offset: Param, + #[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError)] + pub limit: Param, } impl ListIndexes { fn as_pagination(self) -> Pagination { - Pagination { offset: self.offset, limit: self.limit } + Pagination { offset: self.offset.0, limit: self.limit.0 } } } diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index ec9364711..7eabfc2ee 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ 
b/meilisearch/src/routes/indexes/search.rs @@ -1,15 +1,12 @@ -use std::str::FromStr; - use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_auth::IndexSearchRules; -use meilisearch_types::error::{ - deserr_codes::*, parse_option_usize_query_param, parse_usize_query_param, - DeserrQueryParamError, DetailedParseIntError, -}; -use meilisearch_types::error::{DeserrJsonError, ResponseError, TakeErrorMessage}; +use meilisearch_types::deserr::{DeserrQueryParamError, DeserrJsonError}; +use meilisearch_types::deserr::query_params::Param; +use meilisearch_types::error::deserr_codes::*; +use meilisearch_types::error::ResponseError; use meilisearch_types::serde_cs::vec::CS; use serde_json::Value; @@ -33,45 +30,33 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } -pub fn parse_usize_take_error_message( - s: &str, -) -> Result> { - usize::from_str(s).map_err(TakeErrorMessage) -} - -pub fn parse_bool_take_error_message( - s: &str, -) -> Result> { - s.parse().map_err(TakeErrorMessage) -} - #[derive(Debug, deserr::DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct SearchQueryGet { #[deserr(default, error = DeserrQueryParamError)] q: Option, - #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrQueryParamError, from(String) = parse_usize_query_param -> TakeErrorMessage)] - offset: usize, - #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrQueryParamError, from(String) = parse_usize_query_param -> TakeErrorMessage)] - limit: usize, - #[deserr(default, error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] - page: Option, - #[deserr(default, error = DeserrQueryParamError, from(Option) = parse_option_usize_query_param -> TakeErrorMessage)] - hits_per_page: Option, + #[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError)] + offset: Param, + #[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError)] + limit: Param, + #[deserr(default, error = DeserrQueryParamError)] + page: Option>, + #[deserr(default, error = DeserrQueryParamError)] + hits_per_page: Option>, #[deserr(default, error = DeserrQueryParamError)] attributes_to_retrieve: Option>, #[deserr(default, error = DeserrQueryParamError)] attributes_to_crop: Option>, - #[deserr(default = DEFAULT_CROP_LENGTH(), error = DeserrQueryParamError, from(String) = parse_usize_query_param -> TakeErrorMessage)] - crop_length: usize, + #[deserr(default = Param(DEFAULT_CROP_LENGTH()), error = DeserrQueryParamError)] + crop_length: Param, #[deserr(default, error = DeserrQueryParamError)] attributes_to_highlight: Option>, #[deserr(default, error = DeserrQueryParamError)] filter: Option, #[deserr(default, error = DeserrQueryParamError)] sort: Option, - #[deserr(default, error = DeserrQueryParamError, from(&String) = parse_bool_take_error_message -> TakeErrorMessage)] - show_matches_position: bool, + #[deserr(default, error = DeserrQueryParamError)] + show_matches_position: Param, #[deserr(default, error = DeserrQueryParamError)] facets: Option>, #[deserr( default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError)] @@ -96,17 +81,17 @@ impl From for SearchQuery { Self { q: other.q, - offset: other.offset, - limit: other.limit, - page: other.page, - hits_per_page: other.hits_per_page, + offset: other.offset.0, + limit: other.limit.0, + page: other.page.as_deref().copied(), + hits_per_page: 
other.hits_per_page.as_deref().copied(), attributes_to_retrieve: other.attributes_to_retrieve.map(|o| o.into_iter().collect()), attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()), - crop_length: other.crop_length, + crop_length: other.crop_length.0, attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()), filter, sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)), - show_matches_position: other.show_matches_position, + show_matches_position: other.show_matches_position.0, facets: other.facets.map(|o| o.into_iter().collect()), highlight_pre_tag: other.highlight_pre_tag, highlight_post_tag: other.highlight_post_tag, diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 404835833..91c3473fa 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -2,7 +2,8 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::IndexScheduler; use log::debug; -use meilisearch_types::error::{DeserrJsonError, ResponseError}; +use meilisearch_types::deserr::DeserrJsonError; +use meilisearch_types::error::ResponseError; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked}; use meilisearch_types::tasks::KindWithContent; @@ -130,7 +131,7 @@ make_setting_route!( "/filterable-attributes", put, std::collections::BTreeSet, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsFilterableAttributes, >, filterable_attributes, @@ -156,7 +157,7 @@ make_setting_route!( "/sortable-attributes", put, std::collections::BTreeSet, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSortableAttributes, >, sortable_attributes, @@ -182,7 +183,7 @@ make_setting_route!( "/displayed-attributes", put, Vec, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsDisplayedAttributes, >, displayed_attributes, @@ -208,7 +209,7 @@ make_setting_route!( "/typo-tolerance", patch, meilisearch_types::settings::TypoSettings, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsTypoTolerance, >, typo_tolerance, @@ -253,7 +254,7 @@ make_setting_route!( "/searchable-attributes", put, Vec, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSearchableAttributes, >, searchable_attributes, @@ -279,7 +280,7 @@ make_setting_route!( "/stop-words", put, std::collections::BTreeSet, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsStopWords, >, stop_words, @@ -304,7 +305,7 @@ make_setting_route!( "/synonyms", put, std::collections::BTreeMap>, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSynonyms, >, synonyms, @@ -329,7 +330,7 @@ make_setting_route!( "/distinct-attribute", put, String, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< 
meilisearch_types::error::deserr_codes::InvalidSettingsDistinctAttribute, >, distinct_attribute, @@ -353,7 +354,7 @@ make_setting_route!( "/ranking-rules", put, Vec, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsRankingRules, >, ranking_rules, @@ -384,7 +385,7 @@ make_setting_route!( "/faceting", patch, meilisearch_types::settings::FacetingSettings, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsFaceting, >, faceting, @@ -409,7 +410,7 @@ make_setting_route!( "/pagination", patch, meilisearch_types::settings::PaginationSettings, - meilisearch_types::error::DeserrJsonError< + meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsPagination, >, pagination, diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs index e681910a2..52ad92c23 100644 --- a/meilisearch/src/routes/mod.rs +++ b/meilisearch/src/routes/mod.rs @@ -41,7 +41,7 @@ where Ok(Some(input.parse()?)) } -const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20; +const PAGINATION_DEFAULT_LIMIT: usize = 20; #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] diff --git a/meilisearch/src/routes/swap_indexes.rs b/meilisearch/src/routes/swap_indexes.rs index 5d6d1e1e5..9adbfecdd 100644 --- a/meilisearch/src/routes/swap_indexes.rs +++ b/meilisearch/src/routes/swap_indexes.rs @@ -2,8 +2,9 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use deserr::DeserializeFromValue; use index_scheduler::IndexScheduler; +use meilisearch_types::deserr::DeserrJsonError; use meilisearch_types::error::deserr_codes::InvalidSwapIndexes; -use meilisearch_types::error::{DeserrJsonError, ResponseError}; +use meilisearch_types::error::ResponseError; use meilisearch_types::tasks::{IndexSwap, KindWithContent}; use serde_json::json; @@ -22,7 +23,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SwapIndexesPayload { - #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_swap_indexes_indexes)] + #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_swap_indexes)] indexes: Vec, } diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index d9c498e4e..060c86910 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -2,21 +2,17 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use deserr::DeserializeFromValue; use index_scheduler::{IndexScheduler, Query, TaskId}; -use meilisearch_types::error::{ - deserr_codes::*, parse_option_cs_star_or, parse_option_u32_query_param, - parse_option_vec_u32_query_param, DeserrQueryParamError, DetailedParseIntError, - TakeErrorMessage, -}; -use meilisearch_types::error::{parse_u32_query_param, ResponseError}; +use meilisearch_types::deserr::query_params::Param; +use meilisearch_types::deserr::DeserrQueryParamError; +use meilisearch_types::error::ResponseError; +use meilisearch_types::error::{deserr_codes::*, InvalidTaskDateError}; use meilisearch_types::index_uid::IndexUid; -use meilisearch_types::serde_cs; use meilisearch_types::settings::{Settings, Unchecked}; -use meilisearch_types::star_or::StarOr; +use meilisearch_types::star_or::{OptionStarOr, 
OptionStarOrList}; use meilisearch_types::tasks::{ serialize_duration, Details, IndexSwap, Kind, KindWithContent, Status, Task, }; -use serde::{Deserialize, Serialize}; -use serde_cs::vec::CS; +use serde::Serialize; use serde_json::json; use time::format_description::well_known::Rfc3339; use time::macros::format_description; @@ -30,7 +26,7 @@ use crate::extractors::authentication::GuardedData; use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; -const DEFAULT_LIMIT: fn() -> u32 = || 20; +const DEFAULT_LIMIT: u32 = 20; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service( @@ -169,62 +165,121 @@ impl From
for DetailsView { #[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TasksFilterQuery { - #[deserr(default = DEFAULT_LIMIT(), error = DeserrQueryParamError, from(String) = parse_u32_query_param -> TakeErrorMessage)] - pub limit: u32, - #[deserr(default, error = DeserrQueryParamError, from(Option) = parse_option_u32_query_param -> TakeErrorMessage)] - pub from: Option, + #[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError)] + pub limit: Param, + #[deserr(default, error = DeserrQueryParamError)] + pub from: Option>, - #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] - pub uids: Option>, - #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] - pub canceled_by: Option>, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] - pub types: Option>, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] - pub statuses: Option>, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] - pub index_uids: Option>, + #[deserr(default, error = DeserrQueryParamError)] + pub uids: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub canceled_by: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub types: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub statuses: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub index_uids: OptionStarOrList, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] - pub after_enqueued_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] - pub before_enqueued_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] - pub after_started_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] - pub before_started_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] - pub after_finished_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] - pub before_finished_at: Option, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + pub after_enqueued_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + pub before_enqueued_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + pub after_started_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + pub before_started_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + pub 
after_finished_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + pub before_finished_at: OptionStarOr, +} +impl TasksFilterQuery { + fn into_query(self) -> Query { + Query { + limit: Some(self.limit.0), + from: self.from.as_deref().copied(), + statuses: self.statuses.merge_star_and_none(), + types: self.types.merge_star_and_none(), + index_uids: self.index_uids.map(|x| x.to_string()).merge_star_and_none(), + uids: self.uids.merge_star_and_none(), + canceled_by: self.canceled_by.merge_star_and_none(), + before_enqueued_at: self.before_enqueued_at.merge_star_and_none(), + after_enqueued_at: self.after_enqueued_at.merge_star_and_none(), + before_started_at: self.before_started_at.merge_star_and_none(), + after_started_at: self.after_started_at.merge_star_and_none(), + before_finished_at: self.before_finished_at.merge_star_and_none(), + after_finished_at: self.after_finished_at.merge_star_and_none(), + } + } } -#[derive(Deserialize, Debug, DeserializeFromValue)] +impl TaskDeletionOrCancelationQuery { + fn is_empty(&self) -> bool { + matches!( + self, + TaskDeletionOrCancelationQuery { + uids: OptionStarOrList::None, + canceled_by: OptionStarOrList::None, + types: OptionStarOrList::None, + statuses: OptionStarOrList::None, + index_uids: OptionStarOrList::None, + after_enqueued_at: OptionStarOr::None, + before_enqueued_at: OptionStarOr::None, + after_started_at: OptionStarOr::None, + before_started_at: OptionStarOr::None, + after_finished_at: OptionStarOr::None, + before_finished_at: OptionStarOr::None + } + ) + } +} + +#[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TaskDeletionOrCancelationQuery { - #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] - pub uids: Option>, - #[deserr(default, error = DeserrQueryParamError, from(Option>) = parse_option_vec_u32_query_param -> TakeErrorMessage)] - pub canceled_by: Option>, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] - pub types: Option>, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] - pub statuses: Option>, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option>>) = parse_option_cs_star_or:: -> TakeErrorMessage)] - pub index_uids: Option>, + #[deserr(default, error = DeserrQueryParamError)] + pub uids: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub canceled_by: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub types: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub statuses: OptionStarOrList, + #[deserr(default, error = DeserrQueryParamError)] + pub index_uids: OptionStarOrList, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] - pub after_enqueued_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] - pub before_enqueued_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] - pub after_started_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = 
deserialize_date_before -> TakeErrorMessage)] - pub before_started_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_after -> TakeErrorMessage)] - pub after_finished_at: Option, - #[deserr(default, error = DeserrQueryParamError, default = None, from(Option) = deserialize_date_before -> TakeErrorMessage)] - pub before_finished_at: Option, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + pub after_enqueued_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + pub before_enqueued_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + pub after_started_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + pub before_started_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + pub after_finished_at: OptionStarOr, + #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + pub before_finished_at: OptionStarOr, +} +impl TaskDeletionOrCancelationQuery { + fn into_query(self) -> Query { + Query { + limit: None, + from: None, + statuses: self.statuses.merge_star_and_none(), + types: self.types.merge_star_and_none(), + index_uids: self.index_uids.map(|x| x.to_string()).merge_star_and_none(), + uids: self.uids.merge_star_and_none(), + canceled_by: self.canceled_by.merge_star_and_none(), + before_enqueued_at: self.before_enqueued_at.merge_star_and_none(), + after_enqueued_at: self.after_enqueued_at.merge_star_and_none(), + before_started_at: self.before_started_at.merge_star_and_none(), + after_started_at: self.after_started_at.merge_star_and_none(), + before_finished_at: self.before_finished_at.merge_star_and_none(), + after_finished_at: self.after_finished_at.merge_star_and_none(), + } + } } async fn cancel_tasks( @@ -233,57 +288,31 @@ async fn cancel_tasks( req: HttpRequest, analytics: web::Data, ) -> Result { - let TaskDeletionOrCancelationQuery { - types, - uids, - canceled_by, - statuses, - index_uids, - after_enqueued_at, - before_enqueued_at, - after_started_at, - before_started_at, - after_finished_at, - before_finished_at, - } = params.into_inner(); + let params = params.into_inner(); + + if params.is_empty() { + return Err(index_scheduler::Error::TaskCancelationWithEmptyQuery.into()); + } analytics.publish( "Tasks Canceled".to_string(), json!({ - "filtered_by_uid": uids.is_some(), - "filtered_by_index_uid": index_uids.is_some(), - "filtered_by_type": types.is_some(), - "filtered_by_status": statuses.is_some(), - "filtered_by_canceled_by": canceled_by.is_some(), - "filtered_by_before_enqueued_at": before_enqueued_at.is_some(), - "filtered_by_after_enqueued_at": after_enqueued_at.is_some(), - "filtered_by_before_started_at": before_started_at.is_some(), - "filtered_by_after_started_at": after_started_at.is_some(), - "filtered_by_before_finished_at": before_finished_at.is_some(), - "filtered_by_after_finished_at": after_finished_at.is_some(), + "filtered_by_uid": params.uids.is_some(), + "filtered_by_index_uid": params.index_uids.is_some(), + "filtered_by_type": params.types.is_some(), + "filtered_by_status": params.statuses.is_some(), + 
"filtered_by_canceled_by": params.canceled_by.is_some(), + "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(), + "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(), + "filtered_by_before_started_at": params.before_started_at.is_some(), + "filtered_by_after_started_at": params.after_started_at.is_some(), + "filtered_by_before_finished_at": params.before_finished_at.is_some(), + "filtered_by_after_finished_at": params.after_finished_at.is_some(), }), Some(&req), ); - let query = Query { - limit: None, - from: None, - statuses, - types, - index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()), - uids, - canceled_by, - before_enqueued_at, - after_enqueued_at, - before_started_at, - after_started_at, - before_finished_at, - after_finished_at, - }; - - if query.is_empty() { - return Err(index_scheduler::Error::TaskCancelationWithEmptyQuery.into()); - } + let query = params.into_query(); let tasks = index_scheduler.get_task_ids_from_authorized_indexes( &index_scheduler.read_txn()?, @@ -305,58 +334,30 @@ async fn delete_tasks( req: HttpRequest, analytics: web::Data, ) -> Result { - let TaskDeletionOrCancelationQuery { - types, - uids, - canceled_by, - statuses, - index_uids, + let params = params.into_inner(); - after_enqueued_at, - before_enqueued_at, - after_started_at, - before_started_at, - after_finished_at, - before_finished_at, - } = params.into_inner(); + if params.is_empty() { + return Err(index_scheduler::Error::TaskDeletionWithEmptyQuery.into()); + } analytics.publish( "Tasks Deleted".to_string(), json!({ - "filtered_by_uid": uids.is_some(), - "filtered_by_index_uid": index_uids.is_some(), - "filtered_by_type": types.is_some(), - "filtered_by_status": statuses.is_some(), - "filtered_by_canceled_by": canceled_by.is_some(), - "filtered_by_before_enqueued_at": before_enqueued_at.is_some(), - "filtered_by_after_enqueued_at": after_enqueued_at.is_some(), - "filtered_by_before_started_at": before_started_at.is_some(), - "filtered_by_after_started_at": after_started_at.is_some(), - "filtered_by_before_finished_at": before_finished_at.is_some(), - "filtered_by_after_finished_at": after_finished_at.is_some(), + "filtered_by_uid": params.uids.is_some(), + "filtered_by_index_uid": params.index_uids.is_some(), + "filtered_by_type": params.types.is_some(), + "filtered_by_status": params.statuses.is_some(), + "filtered_by_canceled_by": params.canceled_by.is_some(), + "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(), + "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(), + "filtered_by_before_started_at": params.before_started_at.is_some(), + "filtered_by_after_started_at": params.after_started_at.is_some(), + "filtered_by_before_finished_at": params.before_finished_at.is_some(), + "filtered_by_after_finished_at": params.after_finished_at.is_some(), }), Some(&req), ); - - let query = Query { - limit: None, - from: None, - statuses, - types, - index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()), - uids, - canceled_by, - after_enqueued_at, - before_enqueued_at, - after_started_at, - before_started_at, - after_finished_at, - before_finished_at, - }; - - if query.is_empty() { - return Err(index_scheduler::Error::TaskDeletionWithEmptyQuery.into()); - } + let query = params.into_query(); let tasks = index_scheduler.get_task_ids_from_authorized_indexes( &index_scheduler.read_txn()?, @@ -386,43 +387,13 @@ async fn get_tasks( req: HttpRequest, analytics: web::Data, ) -> Result { - let 
params = params.into_inner();
+    let mut params = params.into_inner();
     analytics.get_tasks(&params, &req);
 
-    let TasksFilterQuery {
-        types,
-        uids,
-        canceled_by,
-        statuses,
-        index_uids,
-        limit,
-        from,
-        after_enqueued_at,
-        before_enqueued_at,
-        after_started_at,
-        before_started_at,
-        after_finished_at,
-        before_finished_at,
-    } = params;
-
     // We +1 just to know if there is more after this "page" or not.
-    let limit = limit.saturating_add(1);
-
-    let query = index_scheduler::Query {
-        limit: Some(limit),
-        from,
-        statuses,
-        types,
-        index_uids: index_uids.map(|xs| xs.into_iter().map(|s| s.to_string()).collect()),
-        uids,
-        canceled_by,
-        before_enqueued_at,
-        after_enqueued_at,
-        before_started_at,
-        after_started_at,
-        before_finished_at,
-        after_finished_at,
-    };
+    params.limit.0 = params.limit.0.saturating_add(1);
+    let limit = params.limit.0;
+    let query = params.into_query();
 
     let mut tasks_results: Vec<TaskView> = index_scheduler
         .get_tasks_from_authorized_indexes(
@@ -488,7 +459,7 @@ pub enum DeserializeDateOption {
 pub fn deserialize_date(
     value: &str,
     option: DeserializeDateOption,
-) -> std::result::Result<OffsetDateTime, TakeErrorMessage<InvalidTaskDateError>> {
+) -> std::result::Result<OffsetDateTime, InvalidTaskDateError> {
     // We can't parse using time's rfc3339 format, since then we won't know what part of the
     // datetime was not explicitly specified, and thus we won't be able to increment it to the
     // next step.
@@ -510,45 +481,26 @@ pub fn deserialize_date(
             }
         }
     } else {
-        Err(TakeErrorMessage(InvalidTaskDateError(value.to_owned())))
+        Err(InvalidTaskDateError(value.to_owned()))
     }
 }
 
-pub fn deserialize_date_before(
-    value: Option<String>,
-) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<InvalidTaskDateError>> {
-    if let Some(value) = value {
-        let date = deserialize_date(&value, DeserializeDateOption::Before)?;
-        Ok(Some(date))
-    } else {
-        Ok(None)
-    }
-}
 pub fn deserialize_date_after(
-    value: Option<String>,
-) -> std::result::Result<Option<OffsetDateTime>, TakeErrorMessage<InvalidTaskDateError>> {
-    if let Some(value) = value {
-        let date = deserialize_date(&value, DeserializeDateOption::After)?;
-        Ok(Some(date))
-    } else {
-        Ok(None)
-    }
+    value: OptionStarOr<String>,
+) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
+    value.try_map(|x| deserialize_date(&x, DeserializeDateOption::After))
 }
-
-#[derive(Debug)]
-pub struct InvalidTaskDateError(String);
-impl std::fmt::Display for InvalidTaskDateError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0)
-    }
+pub fn deserialize_date_before(
+    value: OptionStarOr<String>,
+) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
+    value.try_map(|x| deserialize_date(&x, DeserializeDateOption::Before))
 }
-impl std::error::Error for InvalidTaskDateError {}
 
 #[cfg(test)]
 mod tests {
     use deserr::DeserializeFromValue;
     use meili_snap::snapshot;
-    use meilisearch_types::error::DeserrQueryParamError;
+    use meilisearch_types::deserr::DeserrQueryParamError;
 
     use crate::extractors::query_parameters::QueryParameter;
     use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
@@ -566,65 +518,71 @@ mod tests {
         {
             let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
             let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_started_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.after_finished_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_finished_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
         }
         {
             let params = "afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
             let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
-            snapshot!(format!("{:?}", query.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
+            snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
         }
         {
             let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
             let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
         }
         {
             let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
             let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
        }
        {
             let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
             let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
-            snapshot!(format!("{:?}", query.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00");
+            snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 
9:55:06.2000003 +00:00:00)"); + } + { + // Stars are allowed in date fields as well + let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*"; + let query = deserr_query_params::(params).unwrap(); + snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }"); } { let params = "afterFinishedAt=2021"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_after_finished_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" }"###); } { let params = "beforeFinishedAt=2021"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_before_finished_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" }"###); } { let params = "afterEnqueuedAt=2021-12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_after_enqueued_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" }"###); } { let params = "beforeEnqueuedAt=2021-12-03T23"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_before_enqueued_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" }"###); } { let params = "afterStartedAt=2021-12-03T23:45"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_after_started_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-after-started-at" }"###); } { let params = "beforeStartedAt=2021-12-03T23:45"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_before_started_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-before-started-at" }"###); } } @@ -633,22 +591,27 @@ mod tests { { let params = "uids=78,1,12,73"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.uids.unwrap()), @"[78, 1, 12, 73]"); + snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])"); } { let params = "uids=1"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.uids.unwrap()), @"[1]"); + snapshot!(format!("{:?}", query.uids), @"List([1])"); + } + { + let params = "uids=cat,*,dog"; + let err = deserr_query_params::(params).unwrap_err(); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer", error_code: "invalid_task_uids", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-uids" }"###); } { let params = "uids=78,hello,world"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `uids`: could not parse `hello` as a positive integer"); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer", error_code: "invalid_task_uids", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-uids" }"###); } { let params = "uids=cat"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `uids`: could not parse `cat` as a positive integer"); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `uids`: could not parse `cat` as a positive integer", error_code: "invalid_task_uids", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-uids" }"###); } } @@ -657,17 +620,17 @@ mod tests { { let params = "statuses=succeeded,failed,enqueued,processing,canceled"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.statuses.unwrap()), @"[Succeeded, Failed, Enqueued, Processing, Canceled]"); + snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])"); } { let params = "statuses=enqueued"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.statuses.unwrap()), @"[Enqueued]"); + 
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])"); } { let params = "statuses=finished"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", error_code: "invalid_task_statuses", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-statuses" }"###); } } #[test] @@ -675,17 +638,17 @@ mod tests { { let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.types.unwrap()), @"[DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation]"); + snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])"); } { let params = "types=settingsUpdate"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.types.unwrap()), @"[SettingsUpdate]"); + snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])"); } { let params = "types=createIndex"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", error_code: "invalid_task_types", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-types" }"###); } } #[test] @@ -693,22 +656,22 @@ mod tests { { let params = "indexUids=toto,tata-78"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.index_uids.unwrap()), @r###"[IndexUid("toto"), IndexUid("tata-78")]"###); + snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###); } { let params = "indexUids=index_a"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query.index_uids.unwrap()), @r###"[IndexUid("index_a")]"###); + snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###); } { let params = "indexUids=1,hé"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `indexUids`: `hé` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", error_code: "invalid_index_uid", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-index-uid" }"###); } { let params = "indexUids=hé"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)."); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", error_code: "invalid_index_uid", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-index-uid" }"###); } } @@ -717,38 +680,53 @@ mod tests { { let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: 15, from: Some(12), uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: Some([Succeeded, Enqueued]), index_uids: Some([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###); + snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###); } { // Stars should translate to `None` in the query // Verify value of the default limit let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: 20, from: None, uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"); + snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"); } { // Stars should also translate to `None` in task deletion/cancelation queries let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3"; let query = deserr_query_params::(params).unwrap(); - snapshot!(format!("{:?}", query), 
@"TaskDeletionOrCancelationQuery { uids: Some([1, 2, 3]), canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"); + snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"); } { - // Stars in uids not allowed - let params = "uids=*"; + // Star in from not allowed + let params = "uids=*&from=*"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Invalid value in parameter `uids`: could not parse `*` as a positive integer"); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `from`: could not parse `*` as a positive integer", error_code: "invalid_task_from", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-from" }"###); } { // From not allowed in task deletion/cancelation queries let params = "from=12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`"); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", error_code: "bad_request", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#bad-request" }"###); } { // Limit not allowed in task deletion/cancelation queries let params = "limit=12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err}"), @"Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`"); + snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", error_code: "bad_request", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#bad-request" }"###); + } + } + + #[test] + fn deserialize_task_delete_or_cancel_empty() { + { + let params = ""; + let query = deserr_query_params::(params).unwrap(); + assert!(query.is_empty()); + } + { + let params = "statuses=*"; + let query = deserr_query_params::(params).unwrap(); + assert!(!query.is_empty()); + snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"); } } } diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index bfb0bf160..c199944f1 
100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -5,8 +5,8 @@ use std::time::Instant; use deserr::DeserializeFromValue; use either::Either; +use meilisearch_types::deserr::DeserrJsonError; use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::DeserrJsonError; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use meilisearch_types::{milli, Document}; use milli::tokenizer::TokenizerBuilder; @@ -15,7 +15,7 @@ use milli::{ SortError, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET, }; use regex::Regex; -use serde::{Deserialize, Serialize}; +use serde::Serialize; use serde_json::{json, Value}; use crate::error::MeilisearchHttpError; @@ -74,9 +74,8 @@ impl SearchQuery { } } -#[derive(Deserialize, Debug, Clone, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)] #[deserr(rename_all = camelCase)] -#[serde(rename_all = "camelCase")] pub enum MatchingStrategy { /// Remove query words from last to first Last, diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index 99f711745..d8a19fff1 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -295,7 +295,7 @@ async fn search_bad_show_matches_position() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Invalid value in parameter `showMatchesPosition`: provided string was not `true` or `false`", + "message": "Invalid value in parameter `showMatchesPosition`: could not parse `doggo` as a boolean, expected either `true` or `false`", "code": "invalid_search_show_matches_position", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position" diff --git a/meilisearch/tests/tasks/errors.rs b/meilisearch/tests/tasks/errors.rs index fd4c6d489..a15c0eca0 100644 --- a/meilisearch/tests/tasks/errors.rs +++ b/meilisearch/tests/tasks/errors.rs @@ -43,7 +43,7 @@ async fn task_bad_uids() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Invalid value in parameter `uids`: could not parse `dogo` as a positive integer", + "message": "Invalid value in parameter `uids[1]`: could not parse `dogo` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-task-uids" From 07b90dec08fac0f5c7938d1e6cd7336b2df2030e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 17 Jan 2023 09:40:42 +0100 Subject: [PATCH 006/186] Remove unused proptest dependency --- Cargo.lock | 303 +++++++++-------------------- dump/src/reader/v4/errors.rs | 2 - dump/src/reader/v5/errors.rs | 1 - meilisearch-types/Cargo.toml | 5 - meilisearch-types/src/error.rs | 1 - meilisearch-types/src/index_uid.rs | 5 +- 6 files changed, 90 insertions(+), 227 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4bbe05745..9bbaa0525 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -77,8 +77,8 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" dependencies = [ - "quote 1.0.23", - "syn 1.0.107", + "quote", + "syn", ] [[package]] @@ -211,9 +211,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fa9362663c8643d67b2d5eafba49e4cb2c8a053a29ed00a0bea121f17c76b13" dependencies = [ "actix-router", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + 
"quote", + "syn", ] [[package]] @@ -324,9 +324,9 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -335,9 +335,9 @@ version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -423,21 +423,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - [[package]] name = "bitflags" version = "1.3.2" @@ -541,9 +526,9 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fe233b960f12f8007e3db2d136e3cb1c291bfd7396e384ee76025fc1a3932b4" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -702,9 +687,9 @@ checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" dependencies = [ "heck", "proc-macro-error", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -715,9 +700,9 @@ checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014" dependencies = [ "heck", "proc-macro-error", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -744,9 +729,9 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -961,10 +946,10 @@ checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.49", - "quote 1.0.23", + "proc-macro2", + "quote", "strsim", - "syn 1.0.107", + "syn", ] [[package]] @@ -974,8 +959,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e" dependencies = [ "darling_core", - "quote 1.0.23", - "syn 1.0.107", + "quote", + "syn", ] [[package]] @@ -994,9 +979,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f91d4cfa921f1c05904dc3c57b4a32c38aed3340cce209f3a6fd1478babafc4" dependencies = [ "darling", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1006,7 +991,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68" dependencies = [ "derive_builder_core", - "syn 1.0.107", + "syn", ] [[package]] @@ -1016,10 +1001,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ 
"convert_case 0.4.0", - "proc-macro2 1.0.49", - "quote 1.0.23", + "proc-macro2", + "quote", "rustc_version", - "syn 1.0.107", + "syn", ] [[package]] @@ -1047,9 +1032,9 @@ name = "deserr-internal" version = "0.1.4" dependencies = [ "convert_case 0.5.0", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1239,9 +1224,9 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "828de45d0ca18782232dfb8f3ea9cc428e8ced380eb26a520baaacfc70de39ce" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1304,9 +1289,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c9bb4a2c13ffb3a93a39902aaf4e7190a1706a4779b6db0449aee433d26c4a" dependencies = [ "darling", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", "uuid 0.8.2", ] @@ -1432,9 +1417,9 @@ version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -1510,9 +1495,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e45727250e75cc04ff2846a66397da8ef2b3db8e40e0cef4df67950a07621eb9" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2254,9 +2239,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81" dependencies = [ "log", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2276,9 +2261,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f08150cf2bab1fc47c2196f4f41173a27fcd0f684165e5458c0046b53a472e2f" dependencies = [ "once_cell", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2424,8 +2409,6 @@ dependencies = [ "meili-snap", "memmap2", "milli", - "proptest", - "proptest-derive", "roaring", "serde", "serde-cs", @@ -2815,9 +2798,9 @@ checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2915,9 +2898,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", "version_check", ] @@ -2927,20 +2910,11 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", + "proc-macro2", + "quote", "version_check", ] -[[package]] -name = "proc-macro2" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -dependencies = [ - "unicode-xid 0.1.0", -] - [[package]] name = "proc-macro2" version = "1.0.49" @@ -2980,71 +2954,19 @@ dependencies = [ 
"thiserror", ] -[[package]] -name = "proptest" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" -dependencies = [ - "bit-set", - "bitflags", - "byteorder", - "lazy_static", - "num-traits", - "quick-error 2.0.1", - "rand", - "rand_chacha", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", -] - -[[package]] -name = "proptest-derive" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90b46295382dc76166cb7cf2bb4a97952464e4b7ed5a43e6cd34e1fec3349ddc" -dependencies = [ - "proc-macro2 0.4.30", - "quote 0.6.13", - "syn 0.15.44", -] - [[package]] name = "protobuf" version = "2.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - -[[package]] -name = "quote" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" -dependencies = [ - "proc-macro2 0.4.30", -] - [[package]] name = "quote" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" dependencies = [ - "proc-macro2 1.0.49", + "proc-macro2", ] [[package]] @@ -3077,15 +2999,6 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rand_xorshift" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" -dependencies = [ - "rand_core", -] - [[package]] name = "rayon" version = "1.6.1" @@ -3300,18 +3213,6 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" -[[package]] -name = "rusty-fork" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" -dependencies = [ - "fnv", - "quick-error 1.2.3", - "tempfile", - "wait-timeout", -] - [[package]] name = "ryu" version = "1.0.12" @@ -3387,9 +3288,9 @@ version = "1.0.152" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -3584,25 +3485,14 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" -[[package]] -name = "syn" -version = "0.15.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" -dependencies = [ - "proc-macro2 0.4.30", - "quote 0.6.13", - "unicode-xid 0.1.0", -] - [[package]] name = "syn" version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", + "proc-macro2", + "quote", "unicode-ident", ] @@ -3621,10 +3511,10 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", - "unicode-xid 0.2.4", + "proc-macro2", + "quote", + "syn", + "unicode-xid", ] [[package]] @@ -3706,9 +3596,9 @@ version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -3779,9 +3669,9 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -3910,12 +3800,6 @@ version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" - [[package]] name = "unicode-xid" version = "0.2.4" @@ -3998,15 +3882,6 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" -[[package]] -name = "wait-timeout" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" -dependencies = [ - "libc", -] - [[package]] name = "walkdir" version = "2.3.2" @@ -4053,9 +3928,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", "wasm-bindgen-shared", ] @@ -4077,7 +3952,7 @@ version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" dependencies = [ - "quote 1.0.23", + "quote", "wasm-bindgen-macro-support", ] @@ -4087,9 +3962,9 @@ version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ - "proc-macro2 1.0.49", - "quote 1.0.23", - "syn 1.0.107", + "proc-macro2", + "quote", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4286,8 +4161,8 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb" dependencies = [ - "proc-macro2 1.0.49", - "syn 1.0.107", + "proc-macro2", + "syn", "synstructure", ] diff --git a/dump/src/reader/v4/errors.rs b/dump/src/reader/v4/errors.rs index 5a9a8d5df..afa640de4 100644 --- a/dump/src/reader/v4/errors.rs +++ b/dump/src/reader/v4/errors.rs @@ -5,10 +5,8 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] pub 
struct ResponseError { #[serde(skip)] - #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))] pub code: StatusCode, pub message: String, #[serde(rename = "code")] diff --git a/dump/src/reader/v5/errors.rs b/dump/src/reader/v5/errors.rs index c918c301c..f4067d4c6 100644 --- a/dump/src/reader/v5/errors.rs +++ b/dump/src/reader/v5/errors.rs @@ -5,7 +5,6 @@ use serde::Deserialize; #[derive(Debug, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] #[cfg_attr(test, derive(serde::Serialize))] pub struct ResponseError { #[serde(skip)] diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index cba3fc5d9..467454b4e 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -17,8 +17,6 @@ flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" milli = { path = "/Users/meilisearch/Documents/milli2/milli", default-features = false } -proptest = { version = "1.0.0", optional = true } -proptest-derive = { version = "0.3.0", optional = true } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } serde-cs = "0.2.4" @@ -33,8 +31,6 @@ uuid = { version = "1.1.2", features = ["serde", "v4"] } [dev-dependencies] insta = "1.19.1" meili-snap = { path = "../meili-snap" } -proptest = "1.0.0" -proptest-derive = "0.3.0" [features] # all specialized tokenizations @@ -48,4 +44,3 @@ hebrew = ["milli/hebrew"] japanese = ["milli/japanese"] # thai specialized tokenization thai = ["milli/thai"] -test-traits = ["proptest", "proptest-derive"] diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 2fb55ee31..21c079678 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] pub struct ResponseError { #[serde(skip)] code: StatusCode, diff --git a/meilisearch-types/src/index_uid.rs b/meilisearch-types/src/index_uid.rs index 30b707665..0fa5ef530 100644 --- a/meilisearch-types/src/index_uid.rs +++ b/meilisearch-types/src/index_uid.rs @@ -9,10 +9,7 @@ use crate::error::{Code, ErrorCode}; /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] -pub struct IndexUid( - #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String, -); +pub struct IndexUid(String); impl IndexUid { pub fn new_unchecked(s: impl AsRef) -> Self { From b781f9a0f911f471ca06728f401e2439529346c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 17 Jan 2023 11:05:01 +0100 Subject: [PATCH 007/186] cargo fmt --- meilisearch-types/src/keys.rs | 4 ++-- meilisearch-types/src/lib.rs | 5 ++--- meilisearch-types/src/settings.rs | 2 +- meilisearch-types/src/star_or.rs | 10 +++++++--- meilisearch/src/routes/indexes/mod.rs | 4 ++-- meilisearch/src/routes/indexes/search.rs | 2 +- meilisearch/src/routes/tasks.rs | 4 ++-- 7 files changed, 17 insertions(+), 14 deletions(-) diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index ea941b775..655af9b31 100644 --- a/meilisearch-types/src/keys.rs +++ 
b/meilisearch-types/src/keys.rs
@@ -11,8 +11,8 @@ use time::{Date, OffsetDateTime, PrimitiveDateTime};
 use uuid::Uuid;
 
 use crate::deserr::DeserrJsonError;
-use crate::error::{deserr_codes::*, ParseOffsetDateTimeError};
-use crate::error::{unwrap_any, Code};
+use crate::error::deserr_codes::*;
+use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
 use crate::index_uid::IndexUid;
 use crate::star_or::StarOr;
 
diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs
index de4084388..14602b5aa 100644
--- a/meilisearch-types/src/lib.rs
+++ b/meilisearch-types/src/lib.rs
@@ -1,4 +1,5 @@
 pub mod compression;
+pub mod deserr;
 pub mod document_formats;
 pub mod error;
 pub mod index_uid;
@@ -7,12 +8,10 @@ pub mod settings;
 pub mod star_or;
 pub mod tasks;
 pub mod versioning;
-pub mod deserr;
 
-pub use milli;
 pub use milli::{heed, Index};
-pub use serde_cs;
 use uuid::Uuid;
 pub use versioning::VERSION_FILE_NAME;
+pub use {milli, serde_cs};
 
 pub type Document = serde_json::Map<String, serde_json::Value>;
 pub type InstanceUid = Uuid;
diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs
index 8d085d0ff..57899a9d1 100644
--- a/meilisearch-types/src/settings.rs
+++ b/meilisearch-types/src/settings.rs
@@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize, Serializer};
 
 use crate::deserr::DeserrJsonError;
 use crate::error::deserr_codes::*;
-use crate::error::{unwrap_any};
+use crate::error::unwrap_any;
 
 /// The maximimum number of results that the engine
 /// will be able to return in one search call.
diff --git a/meilisearch-types/src/star_or.rs b/meilisearch-types/src/star_or.rs
index e40884925..135f610c4 100644
--- a/meilisearch-types/src/star_or.rs
+++ b/meilisearch-types/src/star_or.rs
@@ -1,9 +1,13 @@
-use std::{fmt, marker::PhantomData, str::FromStr};
+use std::fmt;
+use std::marker::PhantomData;
+use std::str::FromStr;
 
 use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
-use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
+use serde::de::Visitor;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
 
-use crate::{deserr::query_params::FromQueryParameter, error::unwrap_any};
+use crate::deserr::query_params::FromQueryParameter;
+use crate::error::unwrap_any;
 
 /// A type that tries to match either a star (*) or
 /// any other thing that implements `FromStr`.
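The `StarOr` type whose imports are reshuffled above is the workhorse behind most of the query-parameter behavior in this series: `indexUids=*` deserializes to an explicit `Star` value rather than an absent filter, which is how the `deserialize_task_delete_or_cancel_empty` test can tell "no filter given" apart from "filter explicitly set to everything". A minimal, self-contained sketch of the pattern follows. Only the enum shape mirrors `meilisearch_types::star_or::StarOr`; the `parse` and `try_map` helpers here are illustrative stand-ins written for this note, not the crate's actual API (the real type instead implements the `serde` and `deserr` traits imported in the hunk above).

use std::str::FromStr;

/// Either a literal `*` (meaning "match everything") or a concrete
/// value parsed with `FromStr`.
#[derive(Debug, PartialEq)]
enum StarOr<T> {
    Star,
    Other(T),
}

impl<T: FromStr> StarOr<T> {
    /// Parse a raw query-parameter token, treating `*` as the wildcard.
    fn parse(s: &str) -> Result<Self, T::Err> {
        if s.trim() == "*" {
            Ok(StarOr::Star)
        } else {
            s.parse().map(StarOr::Other)
        }
    }
}

impl<T> StarOr<T> {
    /// Apply a fallible conversion to the inner value, leaving `Star` alone.
    fn try_map<U, E>(self, f: impl FnOnce(T) -> Result<U, E>) -> Result<StarOr<U>, E> {
        match self {
            StarOr::Star => Ok(StarOr::Star),
            StarOr::Other(x) => f(x).map(StarOr::Other),
        }
    }
}

fn main() {
    // `uids=*` keeps the filter wide open...
    assert_eq!(StarOr::<u32>::parse("*"), Ok(StarOr::Star));
    // ...while `uids=12` narrows it to one concrete value.
    assert_eq!(StarOr::<u32>::parse("12"), Ok(StarOr::Other(12)));
    // Bad values surface the inner `FromStr` error, which the routes turn
    // into messages like "could not parse `cat` as a positive integer".
    assert!(StarOr::<u32>::parse("cat").is_err());
    // A fallible conversion maps `Other` and passes `Star` through.
    let doubled = StarOr::Other(21u32).try_map(|x| Ok::<_, ()>(x * 2));
    assert_eq!(doubled, Ok(StarOr::Other(42)));
}

The same `try_map` shape is what `deserialize_date_before`/`deserialize_date_after` earlier in this series use to turn an `OptionStarOr<String>` into an `OptionStarOr<OffsetDateTime>` while letting `*` pass through untouched.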
diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 9e76f3be6..d19dc4773 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -7,8 +7,8 @@ use index_scheduler::IndexScheduler; use log::debug; use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; -use meilisearch_types::error::ResponseError; -use meilisearch_types::error::{deserr_codes::*, unwrap_any, Code}; +use meilisearch_types::error::deserr_codes::*; +use meilisearch_types::error::{unwrap_any, Code, ResponseError}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::{self, FieldDistribution, Index}; use meilisearch_types::tasks::KindWithContent; diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 7eabfc2ee..6bf5e3dae 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -3,8 +3,8 @@ use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_auth::IndexSearchRules; -use meilisearch_types::deserr::{DeserrQueryParamError, DeserrJsonError}; use meilisearch_types::deserr::query_params::Param; +use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::error::deserr_codes::*; use meilisearch_types::error::ResponseError; use meilisearch_types::serde_cs::vec::CS; diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index 060c86910..17260cad4 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -4,8 +4,8 @@ use deserr::DeserializeFromValue; use index_scheduler::{IndexScheduler, Query, TaskId}; use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::DeserrQueryParamError; -use meilisearch_types::error::ResponseError; -use meilisearch_types::error::{deserr_codes::*, InvalidTaskDateError}; +use meilisearch_types::error::deserr_codes::*; +use meilisearch_types::error::{InvalidTaskDateError, ResponseError}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::settings::{Settings, Unchecked}; use meilisearch_types::star_or::{OptionStarOr, OptionStarOrList}; From f073a8638757b56751aefa238619f9724b9b1c35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 17 Jan 2023 11:28:19 +0100 Subject: [PATCH 008/186] Update deserr to latest version --- milli/Cargo.toml | 2 +- milli/src/update/settings.rs | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 5bbd7a8ff..981468f85 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -12,7 +12,7 @@ byteorder = "1.4.3" charabia = { version = "0.7.0", default-features = false } concat-arrays = "0.1.2" crossbeam-channel = "0.5.6" -deserr = "0.1.4" +deserr = "0.1.5" either = "1.8.0" flatten-serde-json = { path = "../flatten-serde-json" } fst = "0.4.7" diff --git a/milli/src/update/settings.rs b/milli/src/update/settings.rs index f10bfe4e9..4242654cb 100644 --- a/milli/src/update/settings.rs +++ b/milli/src/update/settings.rs @@ -37,9 +37,6 @@ where _ => T::deserialize_from_value(value, location).map(Setting::Set), } } - fn default() -> Option { - Some(Self::NotSet) - } } impl Default for Setting { From e3d30e28efe147432699abbee348a5c88948e0a5 Mon Sep 17 00:00:00 2001 From: curquiza Date: Tue, 17 Jan 2023 10:50:29 +0000 Subject: [PATCH 009/186] Update version for the 
next release (v0.39.1) in Cargo.toml files --- benchmarks/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- filter-parser/Cargo.toml | 2 +- flatten-serde-json/Cargo.toml | 2 +- json-depth-checker/Cargo.toml | 2 +- milli/Cargo.toml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 1cb63db4a..9f5b6190e 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "benchmarks" -version = "0.39.0" +version = "0.39.1" edition = "2018" publish = false diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7ecc3fa33..42fa4353a 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.39.0" +version = "0.39.1" edition = "2018" description = "A CLI to interact with a milli index" publish = false diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index 9202c3875..38427ccdf 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "filter-parser" -version = "0.39.0" +version = "0.39.1" edition = "2021" description = "The parser for the Meilisearch filter syntax" publish = false diff --git a/flatten-serde-json/Cargo.toml b/flatten-serde-json/Cargo.toml index 2fb668f86..904baa0b4 100644 --- a/flatten-serde-json/Cargo.toml +++ b/flatten-serde-json/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flatten-serde-json" -version = "0.39.0" +version = "0.39.1" edition = "2021" description = "Flatten serde-json objects like elastic search" readme = "README.md" diff --git a/json-depth-checker/Cargo.toml b/json-depth-checker/Cargo.toml index feb245e5e..5210b8bf4 100644 --- a/json-depth-checker/Cargo.toml +++ b/json-depth-checker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "json-depth-checker" -version = "0.39.0" +version = "0.39.1" edition = "2021" description = "A library that indicates if a JSON must be flattened" publish = false diff --git a/milli/Cargo.toml b/milli/Cargo.toml index e23051b69..b591fb466 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "milli" -version = "0.39.0" +version = "0.39.1" authors = ["Kerollmops "] edition = "2018" From c71a8ea183afa53312c8a91724ae93ff87124667 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 17 Jan 2023 13:10:38 +0100 Subject: [PATCH 010/186] Update to latest milli and deserr --- Cargo.lock | 47 ++++++++++++------------------------ meilisearch-types/Cargo.toml | 4 +-- meilisearch-types/src/lib.rs | 3 ++- meilisearch/Cargo.toml | 2 +- 4 files changed, 21 insertions(+), 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9bbaa0525..5c41a3406 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1009,39 +1009,20 @@ dependencies = [ [[package]] name = "deserr" -version = "0.1.4" -dependencies = [ - "deserr-internal 0.1.4", - "serde-cs", - "serde_json", -] - -[[package]] -name = "deserr" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86290491a2b5c21a1a5083da8dae831006761258fabd5617309c3eebc5f89468" +checksum = "5d3c6417f0bf7561774690e3d47f9659b0cbc3614c7af7bfda404fda7a2c11d3" dependencies = [ - "deserr-internal 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "deserr-internal", "serde-cs", "serde_json", ] [[package]] name = "deserr-internal" -version = "0.1.4" -dependencies = [ - "convert_case 0.5.0", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "deserr-internal" -version = "0.1.4" +version = "0.1.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7131de1c27581bc376a22166c9f570be91b76cb096be2f6aecf224c27bf7c49a" +checksum = "196415cbd3b782cddecbdd69da18cd9b19e1bb0bdbb649e87b5afd83fa8d322b" dependencies = [ "convert_case 0.5.0", "proc-macro2", @@ -1319,7 +1300,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.38.0" +version = "0.39.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" dependencies = [ "nom", "nom_locate", @@ -1337,7 +1319,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.38.0" +version = "0.39.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" dependencies = [ "serde_json", ] @@ -1901,7 +1884,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.38.0" +version = "0.39.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" dependencies = [ "serde_json", ] @@ -2308,7 +2292,7 @@ dependencies = [ "cargo_toml", "clap 4.0.32", "crossbeam-channel", - "deserr 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "deserr", "dump", "either", "env_logger", @@ -2399,7 +2383,7 @@ dependencies = [ "anyhow", "convert_case 0.6.0", "csv", - "deserr 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "deserr", "either", "enum-iterator", "file-store", @@ -2447,7 +2431,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.38.0" +version = "0.39.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" dependencies = [ "bimap", "bincode", @@ -2457,7 +2442,7 @@ dependencies = [ "concat-arrays", "crossbeam-channel", "csv", - "deserr 0.1.4", + "deserr", "either", "filter-parser", "flatten-serde-json", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 467454b4e..8d7f673d9 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -9,14 +9,14 @@ actix-web = { version = "4.2.1", default-features = false } anyhow = "1.0.65" convert_case = "0.6.0" csv = "1.1.6" -deserr = "0.1.4" +deserr = "0.1.5" either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" file-store = { path = "../file-store" } flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" -milli = { path = "/Users/meilisearch/Documents/milli2/milli", default-features = false } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.39.1", default-features = false } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } serde-cs = "0.2.4" diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs index 14602b5aa..1e2985379 100644 --- a/meilisearch-types/src/lib.rs +++ b/meilisearch-types/src/lib.rs @@ -8,10 +8,11 @@ pub mod settings; pub mod star_or; pub mod tasks; pub mod versioning; +pub use milli; pub use milli::{heed, Index}; +pub use serde_cs; use uuid::Uuid; pub use versioning::VERSION_FILE_NAME; -pub use {milli, serde_cs}; pub type Document = serde_json::Map; pub type InstanceUid = Uuid; diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index be852c02e..9a0c9bd0b 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", " bytes = "1.2.1" clap = { version = "4.0.9", features = ["derive", "env"] } crossbeam-channel = "0.5.6" 
-deserr = "0.1.4" +deserr = "0.1.5" dump = { path = "../dump" } either = "1.8.0" env_logger = "0.9.1" From 56e79fa850e8141c01856e91d2f99aadee7ed1fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 17 Jan 2023 13:17:47 +0100 Subject: [PATCH 011/186] Update task snapshot test and clean up details --- meilisearch-types/src/index_uid.rs | 4 +- meilisearch-types/src/lib.rs | 3 +- meilisearch/src/routes/mod.rs | 8 - meilisearch/src/routes/tasks.rs | 156 ++++++++++++++++--- meilisearch/tests/documents/add_documents.rs | 4 +- 5 files changed, 141 insertions(+), 34 deletions(-) diff --git a/meilisearch-types/src/index_uid.rs b/meilisearch-types/src/index_uid.rs index 0fa5ef530..1a5102355 100644 --- a/meilisearch-types/src/index_uid.rs +++ b/meilisearch-types/src/index_uid.rs @@ -2,13 +2,11 @@ use std::error::Error; use std::fmt; use std::str::FromStr; -use serde::{Deserialize, Serialize}; - use crate::error::{Code, ErrorCode}; /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct IndexUid(String); impl IndexUid { diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs index 1e2985379..14602b5aa 100644 --- a/meilisearch-types/src/lib.rs +++ b/meilisearch-types/src/lib.rs @@ -8,11 +8,10 @@ pub mod settings; pub mod star_or; pub mod tasks; pub mod versioning; -pub use milli; pub use milli::{heed, Index}; -pub use serde_cs; use uuid::Uuid; pub use versioning::VERSION_FILE_NAME; +pub use {milli, serde_cs}; pub type Document = serde_json::Map; pub type InstanceUid = Uuid; diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs index 52ad92c23..9ef036554 100644 --- a/meilisearch/src/routes/mod.rs +++ b/meilisearch/src/routes/mod.rs @@ -1,5 +1,4 @@ use std::collections::BTreeMap; -use std::str::FromStr; use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; @@ -34,13 +33,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service(web::scope("/swap-indexes").configure(swap_indexes::configure)); } -pub fn from_string_to_option(input: &str) -> Result, E> -where - T: FromStr, -{ - Ok(Some(input.parse()?)) -} - const PAGINATION_DEFAULT_LIMIT: usize = 20; #[derive(Debug, Serialize)] diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index 17260cad4..eb5cadf2d 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -501,15 +501,21 @@ mod tests { use deserr::DeserializeFromValue; use meili_snap::snapshot; use meilisearch_types::deserr::DeserrQueryParamError; + use meilisearch_types::error::{Code, ResponseError}; - use crate::extractors::query_parameters::QueryParameter; use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery}; - fn deserr_query_params(j: &str) -> Result + fn deserr_query_params(j: &str) -> Result where T: DeserializeFromValue, { - QueryParameter::::from_query(j).map(|p| p.0) + let value = serde_urlencoded::from_str::(j) + .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?; + + match deserr::deserialize::<_, _, DeserrQueryParamError>(value) { + Ok(data) => Ok(data), + Err(e) => Err(ResponseError::from(e)), + } } #[test] @@ -556,33 +562,75 @@ mod tests { { let params = "afterFinishedAt=2021"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter 
`afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_after_finished_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_after_finished_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" + } + "###); } { let params = "beforeFinishedAt=2021"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_before_finished_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_finished_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" + } + "###); } { let params = "afterEnqueuedAt=2021-12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_after_enqueued_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_after_enqueued_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" + } + "###); } { let params = "beforeEnqueuedAt=2021-12-03T23"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_before_enqueued_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_enqueued_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" + } + "###); } { let params = "afterStartedAt=2021-12-03T23:45"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_after_started_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-after-started-at" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_after_started_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at" + } + "###); } { let params = "beforeStartedAt=2021-12-03T23:45"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", error_code: "invalid_task_before_started_at", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-before-started-at" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_before_started_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at" + } + "###); } } @@ -601,17 +649,38 @@ mod tests { { let params = "uids=cat,*,dog"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer", error_code: "invalid_task_uids", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-uids" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer", + "code": "invalid_task_uids", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-uids" + } + "###); } { let params = "uids=78,hello,world"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer", error_code: "invalid_task_uids", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-uids" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer", + "code": "invalid_task_uids", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-uids" + } + "###); } { let params = "uids=cat"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `uids`: could not parse `cat` as a positive integer", error_code: "invalid_task_uids", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-uids" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer", + "code": "invalid_task_uids", + "type": "invalid_request", + "link": 
"https://docs.meilisearch.com/errors#invalid-task-uids" + } + "###); } } @@ -630,7 +699,14 @@ mod tests { { let params = "statuses=finished"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", error_code: "invalid_task_statuses", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-statuses" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", + "code": "invalid_task_statuses", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-statuses" + } + "###); } } #[test] @@ -648,7 +724,14 @@ mod tests { { let params = "types=createIndex"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", error_code: "invalid_task_types", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-types" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", + "code": "invalid_task_types", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-types" + } + "###); } } #[test] @@ -666,12 +749,26 @@ mod tests { { let params = "indexUids=1,hé"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", error_code: "invalid_index_uid", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-index-uid" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + } + "###); } { let params = "indexUids=hé"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", error_code: "invalid_index_uid", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-index-uid" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + } + "###); } } @@ -699,19 +796,40 @@ mod tests { // Star in from not allowed let params = "uids=*&from=*"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Invalid value in parameter `from`: could not parse `*` as a positive integer", error_code: "invalid_task_from", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-task-from" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Invalid value in parameter `from`: could not parse `*` as a positive integer", + "code": "invalid_task_from", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-task-from" + } + "###); } { // From not allowed in task deletion/cancelation queries let params = "from=12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", error_code: "bad_request", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#bad-request" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad-request" + } + "###); } { // Limit not allowed in task deletion/cancelation queries let params = "limit=12"; let err = deserr_query_params::(params).unwrap_err(); - snapshot!(format!("{err:?}"), @r###"ResponseError { code: 400, message: "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", error_code: "bad_request", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#bad-request" }"###); + snapshot!(meili_snap::json_string!(err), @r###" + { + "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad-request" + } + "###); } } diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index c27b899c6..4af365a7e 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ 
b/meilisearch/tests/documents/add_documents.rs @@ -926,7 +926,7 @@ async fn error_primary_key_inference() { "indexedDocuments": 1 }, "error": { - "message": "The primary key inference process failed because the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", + "message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", "code": "index_primary_key_no_candidate_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found" @@ -966,7 +966,7 @@ async fn error_primary_key_inference() { "indexedDocuments": 1 }, "error": { - "message": "The primary key inference process failed because the engine found 3 fields ending with `id` in their name, such as 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.", + "message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.", "code": "index_primary_key_multiple_candidates_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found" From e2256083376ccca383dc3a8f8403635e363185bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 17 Jan 2023 13:51:07 +0100 Subject: [PATCH 012/186] Use invalid_index_uid error code in more places --- meilisearch-types/src/index_uid.rs | 5 ++- meilisearch/src/error.rs | 4 +- meilisearch/src/routes/indexes/documents.rs | 45 +++++++++++++-------- meilisearch/src/routes/indexes/mod.rs | 22 +++++----- meilisearch/src/routes/indexes/search.rs | 5 +++ meilisearch/src/routes/indexes/settings.rs | 20 +++++++-- meilisearch/src/routes/swap_indexes.rs | 5 ++- meilisearch/tests/index/create_index.rs | 19 ++++----- meilisearch/tests/index/get_index.rs | 21 +++++----- 9 files changed, 91 insertions(+), 55 deletions(-) diff --git a/meilisearch-types/src/index_uid.rs b/meilisearch-types/src/index_uid.rs index 1a5102355..2f3f6e5df 100644 --- a/meilisearch-types/src/index_uid.rs +++ b/meilisearch-types/src/index_uid.rs @@ -2,11 +2,14 @@ use std::error::Error; use std::fmt; use std::str::FromStr; +use deserr::DeserializeFromValue; + use crate::error::{Code, ErrorCode}; /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)] +#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)] pub struct IndexUid(String); impl IndexUid { diff --git a/meilisearch/src/error.rs b/meilisearch/src/error.rs index 23a101080..9c77f4d3e 100644 --- a/meilisearch/src/error.rs +++ b/meilisearch/src/error.rs @@ -2,7 +2,7 @@ use actix_web as aweb; use aweb::error::{JsonPayloadError, QueryPayloadError}; use meilisearch_types::document_formats::{DocumentFormatError, PayloadType}; use meilisearch_types::error::{Code, ErrorCode, ResponseError}; -use meilisearch_types::index_uid::IndexUidFormatError; +use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError}; use serde_json::Value; use tokio::task::JoinError; @@ -27,7 +27,7 @@ pub enum MeilisearchHttpError { #[error("Two indexes must be given for each swap. 
The list `{:?}` contains {} indexes.", .0, .0.len() )] - SwapIndexPayloadWrongLength(Vec), + SwapIndexPayloadWrongLength(Vec), #[error(transparent)] IndexUid(#[from] IndexUidFormatError), #[error(transparent)] diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 3316ee10b..2b36ba834 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -89,14 +89,17 @@ pub struct GetDocument { pub async fn get_document( index_scheduler: GuardedData, Data>, - path: web::Path, + document_param: web::Path, params: QueryParameter, ) -> Result { + let DocumentParam { index_uid, document_id } = document_param.into_inner(); + let index_uid = IndexUid::try_from(index_uid)?; + let GetDocument { fields } = params.into_inner(); let attributes_to_retrieve = fields.merge_star_and_none(); - let index = index_scheduler.index(&path.index_uid)?; - let document = retrieve_document(&index, &path.document_id, attributes_to_retrieve)?; + let index = index_scheduler.index(&index_uid)?; + let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?; debug!("returns: {:?}", document); Ok(HttpResponse::Ok().json(document)) } @@ -107,10 +110,15 @@ pub async fn delete_document( req: HttpRequest, analytics: web::Data, ) -> Result { + let DocumentParam { index_uid, document_id } = path.into_inner(); + let index_uid = IndexUid::try_from(index_uid)?; + analytics.delete_documents(DocumentDeletionKind::PerDocumentId, &req); - let DocumentParam { document_id, index_uid } = path.into_inner(); - let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] }; + let task = KindWithContent::DocumentDeletion { + index_uid: index_uid.to_string(), + documents_ids: vec![document_id], + }; let task: SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); debug!("returns: {:?}", task); @@ -133,6 +141,7 @@ pub async fn get_all_documents( index_uid: web::Path, params: QueryParameter, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; debug!("called with params: {:?}", params); let BrowseQuery { limit, offset, fields } = params.into_inner(); let attributes_to_retrieve = fields.merge_star_and_none(); @@ -161,6 +170,8 @@ pub async fn add_documents( req: HttpRequest, analytics: web::Data, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + debug!("called with params: {:?}", params); let params = params.into_inner(); @@ -170,7 +181,7 @@ pub async fn add_documents( let task = document_addition( extract_mime_type(&req)?, index_scheduler, - index_uid.into_inner(), + index_uid, params.primary_key, body, IndexDocumentsMethod::ReplaceDocuments, @@ -183,14 +194,15 @@ pub async fn add_documents( pub async fn update_documents( index_scheduler: GuardedData, Data>, - path: web::Path, + index_uid: web::Path, params: QueryParameter, body: Payload, req: HttpRequest, analytics: web::Data, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + debug!("called with params: {:?}", params); - let index_uid = path.into_inner(); analytics.update_documents(¶ms, index_scheduler.index(&index_uid).is_err(), &req); @@ -212,7 +224,7 @@ pub async fn update_documents( async fn document_addition( mime_type: Option, index_scheduler: GuardedData, Data>, - index_uid: String, + index_uid: IndexUid, primary_key: Option, mut body: Payload, method: IndexDocumentsMethod, @@ -233,9 +245,6 @@ async fn 
document_addition( } }; - // is your indexUid valid? - let index_uid = IndexUid::try_from(index_uid)?.into_inner(); - let (uuid, mut update_file) = index_scheduler.create_update_file()?; let temp_file = match tempfile() { @@ -311,7 +320,7 @@ async fn document_addition( documents_count, primary_key, allow_index_creation, - index_uid, + index_uid: index_uid.to_string(), }; let scheduler = index_scheduler.clone(); @@ -329,12 +338,13 @@ async fn document_addition( pub async fn delete_documents( index_scheduler: GuardedData, Data>, - path: web::Path, + index_uid: web::Path, body: web::Json>, req: HttpRequest, analytics: web::Data, ) -> Result { debug!("called with params: {:?}", body); + let index_uid = IndexUid::try_from(index_uid.into_inner())?; analytics.delete_documents(DocumentDeletionKind::PerBatch, &req); @@ -344,7 +354,7 @@ pub async fn delete_documents( .collect(); let task = - KindWithContent::DocumentDeletion { index_uid: path.into_inner(), documents_ids: ids }; + KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids }; let task: SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); @@ -354,13 +364,14 @@ pub async fn delete_documents( pub async fn clear_all_documents( index_scheduler: GuardedData, Data>, - path: web::Path, + index_uid: web::Path, req: HttpRequest, analytics: web::Data, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; analytics.delete_documents(DocumentDeletionKind::ClearAll, &req); - let task = KindWithContent::DocumentClear { index_uid: path.into_inner() }; + let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() }; let task: SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index d19dc4773..d2a842fe3 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -12,7 +12,7 @@ use meilisearch_types::error::{unwrap_any, Code, ResponseError}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::{self, FieldDistribution, Index}; use meilisearch_types::tasks::KindWithContent; -use serde::{Deserialize, Serialize}; +use serde::Serialize; use serde_json::json; use time::OffsetDateTime; @@ -49,7 +49,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct IndexView { pub uid: String, @@ -108,8 +108,8 @@ pub async fn list_indexes( #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct IndexCreateRequest { #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_index_uid)] - uid: String, - #[deserr(error = DeserrJsonError)] + uid: IndexUid, + #[deserr(default, error = DeserrJsonError)] primary_key: Option, } @@ -120,7 +120,6 @@ pub async fn create_index( analytics: web::Data, ) -> Result { let IndexCreateRequest { primary_key, uid } = body.into_inner(); - let uid = IndexUid::try_from(uid)?.into_inner(); let allow_index_creation = index_scheduler.filters().search_rules.is_index_authorized(&uid); if allow_index_creation { @@ -130,7 +129,7 @@ pub async fn create_index( Some(&req), ); - let task = KindWithContent::IndexCreation { index_uid: uid, primary_key }; + let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key }; let task: 
SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); @@ -162,7 +161,7 @@ fn deny_immutable_fields_index( #[derive(DeserializeFromValue, Debug)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)] pub struct UpdateIndexRequest { - #[deserr(error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] primary_key: Option, } @@ -170,6 +169,8 @@ pub async fn get_index( index_scheduler: GuardedData, Data>, index_uid: web::Path, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let index = index_scheduler.index(&index_uid)?; let index_view = IndexView::new(index_uid.into_inner(), &index)?; @@ -180,12 +181,13 @@ pub async fn get_index( pub async fn update_index( index_scheduler: GuardedData, Data>, - path: web::Path, + index_uid: web::Path, body: ValidatedJson, req: HttpRequest, analytics: web::Data, ) -> Result { debug!("called with params: {:?}", body); + let index_uid = IndexUid::try_from(index_uid.into_inner())?; let body = body.into_inner(); analytics.publish( "Index Updated".to_string(), @@ -194,7 +196,7 @@ pub async fn update_index( ); let task = KindWithContent::IndexUpdate { - index_uid: path.into_inner(), + index_uid: index_uid.into_inner(), primary_key: body.primary_key, }; @@ -209,6 +211,7 @@ pub async fn delete_index( index_scheduler: GuardedData, Data>, index_uid: web::Path, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() }; let task: SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); @@ -222,6 +225,7 @@ pub async fn get_index_stats( req: HttpRequest, analytics: web::Data, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": true }), Some(&req)); let stats = IndexStats::new((*index_scheduler).clone(), index_uid.into_inner())?; diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 6bf5e3dae..545c69ec5 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -7,6 +7,7 @@ use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::error::deserr_codes::*; use meilisearch_types::error::ResponseError; +use meilisearch_types::index_uid::IndexUid; use meilisearch_types::serde_cs::vec::CS; use serde_json::Value; @@ -154,6 +155,8 @@ pub async fn search_with_url_query( analytics: web::Data, ) -> Result { debug!("called with params: {:?}", params); + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let mut query: SearchQuery = params.into_inner().into(); // Tenant token search_rules. @@ -185,6 +188,8 @@ pub async fn search_with_post( req: HttpRequest, analytics: web::Data, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let mut query = params.into_inner(); debug!("search called with params: {:?}", query); diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 91c3473fa..0c864cc73 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -41,12 +41,14 @@ macro_rules! 
make_setting_route { >, index_uid: web::Path, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() }; let allow_index_creation = index_scheduler.filters().allow_index_creation; - let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner(); + let task = KindWithContent::SettingsUpdate { - index_uid, + index_uid: index_uid.to_string(), new_settings: Box::new(new_settings), is_deletion: true, allow_index_creation, @@ -70,6 +72,8 @@ macro_rules! make_setting_route { req: HttpRequest, $analytics_var: web::Data, ) -> std::result::Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let body = body.into_inner(); $analytics(&body, &req); @@ -83,9 +87,9 @@ macro_rules! make_setting_route { }; let allow_index_creation = index_scheduler.filters().allow_index_creation; - let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner(); + let task = KindWithContent::SettingsUpdate { - index_uid, + index_uid: index_uid.to_string(), new_settings: Box::new(new_settings), is_deletion: false, allow_index_creation, @@ -106,6 +110,8 @@ macro_rules! make_setting_route { >, index_uid: actix_web::web::Path, ) -> std::result::Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let index = index_scheduler.index(&index_uid)?; let rtxn = index.read_txn()?; let settings = settings(&index, &rtxn)?; @@ -466,6 +472,8 @@ pub async fn update_all( req: HttpRequest, analytics: web::Data, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let new_settings = body.into_inner(); analytics.publish( @@ -571,6 +579,8 @@ pub async fn get_all( index_scheduler: GuardedData, Data>, index_uid: web::Path, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let index = index_scheduler.index(&index_uid)?; let rtxn = index.read_txn()?; let new_settings = settings(&index, &rtxn)?; @@ -582,6 +592,8 @@ pub async fn delete_all( index_scheduler: GuardedData, Data>, index_uid: web::Path, ) -> Result { + let index_uid = IndexUid::try_from(index_uid.into_inner())?; + let new_settings = Settings::cleared().into_unchecked(); let allow_index_creation = index_scheduler.filters().allow_index_creation; diff --git a/meilisearch/src/routes/swap_indexes.rs b/meilisearch/src/routes/swap_indexes.rs index 9adbfecdd..4a7802f2e 100644 --- a/meilisearch/src/routes/swap_indexes.rs +++ b/meilisearch/src/routes/swap_indexes.rs @@ -5,6 +5,7 @@ use index_scheduler::IndexScheduler; use meilisearch_types::deserr::DeserrJsonError; use meilisearch_types::error::deserr_codes::InvalidSwapIndexes; use meilisearch_types::error::ResponseError; +use meilisearch_types::index_uid::IndexUid; use meilisearch_types::tasks::{IndexSwap, KindWithContent}; use serde_json::json; @@ -24,7 +25,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SwapIndexesPayload { #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_swap_indexes)] - indexes: Vec, + indexes: Vec, } pub async fn swap_indexes( @@ -55,7 +56,7 @@ pub async fn swap_indexes( if !search_rules.is_index_authorized(lhs) || !search_rules.is_index_authorized(rhs) { return Err(AuthenticationError::InvalidToken.into()); } - swaps.push(IndexSwap { indexes: (lhs.clone(), rhs.clone()) }); + swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) }); } let task = 
KindWithContent::IndexSwap { swaps }; diff --git a/meilisearch/tests/index/create_index.rs b/meilisearch/tests/index/create_index.rs index 6c5adb5c6..884a0b069 100644 --- a/meilisearch/tests/index/create_index.rs +++ b/meilisearch/tests/index/create_index.rs @@ -1,6 +1,7 @@ use actix_web::http::header::ContentType; use actix_web::test; use http::header::ACCEPT_ENCODING; +use meili_snap::{json_string, snapshot}; use serde_json::{json, Value}; use crate::common::encoder::Encoder; @@ -188,13 +189,13 @@ async fn error_create_with_invalid_index_uid() { let index = server.index("test test#!"); let (response, code) = index.create(None).await; - let expected_response = json!({ - "message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", - "code": "invalid_index_uid", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-index-uid" - }); - - assert_eq!(response, expected_response); - assert_eq!(code, 400); + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value at `.uid`: `test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + } + "###); } diff --git a/meilisearch/tests/index/get_index.rs b/meilisearch/tests/index/get_index.rs index 7bd8a0184..6e70484f6 100644 --- a/meilisearch/tests/index/get_index.rs +++ b/meilisearch/tests/index/get_index.rs @@ -1,3 +1,4 @@ +use meili_snap::{json_string, snapshot}; use serde_json::{json, Value}; use crate::common::Server; @@ -182,15 +183,13 @@ async fn get_invalid_index_uid() { let index = server.index("this is not a valid index name"); let (response, code) = index.get().await; - assert_eq!(code, 404); - assert_eq!( - response, - json!( - { - "message": "Index `this is not a valid index name` not found.", - "code": "index_not_found", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" - }) - ); + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "`this is not a valid index name` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
      "code": "invalid_index_uid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
    }
    "###);
}

From b6ec1f1c6dbace826e329a5fe5fbf1aa717c420e Mon Sep 17 00:00:00 2001
From: Tamo
Date: Thu, 12 Jan 2023 18:56:27 +0100
Subject: [PATCH 013/186] add functional + error tests on the swap_indexes
 route

---
 meilisearch/tests/integration.rs         |   1 +
 meilisearch/tests/swap_indexes/errors.rs |  78 +++++
 meilisearch/tests/swap_indexes/mod.rs    | 357 +++++++++++++++++++++++
 3 files changed, 436 insertions(+)
 create mode 100644 meilisearch/tests/swap_indexes/errors.rs
 create mode 100644 meilisearch/tests/swap_indexes/mod.rs

diff --git a/meilisearch/tests/integration.rs b/meilisearch/tests/integration.rs
index 25b4e49b6..4383aea57 100644
--- a/meilisearch/tests/integration.rs
+++ b/meilisearch/tests/integration.rs
@@ -8,6 +8,7 @@ mod search;
 mod settings;
 mod snapshot;
 mod stats;
+mod swap_indexes;
 mod tasks;
 
 // Tests are isolated by features in different modules to allow better readability, test
diff --git a/meilisearch/tests/swap_indexes/errors.rs b/meilisearch/tests/swap_indexes/errors.rs
new file mode 100644
index 000000000..066e586ce
--- /dev/null
+++ b/meilisearch/tests/swap_indexes/errors.rs
@@ -0,0 +1,78 @@
+use meili_snap::*;
+use serde_json::json;
+
+use crate::common::Server;
+
+#[actix_rt::test]
+async fn swap_indexes_bad_format() {
+    let server = Server::new().await;
+
+    let (response, code) = server.index_swap(json!("doggo")).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.",
+      "code": "bad_request",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#bad-request"
+    }
+    "###);
+    let (response, code) = server.index_swap(json!(["doggo"])).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "invalid type: String `\"doggo\"`, expected a Map at `[0]`.",
+      "code": "bad_request",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#bad-request"
+    }
+    "###);
+}
+
+#[actix_rt::test]
+async fn swap_indexes_bad_indexes() {
+    let server = Server::new().await;
+
+    let (response, code) = server.index_swap(json!([{ "indexes": "doggo"}])).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "invalid type: String `\"doggo\"`, expected a Sequence at `[0].indexes`.",
+      "code": "invalid_swap_indexes",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
+    }
+    "###);
+    let (response, code) = server.index_swap(json!([{ "indexes": ["doggo"]}])).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Two indexes must be given for each swap. The list `[\"doggo\"]` contains 1 indexes.",
+      "code": "invalid_swap_indexes",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
+    }
+    "###);
+    let (response, code) = server.index_swap(json!([{ "indexes": ["doggo", "doggo"]}])).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Indexes must be declared only once during a swap. 
`doggo` was specified several times.",
+      "code": "invalid_swap_duplicate_index_found",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found"
+    }
+    "###);
+    let (response, code) = server
+        .index_swap(json!([{ "indexes": ["doggo", "catto"]}, { "indexes": ["girafo", "doggo"]}]))
+        .await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Indexes must be declared only once during a swap. `doggo` was specified several times.",
+      "code": "invalid_swap_duplicate_index_found",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found"
+    }
+    "###);
+}
diff --git a/meilisearch/tests/swap_indexes/mod.rs b/meilisearch/tests/swap_indexes/mod.rs
new file mode 100644
index 000000000..78f1de92e
--- /dev/null
+++ b/meilisearch/tests/swap_indexes/mod.rs
@@ -0,0 +1,357 @@
+mod errors;
+
+use meili_snap::{json_string, snapshot};
+use serde_json::json;
+
+use crate::common::{GetAllDocumentsOptions, Server};
+
+#[actix_rt::test]
+async fn swap_indexes() {
+    let server = Server::new().await;
+    let a = server.index("a");
+    let (_, code) = a.add_documents(json!({ "id": 1, "index": "a"}), None).await;
+    snapshot!(code, @"202 Accepted");
+    let b = server.index("b");
+    let (res, code) = b.add_documents(json!({ "id": 1, "index": "b"}), None).await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(res["taskUid"], @"1");
+    server.wait_task(1).await;
+
+    let (tasks, code) = server.tasks().await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "uid": 1,
+          "indexUid": "b",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 0,
+          "indexUid": "a",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        }
+      ],
+      "limit": 20,
+      "from": 1,
+      "next": null
+    }
+    "###);
+
+    let (res, code) = server.index_swap(json!([{ "indexes": ["a", "b"] }])).await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(res["taskUid"], @"2");
+    server.wait_task(2).await;
+
+    let (tasks, code) = server.tasks().await;
+    snapshot!(code, @"200 OK");
+
+    // Notice how task 0, which initially represented the creation of index `a`, now represents the creation of index `b`. 
+    snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "uid": 2,
+          "indexUid": null,
+          "status": "succeeded",
+          "type": "indexSwap",
+          "canceledBy": null,
+          "details": {
+            "swaps": [
+              {
+                "indexes": [
+                  "a",
+                  "b"
+                ]
+              }
+            ]
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 1,
+          "indexUid": "a",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 0,
+          "indexUid": "b",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        }
+      ],
+      "limit": 20,
+      "from": 2,
+      "next": null
+    }
+    "###);
+
+    // BUT, the data in `a` should now point to the data that was in `b`.
+    // And the opposite is true as well
+    let (res, _) = a.get_all_documents(GetAllDocumentsOptions::default()).await;
+    snapshot!(res["results"], @r###"[{"id":1,"index":"b"}]"###);
+    let (res, _) = b.get_all_documents(GetAllDocumentsOptions::default()).await;
+    snapshot!(res["results"], @r###"[{"id":1,"index":"a"}]"###);
+
+    // ================
+    // And now we're going to attempt the famous and dangerous DOUBLE index swap 🤞
+
+    let c = server.index("c");
+    let (res, code) = c.add_documents(json!({ "id": 1, "index": "c"}), None).await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(res["taskUid"], @"3");
+    let d = server.index("d");
+    let (res, code) = d.add_documents(json!({ "id": 1, "index": "d"}), None).await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(res["taskUid"], @"4");
+    server.wait_task(4).await;
+
+    // ensure the index creation worked properly
+    let (tasks, code) = server.tasks_filter(json!({ "limit": 2 })).await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "uid": 4,
+          "indexUid": "d",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 3,
+          "indexUid": "c",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        }
+      ],
+      "limit": 2,
+      "from": 4,
+      "next": 2
+    }
+    "###);
+
+    // It's happening 😲
+
+    let (res, code) =
+        server.index_swap(json!([{ "indexes": ["a", "b"] }, { "indexes": ["c", "d"] } ])).await;
+    snapshot!(res["taskUid"], @"5");
+    snapshot!(code, @"202 Accepted");
+    server.wait_task(5).await;
+
+    // ensure the swaps worked properly
+    let (tasks, code) = server.tasks().await;
+    snapshot!(code, @"200 OK");
+
+    // What should we check for 
each task in this test:
+    // Task number:
+    // 0. should have the indexUid `a` again
+    // 1. should have the indexUid `b` again
+    // 2. stays unchanged
+    // 3. now has the indexUid `d` instead of `c`
+    // 4. now has the indexUid `c` instead of `d`
+    snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "uid": 5,
+          "indexUid": null,
+          "status": "succeeded",
+          "type": "indexSwap",
+          "canceledBy": null,
+          "details": {
+            "swaps": [
+              {
+                "indexes": [
+                  "a",
+                  "b"
+                ]
+              },
+              {
+                "indexes": [
+                  "c",
+                  "d"
+                ]
+              }
+            ]
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 4,
+          "indexUid": "c",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 3,
+          "indexUid": "d",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 2,
+          "indexUid": null,
+          "status": "succeeded",
+          "type": "indexSwap",
+          "canceledBy": null,
+          "details": {
+            "swaps": [
+              {
+                "indexes": [
+                  "b",
+                  "a"
+                ]
+              }
+            ]
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 1,
+          "indexUid": "b",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        },
+        {
+          "uid": 0,
+          "indexUid": "a",
+          "status": "succeeded",
+          "type": "documentAdditionOrUpdate",
+          "canceledBy": null,
+          "details": {
+            "receivedDocuments": 1,
+            "indexedDocuments": 1
+          },
+          "error": null,
+          "duration": "[duration]",
+          "enqueuedAt": "[date]",
+          "startedAt": "[date]",
+          "finishedAt": "[date]"
+        }
+      ],
+      "limit": 20,
+      "from": 5,
+      "next": null
+    }
+    "###);
+
+    // - The data in `a` should point to `a`.
+    // - The data in `b` should point to `b`.
+    // - The data in `c` should point to `d`.
+    // - The data in `d` should point to `c`. 
+ let (res, _) = a.get_all_documents(GetAllDocumentsOptions::default()).await; + snapshot!(res["results"], @r###"[{"id":1,"index":"a"}]"###); + let (res, _) = b.get_all_documents(GetAllDocumentsOptions::default()).await; + snapshot!(res["results"], @r###"[{"id":1,"index":"b"}]"###); + let (res, _) = c.get_all_documents(GetAllDocumentsOptions::default()).await; + snapshot!(res["results"], @r###"[{"id":1,"index":"d"}]"###); + let (res, _) = d.get_all_documents(GetAllDocumentsOptions::default()).await; + snapshot!(res["results"], @r###"[{"id":1,"index":"c"}]"###); +} From 82bdb545377d3f77ed78808ecac1e571fe50415a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Wed, 18 Jan 2023 09:40:41 +0100 Subject: [PATCH 014/186] Update the index swap tests after git rebase --- meilisearch/tests/swap_indexes/errors.rs | 8 ++++---- meilisearch/tests/swap_indexes/mod.rs | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/meilisearch/tests/swap_indexes/errors.rs b/meilisearch/tests/swap_indexes/errors.rs index 066e586ce..a2697a37d 100644 --- a/meilisearch/tests/swap_indexes/errors.rs +++ b/meilisearch/tests/swap_indexes/errors.rs @@ -11,7 +11,7 @@ async fn swap_indexes_bad_format() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at ``.", + "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" @@ -21,7 +21,7 @@ async fn swap_indexes_bad_format() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Map at `[0]`.", + "message": "Invalid value type at `[0]`: expected an object, but found a string: `\"doggo\"`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad-request" @@ -37,7 +37,7 @@ async fn swap_indexes_bad_indexes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "invalid type: String `\"doggo\"`, expected a Sequence at `[0].indexes`.", + "message": "Invalid value type at `[0].indexes`: expected an array, but found a string: `\"doggo\"`", "code": "invalid_swap_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes" @@ -47,7 +47,7 @@ async fn swap_indexes_bad_indexes() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Two indexes must be given for each swap. The list `[\"doggo\"]` contains 1 indexes.", + "message": "Two indexes must be given for each swap. 
The list `[IndexUid(\"doggo\")]` contains 1 indexes.", "code": "invalid_swap_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes" diff --git a/meilisearch/tests/swap_indexes/mod.rs b/meilisearch/tests/swap_indexes/mod.rs index 78f1de92e..42d92e2af 100644 --- a/meilisearch/tests/swap_indexes/mod.rs +++ b/meilisearch/tests/swap_indexes/mod.rs @@ -155,7 +155,7 @@ async fn swap_indexes() { server.wait_task(4).await; // ensure the index creation worked properly - let (tasks, code) = server.tasks_filter(json!({ "limit": 2 })).await; + let (tasks, code) = server.tasks_filter("limit=2").await; snapshot!(code, @"200 OK"); snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###" { From 0f727d079b0d21a6e00c2e4279de131a178a31cd Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 18 Jan 2023 12:28:46 +0100 Subject: [PATCH 015/186] fix the wrong error code on minWordSizeForTypos --- dump/src/reader/compat/v5_to_v6.rs | 2 +- meilisearch-types/src/error.rs | 3 +-- meilisearch-types/src/settings.rs | 6 +++--- meilisearch/tests/settings/errors.rs | 27 +++++++++++++++++++++++++++ 4 files changed, 32 insertions(+), 6 deletions(-) diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 237381414..8d345be45 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -260,7 +260,7 @@ impl From for v6::ResponseError { "index_already_exists" => v6::Code::IndexAlreadyExists, "index_not_found" => v6::Code::IndexNotFound, "invalid_index_uid" => v6::Code::InvalidIndexUid, - "invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo, + "invalid_min_word_length_for_typo" => v6::Code::InvalidSettingsTypoTolerance, "invalid_state" => v6::Code::InvalidState, "primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound, "index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists, diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 21c079678..bf38bb14f 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -224,7 +224,6 @@ InvalidIndexLimit , InvalidRequest , BAD_REQUEST ; InvalidIndexOffset , InvalidRequest , BAD_REQUEST ; InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ; InvalidIndexUid , InvalidRequest , BAD_REQUEST ; -InvalidMinWordLengthForTypo , InvalidRequest , BAD_REQUEST ; InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ; InvalidSearchAttributesToHighlight , InvalidRequest , BAD_REQUEST ; InvalidSearchAttributesToRetrieve , InvalidRequest , BAD_REQUEST ; @@ -334,7 +333,7 @@ impl ErrorCode for milli::Error { UserError::InvalidGeoField { .. 
} => Code::InvalidDocumentGeoField, UserError::SortError(_) => Code::InvalidSearchSort, UserError::InvalidMinTypoWordLenSetting(_, _) => { - Code::InvalidMinWordLengthForTypo + Code::InvalidSettingsTypoTolerance } } } diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index 57899a9d1..b4ab1eff6 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -67,7 +67,7 @@ fn validate_min_word_size_for_typo_setting( #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] #[serde(deny_unknown_fields, rename_all = "camelCase")] -#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError)] +#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError)] pub struct MinWordSizeTyposSetting { #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[deserr(default)] @@ -79,13 +79,13 @@ pub struct MinWordSizeTyposSetting { #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] #[serde(deny_unknown_fields, rename_all = "camelCase")] -#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError>)] +#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError>)] pub struct TypoSettings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[deserr(default)] pub enabled: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(default, error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub min_word_size_for_typos: Setting, #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[deserr(default)] diff --git a/meilisearch/tests/settings/errors.rs b/meilisearch/tests/settings/errors.rs index a3deeccfb..b4bdb27ca 100644 --- a/meilisearch/tests/settings/errors.rs +++ b/meilisearch/tests/settings/errors.rs @@ -243,6 +243,18 @@ async fn settings_bad_typo_tolerance() { } "###); + let (response, code) = + index.update_settings(json!({ "typoTolerance": { "minWordSizeForTypos": "doggo" }})).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.typoTolerance.minWordSizeForTypos`: expected an object, but found a string: `\"doggo\"`", + "code": "invalid_settings_typo_tolerance", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance" + } + "###); + let (response, code) = index.update_settings_typo_tolerance(json!("doggo")).await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" @@ -253,6 +265,21 @@ async fn settings_bad_typo_tolerance() { "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance" } "###); + + let (response, code) = index + .update_settings_typo_tolerance( + json!({ "typoTolerance": { "minWordSizeForTypos": "doggo" }}), + ) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `typoTolerance`: expected one of `enabled`, `minWordSizeForTypos`, `disableOnWords`, `disableOnAttributes`", + "code": "invalid_settings_typo_tolerance", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance" + } + "###); } #[actix_rt::test] From 497187083b918b6cee3832fe7ddca5467e4bd69a Mon Sep 17 00:00:00 2001 
From: Philipp Ahlner Date: Wed, 18 Jan 2023 13:24:26 +0100 Subject: [PATCH 016/186] Add test for bug #3007: Wrong error message Adds a test for #3007: Wrong error message when lat and lng are unparseable --- milli/src/index.rs | 36 +++++++++++++++++++ .../bug_3007/geo_faceted_documents_ids.snap | 4 +++ 2 files changed, 40 insertions(+) create mode 100644 milli/src/snapshots/index.rs/bug_3007/geo_faceted_documents_ids.snap diff --git a/milli/src/index.rs b/milli/src/index.rs index 46f8eb6a3..7ed9af424 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -2292,4 +2292,40 @@ pub(crate) mod tests { assert!(all_ids.insert(id)); } } + + #[test] + fn bug_3007() { + // https://github.com/meilisearch/meilisearch/issues/3007 + + use crate::error::{GeoError, UserError}; + let index = TempIndex::new(); + + // Given is an index with a geo field NOT contained in the sortable_fields of the settings + index + .update_settings(|settings| { + settings.set_primary_key("id".to_string()); + settings.set_filterable_fields(HashSet::from(["_geo".to_string()])); + }) + .unwrap(); + + // happy path + index.add_documents(documents!({ "id" : 5, "_geo": {"lat": 12.0, "lng": 11.0}})).unwrap(); + + db_snap!(index, geo_faceted_documents_ids); + + // both are unparseable, we expect GeoError::BadLatitudeAndLongitude + let err1 = index + .add_documents( + documents!({ "id" : 6, "_geo": {"lat": "unparseable", "lng": "unparseable"}}), + ) + .unwrap_err(); + assert!(matches!( + err1, + Error::UserError(UserError::InvalidGeoField( + GeoError::BadLatitudeAndLongitude { .. } + )) + )); + + db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted + } } diff --git a/milli/src/snapshots/index.rs/bug_3007/geo_faceted_documents_ids.snap b/milli/src/snapshots/index.rs/bug_3007/geo_faceted_documents_ids.snap new file mode 100644 index 000000000..f9ebc0c20 --- /dev/null +++ b/milli/src/snapshots/index.rs/bug_3007/geo_faceted_documents_ids.snap @@ -0,0 +1,4 @@ +--- +source: milli/src/index.rs +--- +[0, ] From 57da80900dcbb444b3e5ab476dbae8be609d1faa Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 18 Jan 2023 14:16:00 +0100 Subject: [PATCH 017/186] make the swap indexes not found errors return an IndexNotFound error code --- index-scheduler/src/error.rs | 4 ++-- .../lib.rs/swap_indexes_errors/first_swap_failed.snap | 2 +- meilisearch/tests/tasks/mod.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/index-scheduler/src/error.rs b/index-scheduler/src/error.rs index 013bcf595..95161fd5e 100644 --- a/index-scheduler/src/error.rs +++ b/index-scheduler/src/error.rs @@ -141,8 +141,8 @@ impl ErrorCode for Error { Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists, Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound, Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound, - Error::SwapIndexNotFound(_) => Code::InvalidSwapIndexes, - Error::SwapIndexesNotFound(_) => Code::InvalidSwapIndexes, + Error::SwapIndexNotFound(_) => Code::IndexNotFound, + Error::SwapIndexesNotFound(_) => Code::IndexNotFound, Error::InvalidTaskDate { field, .. } => (*field).into(), Error::InvalidTaskUids { .. } => Code::InvalidTaskUids, Error::InvalidTaskStatuses { .. 
} => Code::InvalidTaskStatuses, diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap index 1a47f87fb..3e95c5b25 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap @@ -10,7 +10,7 @@ source: index-scheduler/src/lib.rs 1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} 2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} 3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "invalid_swap_indexes", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-swap-indexes" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }} ---------------------------------------------------------------------- ### Status: enqueued [] diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 7fadf0a10..b20b2dabb 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -844,9 +844,9 @@ async fn test_summarized_index_swap() { }, "error": { "message": "Indexes `cattos`, `doggos` not found.", - "code": "invalid_swap_indexes", + "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes" + "link": "https://docs.meilisearch.com/errors#index-not-found" }, "duration": "[duration]", "enqueuedAt": "[date]", From a4476c20f88bbba66eaa8ed782e80b829a99055b Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 18 Jan 2023 15:28:02 +0100 Subject: [PATCH 018/186] fix a wrong error code and add tests on the document resource --- meilisearch/src/routes/indexes/documents.rs | 4 +- meilisearch/tests/common/index.rs | 10 +++ meilisearch/tests/documents/errors.rs | 99 +++++++++++++++++++++ meilisearch/tests/documents/mod.rs | 1 + 4 files changed, 112 insertions(+), 2 deletions(-) create mode 100644 meilisearch/tests/documents/errors.rs diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 2b36ba834..0ec1057ae 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -128,11 +128,11 @@ pub async fn delete_document( #[derive(Debug, DeserializeFromValue)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct BrowseQuery { - #[deserr(default, error = DeserrQueryParamError)] 
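// Hedged sketch of what the `Param` wrapper used for `offset` and `limit`
// below does with a raw query value: it parses the string through `FromStr`,
// and a failed parse becomes the "could not parse `...` as a positive integer"
// message asserted in the new tests. `parse_positive_int` is an illustrative
// stand-in, not the actual implementation.
fn parse_positive_int(raw: &str) -> Result<usize, String> {
    raw.parse::<usize>()
        .map_err(|_| format!("could not parse `{raw}` as a positive integer"))
}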
+ #[deserr(default, error = DeserrQueryParamError)] offset: Param, #[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError)] limit: Param, - #[deserr(default, error = DeserrQueryParamError)] + #[deserr(default, error = DeserrQueryParamError)] fields: OptionStarOrList, } diff --git a/meilisearch/tests/common/index.rs b/meilisearch/tests/common/index.rs index b98ed9827..454c84565 100644 --- a/meilisearch/tests/common/index.rs +++ b/meilisearch/tests/common/index.rs @@ -155,6 +155,11 @@ impl Index<'_> { self.service.get(url).await } + pub async fn get_all_documents_raw(&self, options: &str) -> (Value, StatusCode) { + let url = format!("/indexes/{}/documents{}", urlencode(self.uid.as_ref()), options); + self.service.get(url).await + } + pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) { let mut url = format!("/indexes/{}/documents?", urlencode(self.uid.as_ref())); if let Some(limit) = options.limit { @@ -187,6 +192,11 @@ impl Index<'_> { self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await } + pub async fn delete_batch_raw(&self, body: Value) -> (Value, StatusCode) { + let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref())); + self.service.post_encoded(url, body, self.encoder).await + } + pub async fn settings(&self) -> (Value, StatusCode) { let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref())); self.service.get(url).await diff --git a/meilisearch/tests/documents/errors.rs b/meilisearch/tests/documents/errors.rs new file mode 100644 index 000000000..4c50a8e02 --- /dev/null +++ b/meilisearch/tests/documents/errors.rs @@ -0,0 +1,99 @@ +use meili_snap::*; +use serde_json::json; + +use crate::common::Server; + +#[actix_rt::test] +async fn get_all_documents_bad_offset() { + let server = Server::new().await; + let index = server.index("test"); + + let (response, code) = index.get_all_documents_raw("?offset").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `offset`: could not parse `` as a positive integer", + "code": "invalid_document_offset", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-document-offset" + } + "###); + + let (response, code) = index.get_all_documents_raw("?offset=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer", + "code": "invalid_document_offset", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-document-offset" + } + "###); + + let (response, code) = index.get_all_documents_raw("?offset=-1").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `offset`: could not parse `-1` as a positive integer", + "code": "invalid_document_offset", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-document-offset" + } + "###); +} + +#[actix_rt::test] +async fn get_all_documents_bad_limit() { + let server = Server::new().await; + let index = server.index("test"); + + let (response, code) = index.get_all_documents_raw("?limit").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `limit`: could not parse `` as a positive integer", + "code": 
"invalid_document_limit", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-document-limit" + } + "###); + + let (response, code) = index.get_all_documents_raw("?limit=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer", + "code": "invalid_document_limit", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-document-limit" + } + "###); + + let (response, code) = index.get_all_documents_raw("?limit=-1").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `limit`: could not parse `-1` as a positive integer", + "code": "invalid_document_limit", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-document-limit" + } + "###); +} + +#[actix_rt::test] +async fn delete_documents_batch() { + let server = Server::new().await; + let index = server.index("test"); + + let (response, code) = index.delete_batch_raw(json!("doggo")).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Json deserialize error: invalid type: string \"doggo\", expected a sequence at line 1 column 7", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad-request" + } + "###); +} diff --git a/meilisearch/tests/documents/mod.rs b/meilisearch/tests/documents/mod.rs index 794b57c3a..f6430b108 100644 --- a/meilisearch/tests/documents/mod.rs +++ b/meilisearch/tests/documents/mod.rs @@ -1,4 +1,5 @@ mod add_documents; mod delete_documents; +mod errors; mod get_documents; mod update_documents; From 182eea1f1703dc2df26b697cc40865f9d9d8742e Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 18 Jan 2023 15:50:05 +0100 Subject: [PATCH 019/186] Introduce a canceledBy filter for the tests --- meilisearch/tests/common/index.rs | 10 +++++++++- meilisearch/tests/documents/add_documents.rs | 2 +- meilisearch/tests/tasks/mod.rs | 11 ++++++----- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/meilisearch/tests/common/index.rs b/meilisearch/tests/common/index.rs index b98ed9827..4902f221f 100644 --- a/meilisearch/tests/common/index.rs +++ b/meilisearch/tests/common/index.rs @@ -132,7 +132,12 @@ impl Index<'_> { self.service.get(url).await } - pub async fn filtered_tasks(&self, types: &[&str], statuses: &[&str]) -> (Value, StatusCode) { + pub async fn filtered_tasks( + &self, + types: &[&str], + statuses: &[&str], + canceled_by: &[&str], + ) -> (Value, StatusCode) { let mut url = format!("/tasks?indexUids={}", self.uid); if !types.is_empty() { let _ = write!(url, "&types={}", types.join(",")); @@ -140,6 +145,9 @@ impl Index<'_> { if !statuses.is_empty() { let _ = write!(url, "&statuses={}", statuses.join(",")); } + if !canceled_by.is_empty() { + let _ = write!(url, "&canceledBy={}", canceled_by.join(",")); + } self.service.get(url).await } diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 4af365a7e..8452955fd 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1077,7 +1077,7 @@ async fn batch_several_documents_addition() { futures::future::join_all(waiter).await; index.wait_task(9).await; - let (response, _code) = index.filtered_tasks(&[], &["failed"]).await; + let (response, _code) = 
index.filtered_tasks(&[], &["failed"], &[]).await; // Check if only the 6th task failed println!("{}", &response); diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 7fadf0a10..d44381729 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -115,7 +115,7 @@ async fn list_tasks_status_filtered() { .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; - let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await; + let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await; assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 1); @@ -126,7 +126,7 @@ async fn list_tasks_status_filtered() { index.wait_task(1).await; - let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await; + let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await; assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 2); } @@ -141,12 +141,12 @@ async fn list_tasks_type_filtered() { .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; - let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await; + let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await; assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 1); let (response, code) = - index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[]).await; + index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await; assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 2); } @@ -161,7 +161,7 @@ async fn list_tasks_status_and_type_filtered() { .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; - let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await; + let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await; assert_eq!(code, 200, "{}", response); assert_eq!(response["results"].as_array().unwrap().len(), 0); @@ -169,6 +169,7 @@ async fn list_tasks_status_and_type_filtered() { .filtered_tasks( &["indexCreation", "documentAdditionOrUpdate"], &["succeeded", "processing", "enqueued"], + &[], ) .await; assert_eq!(code, 200, "{}", response); From d3c796af380be64548c5957067d22584772ac9c2 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 18 Jan 2023 15:50:36 +0100 Subject: [PATCH 020/186] Add a new test to check that invalid canceledBy works correctly --- meilisearch/tests/tasks/mod.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index d44381729..2b429f798 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -151,6 +151,21 @@ async fn list_tasks_type_filtered() { assert_eq!(response["results"].as_array().unwrap().len(), 2); } +#[actix_rt::test] +async fn list_tasks_invalid_canceled_by_filter() { + let server = Server::new().await; + let index = server.index("test"); + index.create(None).await; + index.wait_task(0).await; + index + .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) + .await; + + let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await; + assert_eq!(code, 200, "{}", response); + assert_eq!(response["results"].as_array().unwrap().len(), 
0); +} + #[actix_rt::test] async fn list_tasks_status_and_type_filtered() { let server = Server::new().await; From e89973f1bf01c8a687275116205abf9d869df104 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 18 Jan 2023 15:25:27 +0100 Subject: [PATCH 021/186] Do not delete all tasks when no canceled-by matches --- index-scheduler/src/lib.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 0b9e856d2..4374a0612 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -502,13 +502,22 @@ impl IndexScheduler { } if let Some(canceled_by) = &query.canceled_by { + let mut all_canceled_tasks = RoaringBitmap::new(); for cancel_task_uid in canceled_by { if let Some(canceled_by_uid) = self.canceled_by.get(rtxn, &BEU32::new(*cancel_task_uid))? { - tasks &= canceled_by_uid; + all_canceled_tasks |= canceled_by_uid; } } + + // if the canceled_by has been specified but no task + // matches then we prefer matching zero than all tasks. + if all_canceled_tasks.is_empty() { + return Ok(RoaringBitmap::new()); + } else { + tasks &= all_canceled_tasks; + } } if let Some(kind) = &query.types { From 00f6af647521c0965ed9cfb178ecf3f0d0c60282 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 18 Jan 2023 17:26:48 +0100 Subject: [PATCH 022/186] fix a wrong error message --- meilisearch/src/error.rs | 4 ++-- meilisearch/tests/swap_indexes/errors.rs | 18 +++++++++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/meilisearch/src/error.rs b/meilisearch/src/error.rs index 9c77f4d3e..fcfe4b7fc 100644 --- a/meilisearch/src/error.rs +++ b/meilisearch/src/error.rs @@ -24,8 +24,8 @@ pub enum MeilisearchHttpError { MissingPayload(PayloadType), #[error("The provided payload reached the size limit.")] PayloadTooLarge, - #[error("Two indexes must be given for each swap. The list `{:?}` contains {} indexes.", - .0, .0.len() + #[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.", + .0.iter().map(|uid| format!("\"{uid}\"")).collect::>().join(", "), .0.len() )] SwapIndexPayloadWrongLength(Vec), #[error(transparent)] diff --git a/meilisearch/tests/swap_indexes/errors.rs b/meilisearch/tests/swap_indexes/errors.rs index a2697a37d..848f347f8 100644 --- a/meilisearch/tests/swap_indexes/errors.rs +++ b/meilisearch/tests/swap_indexes/errors.rs @@ -17,6 +17,7 @@ async fn swap_indexes_bad_format() { "link": "https://docs.meilisearch.com/errors#bad-request" } "###); + let (response, code) = server.index_swap(json!(["doggo"])).await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" @@ -43,16 +44,30 @@ async fn swap_indexes_bad_indexes() { "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes" } "###); + let (response, code) = server.index_swap(json!([{ "indexes": ["doggo"]}])).await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Two indexes must be given for each swap. The list `[IndexUid(\"doggo\")]` contains 1 indexes.", + "message": "Two indexes must be given for each swap. The list `[\"doggo\"]` contains 1 indexes.", "code": "invalid_swap_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes" } "###); + + let (response, code) = + server.index_swap(json!([{ "indexes": ["doggo", "crabo", "croco"]}])).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Two indexes must be given for each swap. 
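// Hedged sketch of the formatting introduced by this patch: each uid is
// wrapped in escaped quotes and comma-joined before interpolation, which is
// what produces the bracketed list in the message above.
// `format_swap_length_error` is an illustrative name.
fn format_swap_length_error(uids: &[&str]) -> String {
    let list = uids.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", ");
    format!(
        "Two indexes must be given for each swap. The list `[{list}]` contains {} indexes.",
        uids.len()
    )
}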
The list `[\"doggo\", \"crabo\", \"croco\"]` contains 3 indexes.", + "code": "invalid_swap_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes" + } + "###); + let (response, code) = server.index_swap(json!([{ "indexes": ["doggo", "doggo"]}])).await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" @@ -63,6 +78,7 @@ async fn swap_indexes_bad_indexes() { "link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found" } "###); + let (response, code) = server .index_swap(json!([{ "indexes": ["doggo", "catto"]}, { "indexes": ["girafo", "doggo"]}])) .await; From 5dcb920fb44895caf823d532a706fd2080b462d4 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 18 Jan 2023 18:27:00 +0100 Subject: [PATCH 023/186] improve the tests --- meilisearch/tests/auth/api_keys.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index 03910c0a9..9065b615a 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -1080,14 +1080,14 @@ async fn patch_api_key_description() { let uid = response["uid"].as_str().unwrap(); - // Add a description - let content = json!({ "description": "Indexing API key" }); + // Add a description and a name + let content = json!({ "description": "Indexing API key", "name": "bob" }); thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { - "name": null, + "name": "bob", "description": "Indexing API key", "key": "[ignored]", "uid": "[ignored]", @@ -1233,15 +1233,15 @@ async fn patch_api_key_name() { let created_at = response["createdAt"].as_str().unwrap(); let updated_at = response["updatedAt"].as_str().unwrap(); - // Add a name - let content = json!({ "name": "Indexing API key" }); + // Add a name and description + let content = json!({ "name": "Indexing API key", "description": "The doggoscription" }); thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { "name": "Indexing API key", - "description": null, + "description": "The doggoscription", "key": "[ignored]", "uid": "[ignored]", "actions": [ @@ -1302,7 +1302,7 @@ async fn patch_api_key_name() { meili_snap::snapshot!(code, @"200 OK"); // Remove the name - let content = json!({ "name": serde_json::Value::Null }); + let content = json!({ "name": null }); let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" From d0988e115f61d9bf0d6d90162256bc996029c0ad Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 18 Jan 2023 19:07:26 +0100 Subject: [PATCH 024/186] fix the patch of description and name for the api-key --- meilisearch-auth/src/lib.rs | 11 +++++++++-- meilisearch-types/src/keys.rs | 5 +++-- meilisearch/tests/auth/api_keys.rs | 8 ++++---- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 072b87dad..609409cf7 
100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -8,6 +8,7 @@ use std::sync::Arc; use error::{AuthControllerError, Result}; use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey}; +use meilisearch_types::milli::update::Setting; use meilisearch_types::star_or::StarOr; use serde::{Deserialize, Serialize}; pub use store::open_auth_store_env; @@ -41,8 +42,14 @@ impl AuthController { pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result { let mut key = self.get_key(uid)?; - key.description = patch.description; - key.name = patch.name; + match patch.description { + Setting::NotSet => (), + description => key.description = description.set(), + }; + match patch.name { + Setting::NotSet => (), + name => key.name = name.set(), + }; key.updated_at = OffsetDateTime::now_utc(); self.store.put_api_key(key) } diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index 655af9b31..7f81e39ac 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -4,6 +4,7 @@ use std::str::FromStr; use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef}; use enum_iterator::Sequence; +use milli::update::Setting; use serde::{Deserialize, Serialize}; use time::format_description::well_known::Rfc3339; use time::macros::{format_description, time}; @@ -78,9 +79,9 @@ fn deny_immutable_fields_api_key( #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)] pub struct PatchApiKey { #[deserr(default, error = DeserrJsonError)] - pub description: Option, + pub description: Setting, #[deserr(default, error = DeserrJsonError)] - pub name: Option, + pub name: Setting, } #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index 9065b615a..aa829448b 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -1119,7 +1119,7 @@ async fn patch_api_key_description() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { - "name": null, + "name": "bob", "description": "Product API key", "key": "[ignored]", "uid": "[ignored]", @@ -1151,7 +1151,7 @@ async fn patch_api_key_description() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { - "name": null, + "name": "bob", "description": null, "key": "[ignored]", "uid": "[ignored]", @@ -1276,7 +1276,7 @@ async fn patch_api_key_name() { meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { "name": "Product API key", - "description": null, + "description": "The doggoscription", "key": "[ignored]", "uid": "[ignored]", "actions": [ @@ -1308,7 +1308,7 @@ async fn patch_api_key_name() { meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]", ".uid" => "[ignored]", ".key" => "[ignored]" }), @r###" { "name": null, - "description": null, + "description": "The doggoscription", "key": "[ignored]", "uid": "[ignored]", "actions": [ From 
a2cd7214f0fc31cd983a46fa19d2f9fed5ca3d32 Mon Sep 17 00:00:00 2001 From: Philipp Ahlner Date: Wed, 18 Jan 2023 13:24:46 +0100 Subject: [PATCH 025/186] Fixes error message when lat/lng are unparseable --- milli/src/index.rs | 4 +--- milli/src/update/index_documents/enrich.rs | 11 +++++++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/milli/src/index.rs b/milli/src/index.rs index 7ed9af424..0ab596fa9 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -2321,9 +2321,7 @@ pub(crate) mod tests { .unwrap_err(); assert!(matches!( err1, - Error::UserError(UserError::InvalidGeoField( - GeoError::BadLatitudeAndLongitude { .. } - )) + Error::UserError(UserError::InvalidGeoField(GeoError::BadLatitudeAndLongitude { .. })) )); db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted diff --git a/milli/src/update/index_documents/enrich.rs b/milli/src/update/index_documents/enrich.rs index 3331497c9..4c735856d 100644 --- a/milli/src/update/index_documents/enrich.rs +++ b/milli/src/update/index_documents/enrich.rs @@ -98,7 +98,12 @@ pub fn enrich_documents_batch( // If the settings specifies that a _geo field must be used therefore we must check the // validity of it in all the documents of this batch and this is when we return `Some`. let geo_field_id = match documents_batch_index.id("_geo") { - Some(geo_field_id) if index.sortable_fields(rtxn)?.contains("_geo") => Some(geo_field_id), + Some(geo_field_id) + if index.sortable_fields(rtxn)?.contains("_geo") + || index.filterable_fields(rtxn)?.contains("_geo") => + { + Some(geo_field_id) + } _otherwise => None, }; @@ -367,7 +372,9 @@ pub fn extract_finite_float_from_value(value: Value) -> StdResult { pub fn validate_geo_from_json(id: &DocumentId, bytes: &[u8]) -> Result> { use GeoError::*; - let debug_id = || Value::from(id.debug()); + let debug_id = || { + serde_json::from_slice(id.value().as_bytes()).unwrap_or_else(|_| Value::from(id.debug())) + }; match serde_json::from_slice(bytes).map_err(InternalError::SerdeJson)? 
{ Value::Object(mut object) => match (object.remove("lat"), object.remove("lng")) { (Some(lat), Some(lng)) => { From 4fd6fd9bef9e097dbb1ea70b5ca4ab78dedf1a3e Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 19 Jan 2023 12:25:18 +0100 Subject: [PATCH 026/186] Indicate filterable attributes when the user set a non filterable attribute in facet distributions --- milli/src/error.rs | 54 ++++++++++++++++++-- milli/src/search/facet/facet_distribution.rs | 1 + 2 files changed, 51 insertions(+), 4 deletions(-) diff --git a/milli/src/error.rs b/milli/src/error.rs index 8734cb540..87cb3f360 100644 --- a/milli/src/error.rs +++ b/milli/src/error.rs @@ -1,5 +1,6 @@ use std::collections::BTreeSet; use std::convert::Infallible; +use std::fmt::Write; use std::{io, str}; use heed::{Error as HeedError, MdbError}; @@ -100,10 +101,11 @@ A document identifier can be of type integer or string, \ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).", .document_id.to_string() )] InvalidDocumentId { document_id: Value }, - #[error("Invalid facet distribution, the fields `{}` are not set as filterable.", - .invalid_facets_name.iter().map(AsRef::as_ref).collect::>().join(", ") - )] - InvalidFacetsDistribution { invalid_facets_name: BTreeSet }, + #[error("Invalid facet distribution, {}", format_invalid_filter_distribution(.invalid_facets_name, .valid_facets_name))] + InvalidFacetsDistribution { + invalid_facets_name: BTreeSet, + valid_facets_name: BTreeSet, + }, #[error(transparent)] InvalidGeoField(#[from] GeoError), #[error("{0}")] @@ -166,6 +168,50 @@ pub enum GeoError { BadLongitude { document_id: Value, value: Value }, } +fn format_invalid_filter_distribution( + invalid_facets_name: &BTreeSet, + valid_facets_name: &BTreeSet, +) -> String { + if valid_facets_name.is_empty() { + return "this index does not have configured filterable attributes.".into(); + } + + let mut result = String::new(); + + match invalid_facets_name.len() { + 0 => (), + 1 => write!( + result, + "attribute `{}` is not filterable.", + invalid_facets_name.first().unwrap() + ) + .unwrap(), + _ => write!( + result, + "attributes `{}` are not filterable.", + invalid_facets_name.iter().map(AsRef::as_ref).collect::>().join(", ") + ) + .unwrap(), + }; + + match valid_facets_name.len() { + 1 => write!( + result, + " The available filterable attribute is `{}`.", + valid_facets_name.first().unwrap() + ) + .unwrap(), + _ => write!( + result, + " The available filterable attributes are `{}`.", + valid_facets_name.iter().map(AsRef::as_ref).collect::>().join(", ") + ) + .unwrap(), + } + + result +} + /// A little macro helper to autogenerate From implementation that needs two `Into`. 
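// Hedged usage sketch for `format_invalid_filter_distribution` above; the set
// contents are illustrative and exercise the singular form of both halves of
// the message.
#[test]
fn sketch_invalid_filter_distribution_message() {
    use std::collections::BTreeSet;
    let invalid = BTreeSet::from(["doggo".to_string()]);
    let valid = BTreeSet::from(["title".to_string()]);
    assert_eq!(
        format_invalid_filter_distribution(&invalid, &valid),
        "attribute `doggo` is not filterable. The available filterable attribute is `title`."
    );
}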
/// Given the following parameters: `error_from_sub_error!(FieldIdMapMissingEntry => InternalError)` /// the macro will create the following code: diff --git a/milli/src/search/facet/facet_distribution.rs b/milli/src/search/facet/facet_distribution.rs index 43367abbb..4d5028ce0 100644 --- a/milli/src/search/facet/facet_distribution.rs +++ b/milli/src/search/facet/facet_distribution.rs @@ -291,6 +291,7 @@ impl<'a> FacetDistribution<'a> { if !invalid_fields.is_empty() { return Err(UserError::InvalidFacetsDistribution { invalid_facets_name: invalid_fields.into_iter().cloned().collect(), + valid_facets_name: filterable_fields.into_iter().collect(), } .into()); } else { From e8e7070cc67e9cbfa4169b61f00d09fc8cefc138 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 19 Jan 2023 12:42:08 +0100 Subject: [PATCH 027/186] improve the error message when no task filter are specified for the cancelation or deletion of tasks --- index-scheduler/src/error.rs | 4 ++-- meilisearch/tests/tasks/mod.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/index-scheduler/src/error.rs b/index-scheduler/src/error.rs index 95161fd5e..3264bda7a 100644 --- a/index-scheduler/src/error.rs +++ b/index-scheduler/src/error.rs @@ -100,9 +100,9 @@ pub enum Error { InvalidIndexUid { index_uid: String }, #[error("Task `{0}` not found.")] TaskNotFound(TaskId), - #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")] + #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")] TaskDeletionWithEmptyQuery, - #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")] + #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")] TaskCancelationWithEmptyQuery, #[error(transparent)] diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index fbb3968f8..361ac1083 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -248,7 +248,7 @@ async fn delete_task_filter_error() { assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", + "message": "Query parameters to filter the tasks to delete are missing. 
Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", "code": "missing_task_filters", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#missing-task-filters" @@ -286,7 +286,7 @@ async fn cancel_task_filter_error() { assert_eq!(code, 400, "{}", response); meili_snap::snapshot!(meili_snap::json_string!(response), @r###" { - "message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", + "message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", "code": "missing_task_filters", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#missing-task-filters" From 3d8ca62c351ffdaa89437ca5429fe6ea2cf52e75 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 19 Jan 2023 11:25:55 +0100 Subject: [PATCH 028/186] InvalidFacetDistribution returns invalid_search_facet --- meilisearch-types/src/error.rs | 2 +- meilisearch/tests/search/errors.rs | 31 ++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index bf38bb14f..bc320d275 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -327,7 +327,7 @@ impl ErrorCode for milli::Error { } UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists, UserError::SortRankingRuleMissing => Code::InvalidSearchSort, - UserError::InvalidFacetsDistribution { .. } => Code::BadRequest, + UserError::InvalidFacetsDistribution { .. } => Code::InvalidSearchFacets, UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort, UserError::CriterionError(_) => Code::InvalidSettingsRankingRules, UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField, diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index d8a19fff1..1149d71a5 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -321,6 +321,37 @@ async fn search_bad_facets() { // Can't make the `attributes_to_highlight` fail with a get search since it'll accept anything as an array of strings. 
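// Hedged sketch of the validation exercised by the new test below: requested
// facets are compared against the index's filterable attributes, and any
// leftovers populate `invalid_facets_name` and surface as
// `invalid_search_facets`. Names here are illustrative, not the actual milli
// internals.
fn invalid_facets(requested: &[&str], filterable: &std::collections::HashSet<String>) -> Vec<String> {
    requested
        .iter()
        .copied()
        .filter(|facet| !filterable.contains(*facet))
        .map(str::to_string)
        .collect()
}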
} +#[actix_rt::test] +async fn search_non_filterable_facets() { + let server = Server::new().await; + let index = server.index("test"); + index.update_settings(json!({"filterableAttributes": ["title"]})).await; + // Wait for the settings update to complete + index.wait_task(0).await; + + let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, the fields `doggo` are not set as filterable.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-search-facets" + } + "###); + + let (response, code) = index.search_get("facets=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, the fields `doggo` are not set as filterable.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-search-facets" + } + "###); +} + #[actix_rt::test] async fn search_bad_highlight_pre_tag() { let server = Server::new().await; From 3f048927a0336858d8a6888f466a55baa51584b4 Mon Sep 17 00:00:00 2001 From: curquiza Date: Thu, 19 Jan 2023 14:29:09 +0000 Subject: [PATCH 029/186] Update version for the next release (v0.39.2) in Cargo.toml files --- benchmarks/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- filter-parser/Cargo.toml | 2 +- flatten-serde-json/Cargo.toml | 2 +- json-depth-checker/Cargo.toml | 2 +- milli/Cargo.toml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 9f5b6190e..5fec31ee0 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "benchmarks" -version = "0.39.1" +version = "0.39.2" edition = "2018" publish = false diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 42fa4353a..3a40384bc 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.39.1" +version = "0.39.2" edition = "2018" description = "A CLI to interact with a milli index" publish = false diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index 38427ccdf..73754df26 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "filter-parser" -version = "0.39.1" +version = "0.39.2" edition = "2021" description = "The parser for the Meilisearch filter syntax" publish = false diff --git a/flatten-serde-json/Cargo.toml b/flatten-serde-json/Cargo.toml index 904baa0b4..e84a21798 100644 --- a/flatten-serde-json/Cargo.toml +++ b/flatten-serde-json/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flatten-serde-json" -version = "0.39.1" +version = "0.39.2" edition = "2021" description = "Flatten serde-json objects like elastic search" readme = "README.md" diff --git a/json-depth-checker/Cargo.toml b/json-depth-checker/Cargo.toml index 5210b8bf4..95ee2aa88 100644 --- a/json-depth-checker/Cargo.toml +++ b/json-depth-checker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "json-depth-checker" -version = "0.39.1" +version = "0.39.2" edition = "2021" description = "A library that indicates if a JSON must be flattened" publish = false diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 743cd23a9..dd0287331 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "milli" -version = "0.39.1" +version = "0.39.2" authors = ["Kerollmops "] edition = "2018" From 
f5ca421227405ab312f57b0f13da48e6586ccb90 Mon Sep 17 00:00:00 2001 From: Philipp Ahlner Date: Thu, 19 Jan 2023 15:39:21 +0100 Subject: [PATCH 030/186] Superfluous test removed --- milli/src/update/index_documents/mod.rs | 28 ------------------------- 1 file changed, 28 deletions(-) diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index f912a756a..7e13afb1b 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -965,34 +965,6 @@ mod tests { .unwrap(); } - #[test] - fn index_all_flavour_of_geo() { - let mut index = TempIndex::new(); - index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments; - - index - .update_settings(|settings| { - settings.set_filterable_fields(hashset!(S("_geo"))); - }) - .unwrap(); - - index - .add_documents(documents!([ - { "id": 0, "_geo": { "lat": 31, "lng": [42] } }, - { "id": 1, "_geo": { "lat": "31" }, "_geo.lng": 42 }, - { "id": 2, "_geo": { "lng": "42" }, "_geo.lat": "31" }, - { "id": 3, "_geo.lat": 31, "_geo.lng": "42" }, - ])) - .unwrap(); - - let rtxn = index.read_txn().unwrap(); - - let mut search = crate::Search::new(&rtxn, &index); - search.filter(crate::Filter::from_str("_geoRadius(31, 42, 0.000001)").unwrap().unwrap()); - let crate::SearchResult { documents_ids, .. } = search.execute().unwrap(); - assert_eq!(documents_ids, vec![0, 1, 2, 3]); - } - #[test] fn geo_error() { let mut index = TempIndex::new(); From b0c33ed6d2e7a955fc2def44d70c9a6211a390ab Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 19 Jan 2023 15:47:01 +0100 Subject: [PATCH 031/186] Error codes are underscore again --- meilisearch-types/src/error.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index bc320d275..0f551d584 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -155,10 +155,7 @@ macro_rules! 
make_error_codes { /// return the doc url associated with the error fn url(&self) -> String { - format!( - "https://docs.meilisearch.com/errors#{}", - self.name().to_case(convert_case::Case::Kebab) - ) + format!("https://docs.meilisearch.com/errors#{}", self.name()) } } pub mod deserr_codes { From 72e2b220ed11b7e6dbef2fe9c9de4bcec366f9f4 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 19 Jan 2023 15:48:20 +0100 Subject: [PATCH 032/186] Fix tests --- dump/src/reader/compat/v5_to_v6.rs | 2 +- dump/src/reader/mod.rs | 10 +-- .../first_swap_failed.snap | 2 +- .../after_processing_the_10_tasks.snap | 20 ++--- .../all_tasks_processed.snap | 20 ++--- .../five_tasks_processed.snap | 10 +-- .../all_tasks_processed.snap | 2 +- .../only_first_task_failed.snap | 2 +- meilisearch/src/routes/tasks.rs | 32 ++++---- meilisearch/tests/auth/api_keys.rs | 80 +++++++++---------- meilisearch/tests/documents/add_documents.rs | 4 +- meilisearch/tests/documents/errors.rs | 14 ++-- meilisearch/tests/index/create_index.rs | 2 +- meilisearch/tests/index/get_index.rs | 2 +- meilisearch/tests/search/errors.rs | 50 ++++++------ meilisearch/tests/settings/errors.rs | 48 +++++------ meilisearch/tests/settings/get_settings.rs | 4 +- meilisearch/tests/swap_indexes/errors.rs | 14 ++-- meilisearch/tests/tasks/errors.rs | 80 +++++++++---------- meilisearch/tests/tasks/mod.rs | 36 ++++----- 20 files changed, 217 insertions(+), 217 deletions(-) diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 8d345be45..6d1e698b3 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -439,7 +439,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c"); assert_eq!(update_files.len(), 22); assert!(update_files[0].is_none()); // the dump creation assert!(update_files[1].is_some()); // the enqueued document addition diff --git a/dump/src/reader/mod.rs b/dump/src/reader/mod.rs index 259c9ce53..cf671ea45 100644 --- a/dump/src/reader/mod.rs +++ b/dump/src/reader/mod.rs @@ -201,7 +201,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c"); assert_eq!(update_files.len(), 22); assert!(update_files[0].is_none()); // the dump creation assert!(update_files[1].is_some()); // the enqueued document addition @@ -279,7 +279,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"12eca43d5d1e1f334200eb4df653b0c9"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"c2445ddd1785528b80f2ba534d3bd00c"); assert_eq!(update_files.len(), 10); assert!(update_files[0].is_some()); // the enqueued document addition assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed @@ -356,7 +356,7 @@ pub(crate) mod test { // tasks let tasks = 
dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2f51c6345fabccf47b18c82bad618ffe"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"cd12efd308fe3ed226356a727ab42ed3"); assert_eq!(update_files.len(), 10); assert!(update_files[0].is_some()); // the enqueued document addition assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed @@ -449,7 +449,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b27292d0bb86d4b4dd1b375a46b33890"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"bc616290adfe7d09a624cf6065ca9069"); assert_eq!(update_files.len(), 9); assert!(update_files[0].is_some()); // the enqueued document addition assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed @@ -542,7 +542,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"9725ccfceea3f8d5846c44006c9e1e7b"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b3e3652bfc10a76670be157d2507d761"); assert_eq!(update_files.len(), 9); assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1 diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap index 3e95c5b25..fd9790835 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap @@ -10,7 +10,7 @@ source: index-scheduler/src/lib.rs 1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} 2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} 3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }} ---------------------------------------------------------------------- ### Status: enqueued [] diff --git 
a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap index 59e7a4509..ed28c121b 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap @@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: 
"doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: 
Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: 
ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} ---------------------------------------------------------------------- ### Status: enqueued [] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap index 05da8f83b..d995cab9e 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap @@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: failed, error: ResponseError { code: 200, 
message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not 
found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: 
"index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} ---------------------------------------------------------------------- ### Status: enqueued [] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap index 3c33a3f57..3ae875bff 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap @@ -6,11 +6,11 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, 
kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} 5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} 6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} 7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 
00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap index d2ebddf08..19ee47359 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap @@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} 1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} 3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap index 00edf1ef7..ed57bc4e3 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap @@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, 
message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} 3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index eb5cadf2d..b78be7876 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -567,7 +567,7 @@ mod tests { "message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_finished_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at" + "link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at" } "###); } @@ -579,7 +579,7 @@ mod tests { "message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_finished_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at" + "link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at" } "###); } @@ -591,7 +591,7 @@ mod tests { "message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_enqueued_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at" + "link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at" } "###); } @@ -604,7 +604,7 @@ mod tests { "message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_enqueued_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at" + "link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at" } "###); } @@ -616,7 +616,7 @@ mod tests { "message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_after_started_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at" + "link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at" } "###); } @@ -628,7 +628,7 @@ mod tests { "message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", "code": "invalid_task_before_started_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at" + "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at" } "###); } @@ -654,7 +654,7 @@ mod tests { "message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-uids" + "link": "https://docs.meilisearch.com/errors#invalid_task_uids" } "###); } @@ -666,7 +666,7 @@ mod tests { "message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-uids" + "link": "https://docs.meilisearch.com/errors#invalid_task_uids" } "###); } @@ -678,7 +678,7 @@ mod tests { "message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer", "code": "invalid_task_uids", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-uids" + "link": "https://docs.meilisearch.com/errors#invalid_task_uids" } "###); } @@ -704,7 +704,7 @@ mod tests { "message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.", "code": "invalid_task_statuses", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-statuses" + "link": "https://docs.meilisearch.com/errors#invalid_task_statuses" } "###); } @@ -729,7 +729,7 @@ mod tests { "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.", "code": "invalid_task_types", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-types" + "link": "https://docs.meilisearch.com/errors#invalid_task_types" } "###); } @@ -754,7 +754,7 @@ mod tests { "message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" } "###); } @@ -766,7 +766,7 @@ mod tests { "message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" } "###); } @@ -801,7 +801,7 @@ mod tests { "message": "Invalid value in parameter `from`: could not parse `*` as a positive integer", "code": "invalid_task_from", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-task-from" + "link": "https://docs.meilisearch.com/errors#invalid_task_from" } "###); } @@ -814,7 +814,7 @@ mod tests { "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#bad-request" + "link": "https://docs.meilisearch.com/errors#bad_request" } "###); } @@ -827,7 +827,7 @@ mod tests { "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`", "code": "bad_request", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#bad-request" + "link": "https://docs.meilisearch.com/errors#bad_request" } "###); } diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index aa829448b..f419a71cf 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -205,7 +205,7 @@ async fn error_add_api_key_no_header() { "message": "The Authorization header is missing. 
It must use the bearer authorization method.", "code": "missing_authorization_header", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-authorization-header" + "link": "https://docs.meilisearch.com/errors#missing_authorization_header" } "###); } @@ -228,7 +228,7 @@ async fn error_add_api_key_bad_key() { "message": "The provided API key is invalid.", "code": "invalid_api_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#invalid-api-key" + "link": "https://docs.meilisearch.com/errors#invalid_api_key" } "###); } @@ -251,7 +251,7 @@ async fn error_add_api_key_missing_parameter() { "message": "Missing field `indexes`", "code": "missing_api_key_indexes", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#missing-api-key-indexes" + "link": "https://docs.meilisearch.com/errors#missing_api_key_indexes" } "###); @@ -268,7 +268,7 @@ async fn error_add_api_key_missing_parameter() { "message": "Missing field `actions`", "code": "missing_api_key_actions", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#missing-api-key-actions" + "link": "https://docs.meilisearch.com/errors#missing_api_key_actions" } "###); @@ -285,7 +285,7 @@ async fn error_add_api_key_missing_parameter() { "message": "Missing field `expiresAt`", "code": "missing_api_key_expires_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#missing-api-key-expires-at" + "link": "https://docs.meilisearch.com/errors#missing_api_key_expires_at" } "###); } @@ -308,7 +308,7 @@ async fn error_add_api_key_invalid_parameters_description() { "message": "Invalid value type at `.description`: expected a string, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_description", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-description" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_description" } "###); } @@ -331,7 +331,7 @@ async fn error_add_api_key_invalid_parameters_name() { "message": "Invalid value type at `.name`: expected a string, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_name", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-name" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_name" } "###); } @@ -354,7 +354,7 @@ async fn error_add_api_key_invalid_parameters_indexes() { "message": "Invalid value type at `.indexes`: expected an array, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_indexes", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" } "###); } @@ -380,7 +380,7 @@ async fn error_add_api_key_invalid_index_uids() { "message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_api_key_indexes", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" } "###); } @@ -405,7 +405,7 @@ async fn error_add_api_key_invalid_parameters_actions() { "message": "Invalid value type at `.actions`: expected an array, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_actions", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-actions" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" } "###); @@ -425,7 +425,7 @@ async fn error_add_api_key_invalid_parameters_actions() { "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`", "code": "invalid_api_key_actions", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-actions" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" } "###); } @@ -449,7 +449,7 @@ async fn error_add_api_key_invalid_parameters_expires_at() { "message": "Invalid value type at `.expiresAt`: expected a string, but found an object: `{\"name\":\"products\"}`", "code": "invalid_api_key_expires_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-expires-at" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" } "###); } @@ -472,7 +472,7 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() { "message": "Invalid value at `.expiresAt`: `2010-11-13T00:00:00Z` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 
'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.\n", "code": "invalid_api_key_expires_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-expires-at" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -497,7 +497,7 @@ async fn error_add_api_key_invalid_parameters_uid() { "message": "Invalid value at `.uid`: invalid length: expected length 32 for simple format, found 13", "code": "invalid_api_key_uid", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-uid" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -542,7 +542,7 @@ async fn error_add_api_key_parameters_uid_already_exist() { "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.", "code": "api_key_already_exists", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#api-key-already-exists" + "link": "https://docs.meilisearch.com/errors#api_key_already_exists" } "###); meili_snap::snapshot!(code, @"409 Conflict"); @@ -688,7 +688,7 @@ async fn error_get_api_key_no_header() { "message": "The Authorization header is missing. It must use the bearer authorization method.", "code": "missing_authorization_header", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-authorization-header" + "link": "https://docs.meilisearch.com/errors#missing_authorization_header" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -707,7 +707,7 @@ async fn error_get_api_key_bad_key() { "message": "The provided API key is invalid.", "code": "invalid_api_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#invalid-api-key" + "link": "https://docs.meilisearch.com/errors#invalid_api_key" } "###); meili_snap::snapshot!(code, @"403 Forbidden"); @@ -726,7 +726,7 @@ async fn error_get_api_key_not_found() { "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.", "code": "api_key_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#api-key-not-found" + "link": "https://docs.meilisearch.com/errors#api_key_not_found" } "###); meili_snap::snapshot!(code, @"404 Not Found"); @@ -870,7 +870,7 @@ async fn error_list_api_keys_no_header() { "message": "The Authorization header is missing. It must use the bearer authorization method.", "code": "missing_authorization_header", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-authorization-header" + "link": "https://docs.meilisearch.com/errors#missing_authorization_header" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -887,7 +887,7 @@ async fn error_list_api_keys_bad_key() { "message": "The provided API key is invalid.", "code": "invalid_api_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#invalid-api-key" + "link": "https://docs.meilisearch.com/errors#invalid_api_key" } "###); meili_snap::snapshot!(code, @"403 Forbidden"); @@ -964,7 +964,7 @@ async fn delete_api_key() { "message": "[ignored]", "code": "api_key_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#api-key-not-found" + "link": "https://docs.meilisearch.com/errors#api_key_not_found" } "###); meili_snap::snapshot!(code, @"404 Not Found"); @@ -983,7 +983,7 @@ async fn error_delete_api_key_no_header() { "message": "The Authorization header is missing. 
It must use the bearer authorization method.", "code": "missing_authorization_header", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-authorization-header" + "link": "https://docs.meilisearch.com/errors#missing_authorization_header" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1002,7 +1002,7 @@ async fn error_delete_api_key_bad_key() { "message": "The provided API key is invalid.", "code": "invalid_api_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#invalid-api-key" + "link": "https://docs.meilisearch.com/errors#invalid_api_key" } "###); meili_snap::snapshot!(code, @"403 Forbidden"); @@ -1021,7 +1021,7 @@ async fn error_delete_api_key_not_found() { "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.", "code": "api_key_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#api-key-not-found" + "link": "https://docs.meilisearch.com/errors#api_key_not_found" } "###); meili_snap::snapshot!(code, @"404 Not Found"); @@ -1397,7 +1397,7 @@ async fn error_patch_api_key_indexes() { "message": "Unknown field `indexes`: expected one of `description`, `name`", "code": "immutable_api_key_indexes", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#immutable-api-key-indexes" + "link": "https://docs.meilisearch.com/errors#immutable_api_key_indexes" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -1474,7 +1474,7 @@ async fn error_patch_api_key_actions() { "message": "Unknown field `actions`: expected one of `description`, `name`", "code": "immutable_api_key_actions", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#immutable-api-key-actions" + "link": "https://docs.meilisearch.com/errors#immutable_api_key_actions" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -1543,7 +1543,7 @@ async fn error_patch_api_key_expiration_date() { "message": "Unknown field `expiresAt`: expected one of `description`, `name`", "code": "immutable_api_key_expires_at", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#immutable-api-key-expires-at" + "link": "https://docs.meilisearch.com/errors#immutable_api_key_expires_at" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -1565,7 +1565,7 @@ async fn error_patch_api_key_no_header() { "message": "The Authorization header is missing. 
It must use the bearer authorization method.", "code": "missing_authorization_header", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-authorization-header" + "link": "https://docs.meilisearch.com/errors#missing_authorization_header" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1588,7 +1588,7 @@ async fn error_patch_api_key_bad_key() { "message": "The provided API key is invalid.", "code": "invalid_api_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#invalid-api-key" + "link": "https://docs.meilisearch.com/errors#invalid_api_key" } "###); meili_snap::snapshot!(code, @"403 Forbidden"); @@ -1611,7 +1611,7 @@ async fn error_patch_api_key_not_found() { "message": "API key `d0552b41d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4` not found.", "code": "api_key_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#api-key-not-found" + "link": "https://docs.meilisearch.com/errors#api_key_not_found" } "###); meili_snap::snapshot!(code, @"404 Not Found"); @@ -1664,7 +1664,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { "message": "Invalid value type at `.description`: expected a string, but found a positive integer: `13`", "code": "invalid_api_key_description", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-description" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_description" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -1680,7 +1680,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { "message": "Invalid value type at `.name`: expected a string, but found a positive integer: `13`", "code": "invalid_api_key_name", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-api-key-name" + "link": "https://docs.meilisearch.com/errors#invalid_api_key_name" } "###); meili_snap::snapshot!(code, @"400 Bad Request"); @@ -1696,7 +1696,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1707,7 +1707,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1718,7 +1718,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1729,7 +1729,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. 
To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1742,7 +1742,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1753,7 +1753,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1764,7 +1764,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); @@ -1775,7 +1775,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#missing-master-key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" } "###); meili_snap::snapshot!(code, @"401 Unauthorized"); diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 8452955fd..64220e033 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -929,7 +929,7 @@ async fn error_primary_key_inference() { "message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", "code": "index_primary_key_no_candidate_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found" + "link": "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, "duration": "[duration]", "enqueuedAt": "[date]", @@ -969,7 +969,7 @@ async fn error_primary_key_inference() { "message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. 
Please specify the primary key manually using the `primaryKey` query parameter.",
        "code": "index_primary_key_multiple_candidates_found",
        "type": "invalid_request",
-       "link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found"
+       "link": "https://docs.meilisearch.com/errors#index_primary_key_multiple_candidates_found"
      },
      "duration": "[duration]",
      "enqueuedAt": "[date]",
diff --git a/meilisearch/tests/documents/errors.rs b/meilisearch/tests/documents/errors.rs
index 4c50a8e02..ffec01062 100644
--- a/meilisearch/tests/documents/errors.rs
+++ b/meilisearch/tests/documents/errors.rs
@@ -15,7 +15,7 @@ async fn get_all_documents_bad_offset() {
       "message": "Invalid value in parameter `offset`: could not parse `` as a positive integer",
       "code": "invalid_document_offset",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-document-offset"
+      "link": "https://docs.meilisearch.com/errors#invalid_document_offset"
     }
     "###);

@@ -26,7 +26,7 @@ async fn get_all_documents_bad_offset() {
       "message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer",
       "code": "invalid_document_offset",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-document-offset"
+      "link": "https://docs.meilisearch.com/errors#invalid_document_offset"
     }
     "###);

@@ -37,7 +37,7 @@ async fn get_all_documents_bad_offset() {
       "message": "Invalid value in parameter `offset`: could not parse `-1` as a positive integer",
       "code": "invalid_document_offset",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-document-offset"
+      "link": "https://docs.meilisearch.com/errors#invalid_document_offset"
     }
     "###);
 }
@@ -54,7 +54,7 @@ async fn get_all_documents_bad_limit() {
       "message": "Invalid value in parameter `limit`: could not parse `` as a positive integer",
       "code": "invalid_document_limit",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-document-limit"
+      "link": "https://docs.meilisearch.com/errors#invalid_document_limit"
     }
     "###);

@@ -65,7 +65,7 @@ async fn get_all_documents_bad_limit() {
       "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
       "code": "invalid_document_limit",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-document-limit"
+      "link": "https://docs.meilisearch.com/errors#invalid_document_limit"
     }
     "###);

@@ -76,7 +76,7 @@ async fn get_all_documents_bad_limit() {
       "message": "Invalid value in parameter `limit`: could not parse `-1` as a positive integer",
       "code": "invalid_document_limit",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-document-limit"
+      "link": "https://docs.meilisearch.com/errors#invalid_document_limit"
     }
     "###);
 }
@@ -93,7 +93,7 @@ async fn delete_documents_batch() {
       "message": "Json deserialize error: invalid type: string \"doggo\", expected a sequence at line 1 column 7",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);
 }
diff --git a/meilisearch/tests/index/create_index.rs b/meilisearch/tests/index/create_index.rs
index 884a0b069..b4512c60d 100644
--- a/meilisearch/tests/index/create_index.rs
+++ b/meilisearch/tests/index/create_index.rs
@@ -195,7 +195,7 @@ async fn error_create_with_invalid_index_uid() {
       "message": "Invalid value at `.uid`: `test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_index_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
     }
     "###);
 }
diff --git a/meilisearch/tests/index/get_index.rs b/meilisearch/tests/index/get_index.rs
index 6e70484f6..d73360cd2 100644
--- a/meilisearch/tests/index/get_index.rs
+++ b/meilisearch/tests/index/get_index.rs
@@ -189,7 +189,7 @@ async fn get_invalid_index_uid() {
       "message": "`this is not a valid index name` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_index_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
     }
     "###);
 }
diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs
index 1149d71a5..e8cf3f91c 100644
--- a/meilisearch/tests/search/errors.rs
+++ b/meilisearch/tests/search/errors.rs
@@ -49,7 +49,7 @@ async fn search_bad_q() {
       "message": "Invalid value type at `.q`: expected a string, but found an array: `[\"doggo\"]`",
       "code": "invalid_search_q",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-q"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_q"
     }
     "###);
     // Can't make the `q` fail with a get search since it'll accept anything as a string.
@@ -67,7 +67,7 @@ async fn search_bad_offset() {
       "message": "Invalid value type at `.offset`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_search_offset",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-offset"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_offset"
     }
     "###);

@@ -78,7 +78,7 @@ async fn search_bad_offset() {
       "message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer",
       "code": "invalid_search_offset",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-offset"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_offset"
     }
     "###);
 }
@@ -95,7 +95,7 @@ async fn search_bad_limit() {
       "message": "Invalid value type at `.limit`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_search_limit",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-limit"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_limit"
     }
     "###);

@@ -106,7 +106,7 @@ async fn search_bad_limit() {
       "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
       "code": "invalid_search_limit",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-limit"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_limit"
     }
     "###);
 }
@@ -123,7 +123,7 @@ async fn search_bad_page() {
       "message": "Invalid value type at `.page`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_search_page",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-page"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_page"
     }
     "###);

@@ -134,7 +134,7 @@ async fn search_bad_page() {
       "message": "Invalid value in parameter `page`: could not parse `doggo` as a positive integer",
       "code": "invalid_search_page",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-page"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_page"
     }
     "###);
 }
@@ -151,7 +151,7 @@ async fn search_bad_hits_per_page() {
       "message": "Invalid value type at `.hitsPerPage`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_search_hits_per_page",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_hits_per_page"
     }
     "###);

@@ -162,7 +162,7 @@ async fn search_bad_hits_per_page() {
       "message": "Invalid value in parameter `hitsPerPage`: could not parse `doggo` as a positive integer",
       "code": "invalid_search_hits_per_page",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_hits_per_page"
     }
     "###);
 }
@@ -179,7 +179,7 @@ async fn search_bad_attributes_to_crop() {
       "message": "Invalid value type at `.attributesToCrop`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_search_attributes_to_crop",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-crop"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_crop"
     }
     "###);
     // Can't make the `attributes_to_crop` fail with a get search since it'll accept anything as an array of strings.
@@ -197,7 +197,7 @@ async fn search_bad_crop_length() {
       "message": "Invalid value type at `.cropLength`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_search_crop_length",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-crop-length"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_crop_length"
     }
     "###);

@@ -208,7 +208,7 @@ async fn search_bad_crop_length() {
       "message": "Invalid value in parameter `cropLength`: could not parse `doggo` as a positive integer",
       "code": "invalid_search_crop_length",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-crop-length"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_crop_length"
     }
     "###);
 }
@@ -225,7 +225,7 @@ async fn search_bad_attributes_to_highlight() {
       "message": "Invalid value type at `.attributesToHighlight`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_search_attributes_to_highlight",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-highlight"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_highlight"
     }
     "###);
     // Can't make the `attributes_to_highlight` fail with a get search since it'll accept anything as an array of strings.
@@ -251,7 +251,7 @@ async fn search_bad_filter() {
       "message": "Invalid syntax for the filter parameter: `expected String, Array, found: true`.",
       "code": "invalid_search_filter",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
     }
     "###);
     // Can't make the `filter` fail with a get search since it'll accept anything as a strings.
@@ -269,7 +269,7 @@ async fn search_bad_sort() {
       "message": "Invalid value type at `.sort`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_search_sort",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-sort"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_sort"
     }
     "###);
     // Can't make the `sort` fail with a get search since it'll accept anything as a strings.
@@ -287,7 +287,7 @@ async fn search_bad_show_matches_position() {
       "message": "Invalid value type at `.showMatchesPosition`: expected a boolean, but found a string: `\"doggo\"`",
       "code": "invalid_search_show_matches_position",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_show_matches_position"
     }
     "###);

@@ -298,7 +298,7 @@ async fn search_bad_show_matches_position() {
       "message": "Invalid value in parameter `showMatchesPosition`: could not parse `doggo` as a boolean, expected either `true` or `false`",
       "code": "invalid_search_show_matches_position",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_show_matches_position"
     }
     "###);
 }
@@ -315,7 +315,7 @@ async fn search_bad_facets() {
       "message": "Invalid value type at `.facets`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_search_facets",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-facets"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_facets"
     }
     "###);
     // Can't make the `attributes_to_highlight` fail with a get search since it'll accept anything as an array of strings.
@@ -336,7 +336,7 @@ async fn search_non_filterable_facets() {
       "message": "Invalid facet distribution, the fields `doggo` are not set as filterable.",
       "code": "invalid_search_facets",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-facets"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_facets"
     }
     "###);

@@ -347,7 +347,7 @@ async fn search_non_filterable_facets() {
       "message": "Invalid facet distribution, the fields `doggo` are not set as filterable.",
       "code": "invalid_search_facets",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-facets"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_facets"
     }
     "###);
 }
@@ -364,7 +364,7 @@ async fn search_bad_highlight_pre_tag() {
       "message": "Invalid value type at `.highlightPreTag`: expected a string, but found an array: `[\"doggo\"]`",
       "code": "invalid_search_highlight_pre_tag",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-highlight-pre-tag"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_highlight_pre_tag"
     }
     "###);
     // Can't make the `highlight_pre_tag` fail with a get search since it'll accept anything as a strings.
@@ -382,7 +382,7 @@ async fn search_bad_highlight_post_tag() {
       "message": "Invalid value type at `.highlightPostTag`: expected a string, but found an array: `[\"doggo\"]`",
       "code": "invalid_search_highlight_post_tag",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-highlight-post-tag"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_highlight_post_tag"
     }
     "###);
     // Can't make the `highlight_post_tag` fail with a get search since it'll accept anything as a strings.
@@ -400,7 +400,7 @@ async fn search_bad_crop_marker() {
       "message": "Invalid value type at `.cropMarker`: expected a string, but found an array: `[\"doggo\"]`",
       "code": "invalid_search_crop_marker",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-crop-marker"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_crop_marker"
     }
     "###);
     // Can't make the `crop_marker` fail with a get search since it'll accept anything as a strings.
@@ -418,7 +418,7 @@ async fn search_bad_matching_strategy() {
       "message": "Unknown value `doggo` at `.matchingStrategy`: expected one of `last`, `all`",
       "code": "invalid_search_matching_strategy",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_matching_strategy"
     }
     "###);

@@ -429,7 +429,7 @@ async fn search_bad_matching_strategy() {
       "message": "Unknown value `doggo` for parameter `matchingStrategy`: expected one of `last`, `all`",
       "code": "invalid_search_matching_strategy",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
+      "link": "https://docs.meilisearch.com/errors#invalid_search_matching_strategy"
     }
     "###);
 }
diff --git a/meilisearch/tests/settings/errors.rs b/meilisearch/tests/settings/errors.rs
index b4bdb27ca..52dadcf98 100644
--- a/meilisearch/tests/settings/errors.rs
+++ b/meilisearch/tests/settings/errors.rs
@@ -15,7 +15,7 @@ async fn settings_bad_displayed_attributes() {
       "message": "Invalid value type at `.displayedAttributes`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_displayed_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_displayed_attributes"
     }
     "###);

@@ -26,7 +26,7 @@ async fn settings_bad_displayed_attributes() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_displayed_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_displayed_attributes"
     }
     "###);
 }
@@ -43,7 +43,7 @@ async fn settings_bad_searchable_attributes() {
       "message": "Invalid value type at `.searchableAttributes`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_searchable_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_searchable_attributes"
     }
     "###);

@@ -54,7 +54,7 @@ async fn settings_bad_searchable_attributes() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_searchable_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_searchable_attributes"
     }
     "###);
 }
@@ -71,7 +71,7 @@ async fn settings_bad_filterable_attributes() {
       "message": "Invalid value type at `.filterableAttributes`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_filterable_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_filterable_attributes"
     }
     "###);

@@ -82,7 +82,7 @@ async fn settings_bad_filterable_attributes() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_filterable_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_filterable_attributes"
     }
     "###);
 }
@@ -99,7 +99,7 @@ async fn settings_bad_sortable_attributes() {
       "message": "Invalid value type at `.sortableAttributes`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_sortable_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_sortable_attributes"
     }
     "###);

@@ -110,7 +110,7 @@ async fn settings_bad_sortable_attributes() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_sortable_attributes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_sortable_attributes"
     }
     "###);
 }
@@ -127,7 +127,7 @@ async fn settings_bad_ranking_rules() {
       "message": "Invalid value type at `.rankingRules`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_ranking_rules",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_ranking_rules"
     }
     "###);

@@ -138,7 +138,7 @@ async fn settings_bad_ranking_rules() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_ranking_rules",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_ranking_rules"
     }
     "###);
 }
@@ -155,7 +155,7 @@ async fn settings_bad_stop_words() {
       "message": "Invalid value type at `.stopWords`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_stop_words",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_stop_words"
     }
     "###);

@@ -166,7 +166,7 @@ async fn settings_bad_stop_words() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_settings_stop_words",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_stop_words"
     }
     "###);
 }
@@ -183,7 +183,7 @@ async fn settings_bad_synonyms() {
       "message": "Invalid value type at `.synonyms`: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_synonyms",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_synonyms"
     }
     "###);

@@ -194,7 +194,7 @@ async fn settings_bad_synonyms() {
       "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_synonyms",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_synonyms"
     }
     "###);
 }
@@ -211,7 +211,7 @@ async fn settings_bad_distinct_attribute() {
       "message": "Invalid value type at `.distinctAttribute`: expected a string, but found an array: `[\"doggo\"]`",
       "code": "invalid_settings_distinct_attribute",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_distinct_attribute"
     }
     "###);

@@ -222,7 +222,7 @@ async fn settings_bad_distinct_attribute() {
       "message": "Invalid value type: expected a string, but found an array: `[\"doggo\"]`",
       "code": "invalid_settings_distinct_attribute",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_distinct_attribute"
     }
     "###);
 }
@@ -239,7 +239,7 @@ async fn settings_bad_typo_tolerance() {
       "message": "Invalid value type at `.typoTolerance`: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_typo_tolerance",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_typo_tolerance"
     }
     "###);

@@ -251,7 +251,7 @@ async fn settings_bad_typo_tolerance() {
       "message": "Invalid value type at `.typoTolerance.minWordSizeForTypos`: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_typo_tolerance",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_typo_tolerance"
     }
     "###);

@@ -262,7 +262,7 @@ async fn settings_bad_typo_tolerance() {
       "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_typo_tolerance",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_typo_tolerance"
     }
     "###);

@@ -277,7 +277,7 @@ async fn settings_bad_typo_tolerance() {
       "message": "Unknown field `typoTolerance`: expected one of `enabled`, `minWordSizeForTypos`, `disableOnWords`, `disableOnAttributes`",
       "code": "invalid_settings_typo_tolerance",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_typo_tolerance"
     }
     "###);
 }
@@ -294,7 +294,7 @@ async fn settings_bad_faceting() {
       "message": "Invalid value type at `.faceting`: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_faceting",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_faceting"
     }
     "###);

@@ -305,7 +305,7 @@ async fn settings_bad_faceting() {
       "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_faceting",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_faceting"
     }
     "###);
 }
@@ -322,7 +322,7 @@ async fn settings_bad_pagination() {
       "message": "Invalid value type at `.pagination`: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_pagination",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
     }
     "###);

@@ -333,7 +333,7 @@ async fn settings_bad_pagination() {
       "message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
       "code": "invalid_settings_pagination",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
     }
     "###);
 }
diff --git a/meilisearch/tests/settings/get_settings.rs b/meilisearch/tests/settings/get_settings.rs
index f18787e19..88e395b57 100644
--- a/meilisearch/tests/settings/get_settings.rs
+++ b/meilisearch/tests/settings/get_settings.rs
@@ -185,7 +185,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
       "message": "`test##!  ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_index_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
     }
     "###);
 }
@@ -285,7 +285,7 @@ async fn error_set_invalid_ranking_rules() {
       "message": "Invalid value at `.rankingRules[0]`: `manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
       "code": "invalid_settings_ranking_rules",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_ranking_rules"
     }
     "###);
 }
diff --git a/meilisearch/tests/swap_indexes/errors.rs b/meilisearch/tests/swap_indexes/errors.rs
index 848f347f8..03625fd08 100644
--- a/meilisearch/tests/swap_indexes/errors.rs
+++ b/meilisearch/tests/swap_indexes/errors.rs
@@ -14,7 +14,7 @@ async fn swap_indexes_bad_format() {
       "message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -25,7 +25,7 @@ async fn swap_indexes_bad_format() {
       "message": "Invalid value type at `[0]`: expected an object, but found a string: `\"doggo\"`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);
 }
@@ -41,7 +41,7 @@ async fn swap_indexes_bad_indexes() {
       "message": "Invalid value type at `[0].indexes`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_swap_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
+      "link": "https://docs.meilisearch.com/errors#invalid_swap_indexes"
     }
     "###);

@@ -52,7 +52,7 @@ async fn swap_indexes_bad_indexes() {
       "message": "Two indexes must be given for each swap. The list `[\"doggo\"]` contains 1 indexes.",
       "code": "invalid_swap_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
+      "link": "https://docs.meilisearch.com/errors#invalid_swap_indexes"
     }
     "###);

@@ -64,7 +64,7 @@ async fn swap_indexes_bad_indexes() {
       "message": "Two indexes must be given for each swap. The list `[\"doggo\", \"crabo\", \"croco\"]` contains 3 indexes.",
       "code": "invalid_swap_indexes",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
+      "link": "https://docs.meilisearch.com/errors#invalid_swap_indexes"
     }
     "###);

@@ -75,7 +75,7 @@ async fn swap_indexes_bad_indexes() {
       "message": "Indexes must be declared only once during a swap. `doggo` was specified several times.",
       "code": "invalid_swap_duplicate_index_found",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found"
+      "link": "https://docs.meilisearch.com/errors#invalid_swap_duplicate_index_found"
     }
     "###);

@@ -88,7 +88,7 @@ async fn swap_indexes_bad_indexes() {
       "message": "Indexes must be declared only once during a swap. `doggo` was specified several times.",
       "code": "invalid_swap_duplicate_index_found",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found"
+      "link": "https://docs.meilisearch.com/errors#invalid_swap_duplicate_index_found"
     }
     "###);
 }
diff --git a/meilisearch/tests/tasks/errors.rs b/meilisearch/tests/tasks/errors.rs
index a15c0eca0..830c4c8e7 100644
--- a/meilisearch/tests/tasks/errors.rs
+++ b/meilisearch/tests/tasks/errors.rs
@@ -13,7 +13,7 @@ async fn task_bad_uids() {
       "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);

@@ -24,7 +24,7 @@ async fn task_bad_uids() {
       "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);

@@ -35,7 +35,7 @@ async fn task_bad_uids() {
       "message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);

@@ -46,7 +46,7 @@ async fn task_bad_uids() {
       "message": "Invalid value in parameter `uids[1]`: could not parse `dogo` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);
 }
@@ -62,7 +62,7 @@ async fn task_bad_canceled_by() {
       "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_canceled_by",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_canceled_by"
     }
     "###);

@@ -73,7 +73,7 @@ async fn task_bad_canceled_by() {
       "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_canceled_by",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_canceled_by"
     }
     "###);

@@ -84,7 +84,7 @@ async fn task_bad_canceled_by() {
       "message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_canceled_by",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_canceled_by"
     }
     "###);
 }
@@ -100,7 +100,7 @@ async fn task_bad_types() {
       "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
       "code": "invalid_task_types",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-types"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_types"
     }
     "###);

@@ -111,7 +111,7 @@ async fn task_bad_types() {
       "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
       "code": "invalid_task_types",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-types"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_types"
     }
     "###);

@@ -122,7 +122,7 @@ async fn task_bad_types() {
       "message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
       "code": "invalid_task_types",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-types"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_types"
     }
     "###);
 }
@@ -138,7 +138,7 @@ async fn task_bad_statuses() {
       "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
       "code": "invalid_task_statuses",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
     }
     "###);

@@ -149,7 +149,7 @@ async fn task_bad_statuses() {
       "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
       "code": "invalid_task_statuses",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
     }
     "###);

@@ -160,7 +160,7 @@ async fn task_bad_statuses() {
       "message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
       "code": "invalid_task_statuses",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
     }
     "###);
 }
@@ -176,7 +176,7 @@ async fn task_bad_index_uids() {
       "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_index_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
     }
     "###);

@@ -187,7 +187,7 @@ async fn task_bad_index_uids() {
       "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_index_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
     }
     "###);

@@ -198,7 +198,7 @@ async fn task_bad_index_uids() {
       "message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
       "code": "invalid_index_uid",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-index-uid"
+      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
     }
     "###);
 }
@@ -214,7 +214,7 @@ async fn task_bad_limit() {
       "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_limit",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-limit"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_limit"
     }
     "###);

@@ -225,7 +225,7 @@ async fn task_bad_limit() {
       "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -236,7 +236,7 @@ async fn task_bad_limit() {
       "message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);
 }
@@ -252,7 +252,7 @@ async fn task_bad_from() {
       "message": "Invalid value in parameter `from`: could not parse `doggo` as a positive integer",
       "code": "invalid_task_from",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-from"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_from"
     }
     "###);

@@ -263,7 +263,7 @@ async fn task_bad_from() {
       "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -274,7 +274,7 @@ async fn task_bad_from() {
       "message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);
 }
@@ -290,7 +290,7 @@ async fn task_bad_after_enqueued_at() {
       "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_enqueued_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
     }
     "###);

@@ -301,7 +301,7 @@ async fn task_bad_after_enqueued_at() {
       "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_enqueued_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
     }
     "###);

@@ -312,7 +312,7 @@ async fn task_bad_after_enqueued_at() {
       "message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_enqueued_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
     }
     "###);
 }
@@ -328,7 +328,7 @@ async fn task_bad_before_enqueued_at() {
       "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_enqueued_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
     }
     "###);

@@ -339,7 +339,7 @@ async fn task_bad_before_enqueued_at() {
       "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_enqueued_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
     }
     "###);

@@ -350,7 +350,7 @@ async fn task_bad_before_enqueued_at() {
       "message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_enqueued_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
     }
     "###);
 }
@@ -366,7 +366,7 @@ async fn task_bad_after_started_at() {
       "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
     }
     "###);

@@ -377,7 +377,7 @@ async fn task_bad_after_started_at() {
       "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
     }
     "###);

@@ -388,7 +388,7 @@ async fn task_bad_after_started_at() {
       "message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
     }
     "###);
 }
@@ -404,7 +404,7 @@ async fn task_bad_before_started_at() {
       "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
     }
     "###);

@@ -415,7 +415,7 @@ async fn task_bad_before_started_at() {
       "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
     }
     "###);

@@ -426,7 +426,7 @@ async fn task_bad_before_started_at() {
       "message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
     }
     "###);
 }
@@ -442,7 +442,7 @@ async fn task_bad_after_finished_at() {
       "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_finished_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
     }
     "###);

@@ -453,7 +453,7 @@ async fn task_bad_after_finished_at() {
       "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_finished_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
     }
     "###);

@@ -464,7 +464,7 @@ async fn task_bad_after_finished_at() {
       "message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_after_finished_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
     }
     "###);
 }
@@ -480,7 +480,7 @@ async fn task_bad_before_finished_at() {
       "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_finished_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
     }
     "###);

@@ -491,7 +491,7 @@ async fn task_bad_before_finished_at() {
       "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_finished_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
     }
     "###);

@@ -502,7 +502,7 @@ async fn task_bad_before_finished_at() {
       "message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_finished_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
     }
     "###);
 }
diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs
index 361ac1083..61fe19614 100644
--- a/meilisearch/tests/tasks/mod.rs
+++ b/meilisearch/tests/tasks/mod.rs
@@ -202,7 +202,7 @@ async fn get_task_filter_error() {
       "message": "Unknown parameter `lol`: expected one of `limit`, `from`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -213,7 +213,7 @@ async fn get_task_filter_error() {
       "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);

@@ -224,7 +224,7 @@ async fn get_task_filter_error() {
       "message": "Invalid value in parameter `from`: could not parse `pied` as a positive integer",
       "code": "invalid_task_from",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-from"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_from"
    }
     "###);

@@ -235,7 +235,7 @@ async fn get_task_filter_error() {
       "message": "Invalid value in parameter `beforeStartedAt`: `pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
       "code": "invalid_task_before_started_at",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
     }
     "###);
 }
@@ -251,7 +251,7 @@ async fn delete_task_filter_error() {
       "message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
       "code": "missing_task_filters",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#missing-task-filters"
+      "link": "https://docs.meilisearch.com/errors#missing_task_filters"
     }
     "###);

@@ -262,7 +262,7 @@ async fn delete_task_filter_error() {
       "message": "Unknown parameter `lol`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -273,7 +273,7 @@ async fn delete_task_filter_error() {
       "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);
 }
@@ -289,7 +289,7 @@ async fn cancel_task_filter_error() {
       "message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
       "code": "missing_task_filters",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#missing-task-filters"
+      "link": "https://docs.meilisearch.com/errors#missing_task_filters"
     }
     "###);

@@ -300,7 +300,7 @@ async fn cancel_task_filter_error() {
       "message": "Unknown parameter `lol`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
       "code": "bad_request",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad-request"
+      "link": "https://docs.meilisearch.com/errors#bad_request"
     }
     "###);

@@ -311,7 +311,7 @@ async fn cancel_task_filter_error() {
       "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
       "code": "invalid_task_uids",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
+      "link": "https://docs.meilisearch.com/errors#invalid_task_uids"
     }
     "###);
 }
@@ -436,7 +436,7 @@ async fn test_summarized_delete_batch() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
       },
       "duration": "[duration]",
      "enqueuedAt": "[date]",
@@ -495,7 +495,7 @@ async fn test_summarized_delete_document() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
       },
       "duration": "[duration]",
      "enqueuedAt": "[date]",
@@ -542,7 +542,7 @@ async fn test_summarized_settings_update() {
       "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
       "code": "invalid_settings_ranking_rules",
       "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
+      "link": "https://docs.meilisearch.com/errors#invalid_settings_ranking_rules"
     }
     "###);

@@ -626,7 +626,7 @@ async fn test_summarized_index_creation() {
         "message": "Index `test` already exists.",
         "code": "index_already_exists",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-already-exists"
+        "link": "https://docs.meilisearch.com/errors#index_already_exists"
      },
       "duration": "[duration]",
      "enqueuedAt": "[date]",
@@ -659,7 +659,7 @@ async fn test_summarized_index_deletion() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
      },
       "duration": "[duration]",
      "enqueuedAt": "[date]",
@@ -742,7 +742,7 @@ async fn test_summarized_index_update() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
      },
       "duration": "[duration]",
      "enqueuedAt": "[date]",
@@ -770,7 +770,7 @@ async fn test_summarized_index_update() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
      },
       "duration": "[duration]",
      "enqueuedAt": "[date]",
@@ -862,7 +862,7 @@ async fn test_summarized_index_swap() {
         "message": "Indexes `cattos`, `doggos` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
      },
       "duration": "[duration]",
      "enqueuedAt": "[date]",

From d2420f5c8f21278b27fc480928f65bf862b3f9e0 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Thu, 19 Jan 2023 16:10:05 +0100
Subject: [PATCH 033/186] Fix non insta tests

---
 meilisearch/tests/auth/authorization.rs      |  4 +-
 meilisearch/tests/auth/payload.rs            | 20 ++++----
 meilisearch/tests/auth/tenant_token.rs       |  2 +-
 meilisearch/tests/content_type.rs            |  4 +-
 meilisearch/tests/documents/add_documents.rs | 46 +++++++++----------
 .../tests/documents/delete_documents.rs      |  2 +-
 meilisearch/tests/documents/get_documents.rs |  4 +-
 .../tests/documents/update_documents.rs      |  6 +--
 meilisearch/tests/index/create_index.rs      |  2 +-
 meilisearch/tests/index/delete_index.rs      |  2 +-
 meilisearch/tests/index/get_index.rs         |  2 +-
 meilisearch/tests/index/stats.rs             |  2 +-
 meilisearch/tests/index/update_index.rs      |  4 +-
 meilisearch/tests/search/errors.rs           | 30 ++++++------
 meilisearch/tests/tasks/mod.rs               | 44 +++++++++---------
 15 files changed, 87 insertions(+), 87 deletions(-)

diff --git a/meilisearch/tests/auth/authorization.rs b/meilisearch/tests/auth/authorization.rs
index 8ef2d108d..fae6ee7e1 100644
--- a/meilisearch/tests/auth/authorization.rs
+++ b/meilisearch/tests/auth/authorization.rs
@@ -73,7 +73,7 @@ static INVALID_RESPONSE: Lazy = Lazy::new(|| {
     json!({"message": "The provided API key is invalid.",
            "code": "invalid_api_key",
            "type": "auth",
-           "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+           "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     })
 });

@@ -520,7 +520,7 @@ async fn error_creating_index_without_action() {
         "message": "Index `test` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index-not-found"
+        "link": "https://docs.meilisearch.com/errors#index_not_found"
     });

     // try to create a index via add documents route
diff --git a/meilisearch/tests/auth/payload.rs b/meilisearch/tests/auth/payload.rs
index 8164acf48..78eec3eb2 100644
--- a/meilisearch/tests/auth/payload.rs
+++ b/meilisearch/tests/auth/payload.rs
@@ -37,7 +37,7 @@ async fn error_api_key_bad_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");

     // patch
     let req = test::TestRequest::patch()
@@ -59,7 +59,7 @@ async fn error_api_key_bad_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 }

 #[actix_rt::test]
@@ -96,7 +96,7 @@ async fn error_api_key_empty_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");

     // patch
     let req = test::TestRequest::patch()
@@ -118,7 +118,7 @@ async fn error_api_key_empty_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 }

 #[actix_rt::test]
@@ -154,7 +154,7 @@ async fn error_api_key_missing_content_types() {
     );
     assert_eq!(response["code"], "missing_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");

     // patch
     let req = test::TestRequest::patch()
@@ -175,7 +175,7 @@ async fn error_api_key_missing_content_types() {
     );
     assert_eq!(response["code"], "missing_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
 }

 #[actix_rt::test]
@@ -200,7 +200,7 @@ async fn error_api_key_empty_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
     assert_eq!(response["message"], json!(r#"A json payload is missing."#));

     // patch
@@ -217,7 +217,7 @@ async fn error_api_key_empty_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
     assert_eq!(response["message"], json!(r#"A json payload is missing."#));
 }

@@ -243,7 +243,7 @@ async fn error_api_key_malformed_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
     assert_eq!(
         response["message"],
         json!(
@@ -265,7 +265,7 @@ async fn error_api_key_malformed_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
     assert_eq!(
         response["message"],
         json!(
diff --git a/meilisearch/tests/auth/tenant_token.rs b/meilisearch/tests/auth/tenant_token.rs
index af3e7c2a5..fbf9d2b49 100644
--- a/meilisearch/tests/auth/tenant_token.rs
+++ b/meilisearch/tests/auth/tenant_token.rs
@@ -56,7 +56,7 @@ static INVALID_RESPONSE: Lazy = Lazy::new(|| {
     json!({"message": "The provided API key is invalid.",
            "code": "invalid_api_key",
            "type": "auth",
-           "link": "https://docs.meilisearch.com/errors#invalid-api-key"
+           "link": "https://docs.meilisearch.com/errors#invalid_api_key"
     })
 });

diff --git a/meilisearch/tests/content_type.rs b/meilisearch/tests/content_type.rs
index 5759d4d9e..e16a83c06 100644
--- a/meilisearch/tests/content_type.rs
+++ b/meilisearch/tests/content_type.rs
@@ -88,7 +88,7 @@ async fn error_json_bad_content_type() {
                 "message": r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#,
                 "code": "missing_content_type",
                 "type": "invalid_request",
-                "link": "https://docs.meilisearch.com/errors#missing-content-type",
+                "link": "https://docs.meilisearch.com/errors#missing_content_type",
             }),
             "when calling the route `{}` with no content-type",
             route,
@@ -117,7 +117,7 @@ async fn error_json_bad_content_type() {
                     "message": expected_error_message,
                     "code": "invalid_content_type",
                     "type": "invalid_request",
-                    "link": "https://docs.meilisearch.com/errors#invalid-content-type",
+                    "link": "https://docs.meilisearch.com/errors#invalid_content_type",
                 }),
                 "when calling the route `{}` with a content-type of `{}`",
                 route,
diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs
index 64220e033..64bdcac78 100644
--- a/meilisearch/tests/documents/add_documents.rs
+++ b/meilisearch/tests/documents/add_documents.rs
@@ -193,7 +193,7 @@ async fn error_add_documents_test_bad_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");

     // put
     let req = test::TestRequest::put()
@@ -214,7 +214,7 @@ async fn error_add_documents_test_bad_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 }

 /// missing content-type must be refused
@@ -248,7 +248,7 @@ async fn error_add_documents_test_no_content_type() {
     );
     assert_eq!(response["code"], "missing_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");

     // put
     let req = test::TestRequest::put()
@@ -268,7 +268,7 @@ async fn error_add_documents_test_no_content_type() {
     );
     assert_eq!(response["code"], "missing_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
 }

 #[actix_rt::test]
@@ -297,7 +297,7 @@ async fn error_add_malformed_csv_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));

     // put
     let req = test::TestRequest::put()
@@ -318,7 +318,7 @@ async fn error_add_malformed_csv_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
 }

 #[actix_rt::test]
@@ -347,7 +347,7 @@ async fn error_add_malformed_json_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));

     // put
     let req = test::TestRequest::put()
@@ -368,7 +368,7 @@ async fn error_add_malformed_json_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));

     // truncate
@@ -393,7 +393,7 @@ async fn error_add_malformed_json_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));

     // add one more char to the long string to test if the truncating works.
     let document = format!("\"{}m\"", long);
@@ -412,7 +412,7 @@ async fn error_add_malformed_json_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
 }

 #[actix_rt::test]
@@ -441,7 +441,7 @@ async fn error_add_malformed_ndjson_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));

     // put
     let req = test::TestRequest::put()
@@ -460,7 +460,7 @@ async fn error_add_malformed_ndjson_documents() {
     );
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
 }

 #[actix_rt::test]
@@ -484,7 +484,7 @@ async fn error_add_missing_payload_csv_documents() {
     assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));

     // put
     let req = test::TestRequest::put()
@@ -500,7 +500,7 @@ async fn error_add_missing_payload_csv_documents() {
     assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
 }

 #[actix_rt::test]
@@ -524,7 +524,7 @@ async fn error_add_missing_payload_json_documents() {
     assert_eq!(response["message"], json!(r#"A json payload is missing."#));
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));

     // put
     let req = test::TestRequest::put()
@@ -540,7 +540,7 @@ async fn error_add_missing_payload_json_documents() {
     assert_eq!(response["message"], json!(r#"A json payload is missing."#));
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
 }

 #[actix_rt::test]
@@ -564,7 +564,7 @@ async fn error_add_missing_payload_ndjson_documents() {
     assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));

     // put
     let req = test::TestRequest::put()
@@ -580,7 +580,7 @@ async fn error_add_missing_payload_ndjson_documents() {
     assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
 }

 #[actix_rt::test]
@@ -639,7 +639,7 @@ async fn error_document_add_create_index_bad_uid() {
       "message": "`883 fj!` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" }); assert_eq!(code, 400); @@ -781,7 +781,7 @@ async fn error_add_documents_bad_document_id() { assert_eq!(response["error"]["type"], json!("invalid_request")); assert_eq!( response["error"]["link"], - json!("https://docs.meilisearch.com/errors#invalid-document-id") + json!("https://docs.meilisearch.com/errors#invalid_document_id") ); } @@ -809,7 +809,7 @@ async fn error_add_documents_missing_document_id() { assert_eq!(response["error"]["type"], json!("invalid_request")); assert_eq!( response["error"]["link"], - json!("https://docs.meilisearch.com/errors#missing-document-id") + json!("https://docs.meilisearch.com/errors#missing_document_id") ); } @@ -843,7 +843,7 @@ async fn error_document_field_limit_reached() { "message": "A document cannot contain more than 65,535 fields.", "code": "document_fields_limit_reached", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#document-fields-limit-reached" + "link": "https://docs.meilisearch.com/errors#document_fields_limit_reached" }); assert_eq!(response["error"], expected_error); @@ -889,7 +889,7 @@ async fn error_add_documents_payload_size() { "message": "The provided payload reached the size limit.", "code": "payload_too_large", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#payload-too-large" + "link": "https://docs.meilisearch.com/errors#payload_too_large" }); assert_eq!(response, expected_response); diff --git a/meilisearch/tests/documents/delete_documents.rs b/meilisearch/tests/documents/delete_documents.rs index 0abc24eca..e36e2f033 100644 --- a/meilisearch/tests/documents/delete_documents.rs +++ b/meilisearch/tests/documents/delete_documents.rs @@ -95,7 +95,7 @@ async fn error_delete_batch_unexisting_index() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(code, 202); diff --git a/meilisearch/tests/documents/get_documents.rs b/meilisearch/tests/documents/get_documents.rs index 69c60dbb4..9bc54973e 100644 --- a/meilisearch/tests/documents/get_documents.rs +++ b/meilisearch/tests/documents/get_documents.rs @@ -27,7 +27,7 @@ async fn error_get_unexisting_document() { "message": "Document `1` not found.", "code": "document_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#document-not-found" + "link": "https://docs.meilisearch.com/errors#document_not_found" }); assert_eq!(response, expected_response); @@ -90,7 +90,7 @@ async fn error_get_unexisting_index_all_documents() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(response, expected_response); diff --git a/meilisearch/tests/documents/update_documents.rs b/meilisearch/tests/documents/update_documents.rs index 3f31eb2e7..688605861 100644 --- a/meilisearch/tests/documents/update_documents.rs +++ b/meilisearch/tests/documents/update_documents.rs @@ -13,7 +13,7 @@ async fn error_document_update_create_index_bad_uid() { "message": "`883 fj!` 
is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-index-uid" + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" }); assert_eq!(code, 400); @@ -167,7 +167,7 @@ async fn error_update_documents_bad_document_id() { assert_eq!(response["error"]["type"], json!("invalid_request")); assert_eq!( response["error"]["link"], - json!("https://docs.meilisearch.com/errors#invalid-document-id") + json!("https://docs.meilisearch.com/errors#invalid_document_id") ); } @@ -193,6 +193,6 @@ async fn error_update_documents_missing_document_id() { assert_eq!(response["error"]["type"], "invalid_request"); assert_eq!( response["error"]["link"], - "https://docs.meilisearch.com/errors#missing-document-id" + "https://docs.meilisearch.com/errors#missing_document_id" ); } diff --git a/meilisearch/tests/index/create_index.rs b/meilisearch/tests/index/create_index.rs index b4512c60d..48a07b67f 100644 --- a/meilisearch/tests/index/create_index.rs +++ b/meilisearch/tests/index/create_index.rs @@ -177,7 +177,7 @@ async fn error_create_existing_index() { "message": "Index `test` already exists.", "code": "index_already_exists", "type": "invalid_request", - "link":"https://docs.meilisearch.com/errors#index-already-exists" + "link":"https://docs.meilisearch.com/errors#index_already_exists" }); assert_eq!(response["error"], expected_response); diff --git a/meilisearch/tests/index/delete_index.rs b/meilisearch/tests/index/delete_index.rs index 2c5fb51b2..b6efc7a68 100644 --- a/meilisearch/tests/index/delete_index.rs +++ b/meilisearch/tests/index/delete_index.rs @@ -35,7 +35,7 @@ async fn error_delete_unexisting_index() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); let response = index.wait_task(0).await; diff --git a/meilisearch/tests/index/get_index.rs b/meilisearch/tests/index/get_index.rs index d73360cd2..5a184c8ce 100644 --- a/meilisearch/tests/index/get_index.rs +++ b/meilisearch/tests/index/get_index.rs @@ -35,7 +35,7 @@ async fn error_get_unexisting_index() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(response, expected_response); diff --git a/meilisearch/tests/index/stats.rs b/meilisearch/tests/index/stats.rs index a5828e32b..813f05b4a 100644 --- a/meilisearch/tests/index/stats.rs +++ b/meilisearch/tests/index/stats.rs @@ -55,7 +55,7 @@ async fn error_get_stats_unexisting_index() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(response, expected_response); diff --git a/meilisearch/tests/index/update_index.rs b/meilisearch/tests/index/update_index.rs index cad55babe..3c283407c 100644 --- a/meilisearch/tests/index/update_index.rs +++ b/meilisearch/tests/index/update_index.rs @@ -98,7 +98,7 @@ async fn error_update_existing_primary_key() { "message": "Index already has a primary key: `id`.", "code": "index_primary_key_already_exists", "type": "invalid_request", 
- "link": "https://docs.meilisearch.com/errors#index-primary-key-already-exists" + "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }); assert_eq!(response["error"], expected_response); @@ -117,7 +117,7 @@ async fn error_update_unexisting_index() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); assert_eq!(response["error"], expected_response); diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index e8cf3f91c..42f248452 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -13,7 +13,7 @@ async fn search_unexisting_index() { "message": "Index `test` not found.", "code": "index_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#index-not-found" + "link": "https://docs.meilisearch.com/errors#index_not_found" }); index @@ -449,7 +449,7 @@ async fn filter_invalid_syntax_object() { "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": "title & Glass"}), |response, code| { @@ -474,7 +474,7 @@ async fn filter_invalid_syntax_array() { "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": ["title & Glass"]}), |response, code| { @@ -499,7 +499,7 @@ async fn filter_invalid_syntax_string() { "message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": "title = Glass XOR title = Glass"}), |response, code| { @@ -524,7 +524,7 @@ async fn filter_invalid_attribute_array() { "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": ["many = Glass"]}), |response, code| { @@ -549,7 +549,7 @@ async fn filter_invalid_attribute_string() { "message": "Attribute `many` is not filterable. 
Available filterable attributes are: `title`.\n1:5 many = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": "many = Glass"}), |response, code| { @@ -574,7 +574,7 @@ async fn filter_reserved_geo_attribute_array() { "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": ["_geo = Glass"]}), |response, code| { @@ -599,7 +599,7 @@ async fn filter_reserved_geo_attribute_string() { "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": "_geo = Glass"}), |response, code| { @@ -624,7 +624,7 @@ async fn filter_reserved_attribute_array() { "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": ["_geoDistance = Glass"]}), |response, code| { @@ -649,7 +649,7 @@ async fn filter_reserved_attribute_string() { "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass", "code": "invalid_search_filter", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-filter" + "link": "https://docs.meilisearch.com/errors#invalid_search_filter" }); index .search(json!({"filter": "_geoDistance = Glass"}), |response, code| { @@ -674,7 +674,7 @@ async fn sort_geo_reserved_attribute() { "message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.", "code": "invalid_search_sort", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-sort" + "link": "https://docs.meilisearch.com/errors#invalid_search_sort" }); index .search( @@ -704,7 +704,7 @@ async fn sort_reserved_attribute() { "message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.", "code": "invalid_search_sort", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-sort" + "link": "https://docs.meilisearch.com/errors#invalid_search_sort" }); index .search( @@ -734,7 +734,7 @@ async fn sort_unsortable_attribute() { "message": "Attribute `title` is not sortable. 
Available sortable attributes are: `id`.", "code": "invalid_search_sort", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-sort" + "link": "https://docs.meilisearch.com/errors#invalid_search_sort" }); index .search( @@ -764,7 +764,7 @@ async fn sort_invalid_syntax() { "message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.", "code": "invalid_search_sort", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-sort" + "link": "https://docs.meilisearch.com/errors#invalid_search_sort" }); index .search( @@ -798,7 +798,7 @@ async fn sort_unset_ranking_rule() { "message": "The sort ranking rule must be specified in the ranking rules settings to use the sort parameter at search time.", "code": "invalid_search_sort", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-sort" + "link": "https://docs.meilisearch.com/errors#invalid_search_sort" }); index .search( diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 61fe19614..e9b5a2325 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -19,7 +19,7 @@ async fn error_get_unexisting_task_status() { "message": "Task `1` not found.", "code": "task_not_found", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#task-not-found" + "link": "https://docs.meilisearch.com/errors#task_not_found" }); assert_eq!(response, expected_response); @@ -366,7 +366,7 @@ async fn test_summarized_document_addition_or_update() { index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -390,7 +390,7 @@ async fn test_summarized_document_addition_or_update() { index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -419,7 +419,7 @@ async fn test_summarized_delete_batch() { index.delete_batch(vec![1, 2, 3]).await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -449,7 +449,7 @@ async fn test_summarized_delete_batch() { index.delete_batch(vec![42]).await; index.wait_task(2).await; let (task, _) = index.get_task(2).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -478,7 +478,7 @@ async fn test_summarized_delete_document() { index.delete_document(1).await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -508,7 +508,7 @@ async fn test_summarized_delete_document() { index.delete_document(42).await; index.wait_task(2).await; let (task, _) = 
index.get_task(2).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -549,7 +549,7 @@ async fn test_summarized_settings_update() { index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -587,7 +587,7 @@ async fn test_summarized_index_creation() { index.create(None).await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -610,7 +610,7 @@ async fn test_summarized_index_creation() { index.create(Some("doggos")).await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -643,7 +643,7 @@ async fn test_summarized_index_deletion() { index.delete().await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -673,7 +673,7 @@ async fn test_summarized_index_deletion() { index.delete().await; index.wait_task(2).await; let (task, _) = index.get_task(2).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -697,7 +697,7 @@ async fn test_summarized_index_deletion() { index.delete().await; index.wait_task(2).await; let (task, _) = index.get_task(2).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -726,7 +726,7 @@ async fn test_summarized_index_update() { index.update(None).await; index.wait_task(0).await; let (task, _) = index.get_task(0).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -754,7 +754,7 @@ async fn test_summarized_index_update() { index.update(Some("bones")).await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -785,7 +785,7 @@ async fn test_summarized_index_update() { index.update(None).await; index.wait_task(3).await; let (task, _) = index.get_task(3).await; - assert_json_snapshot!(task, + assert_json_snapshot!(task, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { @@ -808,7 +808,7 @@ async fn test_summarized_index_update() { index.update(Some("bones")).await; index.wait_task(4).await; let (task, _) = 
index.get_task(4).await;
-    assert_json_snapshot!(task, 
+    assert_json_snapshot!(task,
+        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {
@@ -839,7 +839,7 @@ async fn test_summarized_index_swap() {
         .await;
     server.wait_task(0).await;
     let (task, _) = server.get_task(0).await;
-    assert_json_snapshot!(task, 
+    assert_json_snapshot!(task,
+        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {
@@ -880,7 +880,7 @@ async fn test_summarized_index_swap() {
         .await;
     server.wait_task(3).await;
     let (task, _) = server.get_task(3).await;
-    assert_json_snapshot!(task, 
+    assert_json_snapshot!(task,
+        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {
@@ -918,7 +918,7 @@ async fn test_summarized_task_cancelation() {
     server.cancel_tasks("uids=0").await;
     index.wait_task(1).await;
     let (task, _) = index.get_task(1).await;
-    assert_json_snapshot!(task, 
+    assert_json_snapshot!(task,
+        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {
@@ -951,7 +951,7 @@ async fn test_summarized_task_deletion() {
     server.delete_tasks("uids=0").await;
     index.wait_task(1).await;
     let (task, _) = index.get_task(1).await;
-    assert_json_snapshot!(task, 
+    assert_json_snapshot!(task,
+        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {
@@ -980,7 +980,7 @@ async fn test_summarized_dump_creation() {
     server.create_dump().await;
     server.wait_task(0).await;
     let (task, _) = server.get_task(0).await;
-    assert_json_snapshot!(task, 
+    assert_json_snapshot!(task,
+        { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
         @r###"
     {

From 13b1abceaf2da0b5a6507c003f7f26186369ec19 Mon Sep 17 00:00:00 2001
From: curquiza
Date: Thu, 19 Jan 2023 16:23:06 +0100
Subject: [PATCH 034/186] Rework technical information in the README

---
 README.md | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 0219771c4..62d9ef241 100644
--- a/README.md
+++ b/README.md
@@ -101,6 +101,15 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle
 
 Thank you for your support!
 
-## 📦 Internal crates and their versioning
+## 👩‍💻 Contributing
 
-The crates in this repository are not currently available on crates.io and do not follow [semver conventions](https://semver.org). However, the Meilisearch search engine is well versioned, and releases follow the semver conventions.
+Meilisearch is and always will be open-source! If you want to contribute to the project, please take a look at these [guidelines](CONTRIBUTING.md).
+
+We look forward to reviewing your contribution! ❤️
+
+## 📦 Technical information
+
+More information about the technical details of the Meilisearch project:
+- The releases of Meilisearch and their associated binaries are available [in this GitHub section](https://github.com/meilisearch/meilisearch/releases).
+- We version the Meilisearch binaries following the [SemVer conventions](https://semver.org/), and we also provide our detailed [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md). 
+- The crates in this repository are not currently available on [crates.io](https://crates.io/) and do not follow [SemVer conventions](https://semver.org).

From e3742a38d4f99c6c855d3be8b1ff1acd7a7b8c6e Mon Sep 17 00:00:00 2001
From: Tamo
Date: Thu, 19 Jan 2023 16:45:10 +0100
Subject: [PATCH 035/186] improve the error messages for the immutable fields

---
 .../src/deserr/error_messages.rs      | 15 +++++++++-
 meilisearch-types/src/keys.rs         | 30 +++++++++----------
 meilisearch/src/routes/indexes/mod.rs | 25 ++++++++--------
 meilisearch/tests/auth/api_keys.rs    |  6 ++--
 4 files changed, 43 insertions(+), 33 deletions(-)

diff --git a/meilisearch-types/src/deserr/error_messages.rs b/meilisearch-types/src/deserr/error_messages.rs
index b289e454d..9b46201c4 100644
--- a/meilisearch-types/src/deserr/error_messages.rs
+++ b/meilisearch-types/src/deserr/error_messages.rs
@@ -9,7 +9,7 @@ We try to:
 use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};
 
 use super::{DeserrJsonError, DeserrQueryParamError};
-use crate::error::ErrorCode;
+use crate::error::{Code, ErrorCode};
 
 /// Return a description of the given location in a Json, preceded by the given article.
 /// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
@@ -179,6 +179,19 @@ impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrJsonError<C> {
     }
 }
 
+pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
+    let msg = format!(
+        "Immutable field `{field}`: expected one of {}",
+        accepted
+            .iter()
+            .map(|accepted| format!("`{}`", accepted))
+            .collect::<Vec<_>>()
+            .join(", ")
+    );
+
+    DeserrJsonError::new(msg, code)
+}
+
 /// Return a description of the given location in query parameters, preceded by the
 /// given article. e.g. `at key5[2]`. If the location is the origin, the given article
 /// will not be included in the description. 
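For reference, here is a minimal sketch of how the new `immutable_field_error` helper is meant to be called. The import paths and the wrapper function below are assumptions made only to keep the example self-contained; the helper signature, the `Code` variant, and the message format all come from the hunks above:

```rust
// Sketch only: the module paths below are assumed, not taken from the patch.
use meilisearch_types::deserr::error_messages::immutable_field_error;
use meilisearch_types::error::Code;

fn example() {
    // Reject an update to the immutable `uid` field of an API key.
    // `accepted` lists the fields that remain editable on this route.
    let err = immutable_field_error("uid", &["description", "name"], Code::ImmutableApiKeyUid);
    // `msg` is a public field of `DeserrJsonError`.
    assert_eq!(err.msg, "Immutable field `uid`: expected one of `description`, `name`");
}
```

This is exactly the message shape exercised by the updated `api_keys.rs` snapshots further down.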
diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs
index 7f81e39ac..31561c848 100644
--- a/meilisearch-types/src/keys.rs
+++ b/meilisearch-types/src/keys.rs
@@ -11,6 +11,7 @@ use time::macros::{format_description, time};
 use time::{Date, OffsetDateTime, PrimitiveDateTime};
 use uuid::Uuid;
 
+use crate::deserr::error_messages::immutable_field_error;
 use crate::deserr::DeserrJsonError;
 use crate::error::deserr_codes::*;
 use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
@@ -57,22 +58,19 @@ fn deny_immutable_fields_api_key(
     accepted: &[&str],
     location: ValuePointerRef,
 ) -> DeserrJsonError {
-    let mut error = unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
-        None,
-        deserr::ErrorKind::UnknownKey { key: field, accepted },
-        location,
-    ));
-
-    error.code = match field {
-        "uid" => Code::ImmutableApiKeyUid,
-        "actions" => Code::ImmutableApiKeyActions,
-        "indexes" => Code::ImmutableApiKeyIndexes,
-        "expiresAt" => Code::ImmutableApiKeyExpiresAt,
-        "createdAt" => Code::ImmutableApiKeyCreatedAt,
-        "updatedAt" => Code::ImmutableApiKeyUpdatedAt,
-        _ => Code::BadRequest,
-    };
-    error
+    match field {
+        "uid" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUid),
+        "actions" => immutable_field_error(field, accepted, Code::ImmutableApiKeyActions),
+        "indexes" => immutable_field_error(field, accepted, Code::ImmutableApiKeyIndexes),
+        "expiresAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyExpiresAt),
+        "createdAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyCreatedAt),
+        "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUpdatedAt),
+        _ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
+            None,
+            deserr::ErrorKind::UnknownKey { key: field, accepted },
+            location,
+        )),
+    }
 }
 
 #[derive(Debug, DeserializeFromValue)]
diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs
index d2a842fe3..2d352bfe5 100644
--- a/meilisearch/src/routes/indexes/mod.rs
+++ b/meilisearch/src/routes/indexes/mod.rs
@@ -5,6 +5,7 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
 use index_scheduler::IndexScheduler;
 use log::debug;
+use meilisearch_types::deserr::error_messages::immutable_field_error;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -144,20 +145,18 @@ fn deny_immutable_fields_index(
     accepted: &[&str],
     location: ValuePointerRef,
 ) -> DeserrJsonError {
-    let mut error = unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
-        None,
-        deserr::ErrorKind::UnknownKey { key: field, accepted },
-        location,
-    ));
-
-    error.code = match field {
-        "uid" => Code::ImmutableIndexUid,
-        "createdAt" => Code::ImmutableIndexCreatedAt,
-        "updatedAt" => Code::ImmutableIndexUpdatedAt,
-        _ => Code::BadRequest,
-    };
-    error
+    match field {
+        "uid" => immutable_field_error(field, accepted, Code::ImmutableIndexUid),
+        "createdAt" => immutable_field_error(field, accepted, Code::ImmutableIndexCreatedAt),
+        "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableIndexUpdatedAt),
+        _ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
+            None,
+            deserr::ErrorKind::UnknownKey { key: field, accepted },
+            location,
+        )),
+    }
 }
+
 #[derive(DeserializeFromValue, Debug)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
 pub struct UpdateIndexRequest {
diff --git 
a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index aa829448b..6ac47e5f7 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -1394,7 +1394,7 @@ async fn error_patch_api_key_indexes() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Unknown field `indexes`: expected one of `description`, `name`", + "message": "Immutable field `indexes`: expected one of `description`, `name`", "code": "immutable_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable-api-key-indexes" @@ -1471,7 +1471,7 @@ async fn error_patch_api_key_actions() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Unknown field `actions`: expected one of `description`, `name`", + "message": "Immutable field `actions`: expected one of `description`, `name`", "code": "immutable_api_key_actions", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable-api-key-actions" @@ -1540,7 +1540,7 @@ async fn error_patch_api_key_expiration_date() { let (response, code) = server.patch_api_key(&uid, content).await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "Unknown field `expiresAt`: expected one of `description`, `name`", + "message": "Immutable field `expiresAt`: expected one of `description`, `name`", "code": "immutable_api_key_expires_at", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#immutable-api-key-expires-at" From d1a31afdd654eb13c26c025840cd110b2575ccb3 Mon Sep 17 00:00:00 2001 From: curquiza Date: Thu, 19 Jan 2023 17:17:34 +0100 Subject: [PATCH 036/186] Modify README to prevent contributions --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 948752ee9..ea80410a5 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,14 @@

a concurrent indexer combined with fast and relevant search algorithms

+--- + +DO NOT CONTRIBUTE TO THIS REPOSITORY ANYMORE, IT WILL BE ARCHIVED SOON. ONLY MEILI TEAM IS ALLOWED TO CONTRIBUTE. + +The content of this repository is now available in the [Meilisearch repository](https://github.com/meilisearch/meilisearch) in the workspace `milli`. + +--- + ## Introduction This repository contains the core engine used in [Meilisearch]. From 30fc37671329ae479db035fc64004ebfa1fc911b Mon Sep 17 00:00:00 2001 From: Many the fish Date: Thu, 19 Jan 2023 17:37:30 +0100 Subject: [PATCH 037/186] Update deserr v0.3.0 --- milli/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/milli/Cargo.toml b/milli/Cargo.toml index dd0287331..3b709d638 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -12,7 +12,7 @@ byteorder = "1.4.3" charabia = { version = "0.7.0", default-features = false } concat-arrays = "0.1.2" crossbeam-channel = "0.5.6" -deserr = "0.1.5" +deserr = "0.3.0" either = "1.8.0" flatten-serde-json = { path = "../flatten-serde-json" } fst = "0.4.7" From abd65d93079ab6abb6a5824c9214597fdcacc62c Mon Sep 17 00:00:00 2001 From: curquiza Date: Thu, 19 Jan 2023 16:43:45 +0000 Subject: [PATCH 038/186] Update version for the next release (v0.40.0) in Cargo.toml files --- benchmarks/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- filter-parser/Cargo.toml | 2 +- flatten-serde-json/Cargo.toml | 2 +- json-depth-checker/Cargo.toml | 2 +- milli/Cargo.toml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 5fec31ee0..73ca8ec33 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "benchmarks" -version = "0.39.2" +version = "0.40.0" edition = "2018" publish = false diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 3a40384bc..5acbbc632 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.39.2" +version = "0.40.0" edition = "2018" description = "A CLI to interact with a milli index" publish = false diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index 73754df26..d7e96cebf 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "filter-parser" -version = "0.39.2" +version = "0.40.0" edition = "2021" description = "The parser for the Meilisearch filter syntax" publish = false diff --git a/flatten-serde-json/Cargo.toml b/flatten-serde-json/Cargo.toml index e84a21798..802bf5f7c 100644 --- a/flatten-serde-json/Cargo.toml +++ b/flatten-serde-json/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flatten-serde-json" -version = "0.39.2" +version = "0.40.0" edition = "2021" description = "Flatten serde-json objects like elastic search" readme = "README.md" diff --git a/json-depth-checker/Cargo.toml b/json-depth-checker/Cargo.toml index 95ee2aa88..85e52c4fd 100644 --- a/json-depth-checker/Cargo.toml +++ b/json-depth-checker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "json-depth-checker" -version = "0.39.2" +version = "0.40.0" edition = "2021" description = "A library that indicates if a JSON must be flattened" publish = false diff --git a/milli/Cargo.toml b/milli/Cargo.toml index dd0287331..44fb045ef 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "milli" -version = "0.39.2" +version = "0.40.0" authors = ["Kerollmops "] edition = "2018" From 0de9a3ffe71a03e3b5753483defa31565a414565 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 12 Jan 2023 17:50:39 +0100 Subject: [PATCH 039/186] Implements errors and warnings 
from the specification Now in technicolor --- Cargo.lock | 2 + meilisearch/Cargo.toml | 2 + meilisearch/src/main.rs | 95 ++++++++++++++++++++++++++++++++--------- 3 files changed, 79 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5c41a3406..04749dfd2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2285,6 +2285,7 @@ dependencies = [ "assert-json-diff", "async-stream", "async-trait", + "atty", "brotli", "bstr 1.1.0", "byte-unit", @@ -2344,6 +2345,7 @@ dependencies = [ "tar", "temp-env", "tempfile", + "termcolor", "thiserror", "time", "tokio", diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index 9a0c9bd0b..1161c2dd6 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -73,6 +73,8 @@ walkdir = "2.3.2" yaup = "0.2.0" serde_urlencoded = "0.7.1" actix-utils = "3.0.1" +atty = "0.2.14" +termcolor = "1.1.3" [dev-dependencies] actix-rt = "2.7.0" diff --git a/meilisearch/src/main.rs b/meilisearch/src/main.rs index 050c825a8..a04e40bc7 100644 --- a/meilisearch/src/main.rs +++ b/meilisearch/src/main.rs @@ -1,4 +1,5 @@ use std::env; +use std::io::Write; use std::path::PathBuf; use std::sync::Arc; @@ -9,6 +10,7 @@ use index_scheduler::IndexScheduler; use meilisearch::analytics::Analytics; use meilisearch::{analytics, create_app, setup_meilisearch, Opt}; use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE}; +use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; #[global_allocator] static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; @@ -32,24 +34,19 @@ async fn main() -> anyhow::Result<()> { match (opt.env.as_ref(), &opt.master_key) { ("production", Some(master_key)) if master_key.len() < MASTER_KEY_MIN_SIZE => { anyhow::bail!( - "In production mode, the master key must be of at least {MASTER_KEY_MIN_SIZE} bytes, but the provided key is only {} bytes long + "The master key must be at least {MASTER_KEY_MIN_SIZE} bytes in a production environment. The provided key is only {} bytes. -We generated a secure master key for you (you can safely copy this token): - ->> export MEILI_MASTER_KEY={} <<", +{}", master_key.len(), - generate_master_key(), + generated_master_key_message(), ) } ("production", None) => { anyhow::bail!( - "In production mode, you must provide a master key to secure your instance. It can be specified via the MEILI_MASTER_KEY environment variable or the --master-key launch option. + "You must provide a master key to secure your instance in a production environment. It can be specified via the MEILI_MASTER_KEY environment variable or the --master-key launch option. -We generated a secure master key for you (you can safely copy this token): - ->> export MEILI_MASTER_KEY={} << -", - generate_master_key() +{}", + generated_master_key_message() ) } // No error; continue @@ -147,7 +144,7 @@ pub fn print_launch_resume( " Thank you for using Meilisearch! -We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html +\nWe collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html Anonymous telemetry:\t\"Enabled\"" ); @@ -170,16 +167,10 @@ Anonymous telemetry:\t\"Enabled\"" eprintln!("A master key has been set. 
Requests to Meilisearch won't be authorized unless you provide an authentication key.");
 
             if master_key.len() < MASTER_KEY_MIN_SIZE {
-                eprintln!();
-                log::warn!("The provided master key is too short (< {MASTER_KEY_MIN_SIZE} bytes)");
-                eprintln!("A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to the production environment.");
+                print_master_key_too_short_warning()
             }
         }
-        ("development", None) => {
-            log::warn!("No master key found; The server will accept unidentified requests");
-            eprintln!("If you need some protection in development mode, please export a key:\n\nexport MEILI_MASTER_KEY={}", generate_master_key());
-            eprintln!("\nA master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to the production environment.");
-        }
+        ("development", None) => print_missing_master_key_warning(),
         // unreachable because Opt::try_build above would have failed already if any other value had been produced
         _ => unreachable!(),
     }
@@ -190,3 +181,67 @@ Anonymous telemetry:\t\"Enabled\""
     eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html");
     eprintln!();
 }
+
+const WARNING_BG_COLOR: Option<Color> = Some(Color::Ansi256(178));
+const WARNING_FG_COLOR: Option<Color> = Some(Color::Ansi256(0));
+
+fn print_master_key_too_short_warning() {
+    let choice =
+        if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
+    let mut stderr = StandardStream::stderr(choice);
+    stderr
+        .set_color(
+            ColorSpec::new().set_bg(WARNING_BG_COLOR).set_fg(WARNING_FG_COLOR).set_bold(true),
+        )
+        .unwrap();
+    writeln!(stderr, "\n").unwrap();
+    writeln!(
+        stderr,
+        " Meilisearch started with a master key considered unsafe for use in a production environment.
+
+A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to a production environment."
+    )
+    .unwrap();
+    stderr.reset().unwrap();
+    writeln!(stderr).unwrap();
+
+    eprintln!("\n{}", generated_master_key_message());
+    eprintln!(
+        "\nRestart Meilisearch with the argument above to use this new and secure master key."
+ ) +} + +fn generated_master_key_message() -> String { + format!( + "We generated a new secure master key for you (you can safely use this token): + +>> --master-key {} <<", + generate_master_key() + ) +} From 3f69dd645039a7de61ca29c8663379dd5758a40d Mon Sep 17 00:00:00 2001 From: Gregory Conrad Date: Thu, 19 Jan 2023 12:08:38 -0500 Subject: [PATCH 040/186] feat: add Cargo feature for LMDB's POSIX semaphores --- Cargo.lock | 8 ++++---- milli/Cargo.toml | 6 +++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1959006d2..a8668fccc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1747,8 +1747,8 @@ checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" [[package]] name = "heed" -version = "0.12.4" -source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb" +version = "0.12.5" +source = "git+https://github.com/meilisearch/heed?tag=v0.12.5#4158a6c484752afaaf9e2530a6ee0e7ab0f24ee8" dependencies = [ "byteorder", "heed-traits", @@ -1765,12 +1765,12 @@ dependencies = [ [[package]] name = "heed-traits" version = "0.7.0" -source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb" +source = "git+https://github.com/meilisearch/heed?tag=v0.12.5#4158a6c484752afaaf9e2530a6ee0e7ab0f24ee8" [[package]] name = "heed-types" version = "0.7.2" -source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb" +source = "git+https://github.com/meilisearch/heed?tag=v0.12.5#4158a6c484752afaaf9e2530a6ee0e7ab0f24ee8" dependencies = [ "bincode", "heed-traits", diff --git a/milli/Cargo.toml b/milli/Cargo.toml index b32592ab9..0ba44a819 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -19,7 +19,7 @@ fst = "0.4.7" fxhash = "0.2.1" geoutils = "0.5.1" grenad = { version = "0.4.3", default-features = false, features = ["tempfile"] } -heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.4", default-features = false, features = ["lmdb", "sync-read-txn"] } +heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.5", default-features = false, features = ["lmdb", "sync-read-txn"] } json-depth-checker = { path = "../json-depth-checker" } levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] } memmap2 = "0.5.7" @@ -63,6 +63,10 @@ fuzzcheck = "0.12.1" [features] default = [ "charabia/default" ] +# Use POSIX semaphores instead of SysV semaphores in LMDB +# For more information on this feature, see heed's Cargo.toml +lmdb-posix-sem = ["heed/posix-sem"] + # allow chinese specialized tokenization chinese = ["charabia/chinese"] From a9b3f914670e58bf6368fb4f75d54f11ee9456ec Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 23 Jan 2023 10:33:30 +0100 Subject: [PATCH 041/186] Add missing space Co-authored-by: Guillaume Mourier --- meilisearch/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/src/main.rs b/meilisearch/src/main.rs index a04e40bc7..2841dd52c 100644 --- a/meilisearch/src/main.rs +++ b/meilisearch/src/main.rs @@ -199,7 +199,7 @@ fn print_master_key_too_short_warning() { stderr, " Meilisearch started with a master key considered unsafe for use in a production environment. -A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to a production environment." + A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to a production environment." 
) .unwrap(); stderr.reset().unwrap(); From 56db54486cfb14d4e1e13229b60353e03b5ea635 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 19 Jan 2023 13:37:34 +0100 Subject: [PATCH 042/186] Add tests --- meilisearch/tests/search/errors.rs | 94 +++++++++++++++++++++++++++++- 1 file changed, 92 insertions(+), 2 deletions(-) diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index 42f248452..f50edd13f 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -333,7 +333,7 @@ async fn search_non_filterable_facets() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Invalid facet distribution, the fields `doggo` are not set as filterable.", + "message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute is `title`.", "code": "invalid_search_facets", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_facets" @@ -344,7 +344,97 @@ async fn search_non_filterable_facets() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Invalid facet distribution, the fields `doggo` are not set as filterable.", + "message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute is `title`.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); +} + +#[actix_rt::test] +async fn search_non_filterable_facets_multiple_filterable() { + let server = Server::new().await; + let index = server.index("test"); + index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await; + index.wait_task(0).await; + + let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attributes are `genres, title`.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); + + let (response, code) = index.search_get("facets=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, attribute `doggo` is not filterable. 
The available filterable attributes are `genres, title`.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); +} + +#[actix_rt::test] +async fn search_non_filterable_facets_no_filterable() { + let server = Server::new().await; + let index = server.index("test"); + index.update_settings(json!({"filterableAttributes": []})).await; + index.wait_task(0).await; + + let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, this index does not have configured filterable attributes.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); + + let (response, code) = index.search_get("facets=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, this index does not have configured filterable attributes.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); +} + +#[actix_rt::test] +async fn search_non_filterable_facets_multiple_facets() { + let server = Server::new().await; + let index = server.index("test"); + index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await; + index.wait_task(0).await; + + let (response, code) = index.search_post(json!({"facets": ["doggo", "neko"]})).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attributes are `genres, title`.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); + + let (response, code) = index.search_get("facets=doggo,neko").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. 
The available filterable attributes are `genres, title`.", "code": "invalid_search_facets", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_facets" From f0e6b9c0c5d0e3a6a3d987ae7cd706ae0482c7e4 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Thu, 19 Jan 2023 17:29:54 +0100 Subject: [PATCH 043/186] Update deserr to 0.3.0 --- meilisearch-types/Cargo.toml | 2 +- meilisearch/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 8d7f673d9..b71e7e46c 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -9,7 +9,7 @@ actix-web = { version = "4.2.1", default-features = false } anyhow = "1.0.65" convert_case = "0.6.0" csv = "1.1.6" -deserr = "0.1.5" +deserr = "0.3.0" either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" file-store = { path = "../file-store" } diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index 9a0c9bd0b..b0592f9d1 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", " bytes = "1.2.1" clap = { version = "4.0.9", features = ["derive", "env"] } crossbeam-channel = "0.5.6" -deserr = "0.1.5" +deserr = "0.3.0" dump = { path = "../dump" } either = "1.8.0" env_logger = "0.9.1" From c79b6a1ee419d5d2bc7427907c32895753b452c1 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 19 Jan 2023 20:54:42 +0100 Subject: [PATCH 044/186] bump milli --- Cargo.lock | 24 ++++++++++++------------ meilisearch-types/Cargo.toml | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5c41a3406..a55e6dc49 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1009,9 +1009,9 @@ dependencies = [ [[package]] name = "deserr" -version = "0.1.5" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d3c6417f0bf7561774690e3d47f9659b0cbc3614c7af7bfda404fda7a2c11d3" +checksum = "28380303ca15ec07e1d5b079baf19cf849b09edad5cab219c1c51b2bd07523de" dependencies = [ "deserr-internal", "serde-cs", @@ -1020,9 +1020,9 @@ dependencies = [ [[package]] name = "deserr-internal" -version = "0.1.5" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "196415cbd3b782cddecbdd69da18cd9b19e1bb0bdbb649e87b5afd83fa8d322b" +checksum = "860928cd8af78d223a3d70dd581f21d7c3de8aa2eecd938e0c0a399ded7c1451" dependencies = [ "convert_case 0.5.0", "proc-macro2", @@ -1300,8 +1300,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.39.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" +version = "0.40.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" dependencies = [ "nom", "nom_locate", @@ -1319,8 +1319,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.39.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" +version = "0.40.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" dependencies = [ "serde_json", ] @@ -1884,8 +1884,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.39.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" +version = "0.40.0" +source = 
"git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" dependencies = [ "serde_json", ] @@ -2431,8 +2431,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.39.1" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.39.1#0c7d1f761e5db6d086f27d3f0f47a97c7f4a5f08" +version = "0.40.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" dependencies = [ "bimap", "bincode", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index b71e7e46c..3bc43bee3 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -16,7 +16,7 @@ file-store = { path = "../file-store" } flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.39.1", default-features = false } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.40.0", default-features = false } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } serde-cs = "0.2.4" From 5dd582918d3791fa796c9e75a26e4145338f89a0 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Thu, 19 Jan 2023 17:21:08 +0100 Subject: [PATCH 045/186] Add test --- meilisearch/tests/search/errors.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index f50edd13f..fb232e9d3 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -512,6 +512,17 @@ async fn search_bad_matching_strategy() { } "###); + let (response, code) = index.search_post(json!({"matchingStrategy": {"doggo": "doggo"}})).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.matchingStrategy`: expected a string, but found an object: `{\"doggo\":\"doggo\"}`", + "code": "invalid_search_matching_strategy", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy" + } + "###); + let (response, code) = index.search_get("matchingStrategy=doggo").await; snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" From 57682cbabeaa0910108777594ff46d2dfc833c1d Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 23 Jan 2023 15:42:58 +0100 Subject: [PATCH 046/186] Fix test url after #3398 --- meilisearch/tests/search/errors.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index fb232e9d3..9c30b09ba 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -519,7 +519,7 @@ async fn search_bad_matching_strategy() { "message": "Invalid value type at `.matchingStrategy`: expected a string, but found an object: `{\"doggo\":\"doggo\"}`", "code": "invalid_search_matching_strategy", "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy" + "link": "https://docs.meilisearch.com/errors#invalid_search_matching_strategy" } "###); From 5672118bfa28da6d81cd568135a822317372d35a Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 23 Jan 2023 17:32:13 +0100 Subject: [PATCH 047/186] When adding documents, trying to update the primary-key now throw an error MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit While updating the test suite I also noticed an issue with the 
indexed_documents value of failed tasks and had to update it. I also
named a bunch of snapshots that had no name, sorry 😬
---
 index-scheduler/src/batch.rs                  |  54 ++-
 index-scheduler/src/lib.rs                    | 380 +++++++++++++++++-
 .../{1.snap => documents.snap}                |   0
 .../{1.snap => documents.snap}                |   0
 .../{1.snap => documents.snap}                |   0
 .../{1.snap => documents.snap}                |   0
 .../after_registering_the_5_tasks.snap        |  49 +++
 .../documents.snap                            |  13 +
 .../fifth_task_succeeds.snap                  |  54 +++
 .../first_and_second_task_fails.snap          |  50 +++
 .../fourth_task_fails.snap                    |  53 +++
 .../third_task_succeeds.snap                  |  52 +++
 .../after_registering_the_3_tasks.snap        |  43 ++
 .../documents.snap                            |   9 +
 .../only_first_task_succeed.snap              |  45 +++
 .../second_task_fails.snap                    |  47 +++
 .../third_task_fails.snap                     |  48 +++
 .../after_registering_the_3_tasks.snap        |  43 ++
 .../documents.snap                            |   9 +
 .../only_first_task_succeed.snap              |  45 +++
 .../second_and_third_tasks_fails.snap         |  46 +++
 .../after_registering_the_6_tasks.snap        |  52 +++
 .../documents.snap                            |  21 +
 .../first_task_fails.snap                     |  54 +++
 .../fourth_and_fifth_tasks_succeeds.snap      |  57 +++
 .../second_task_fails.snap                    |  55 +++
 .../sixth_task_succeeds.snap                  |  58 +++
 .../third_task_succeeds.snap                  |  57 +++
 .../after_registering_the_6_tasks.snap        |  52 +++
 .../documents.snap                            |  25 ++
 .../first_task_succeed.snap                   |  54 +++
 .../fourth_and_fifth_tasks_succeeds.snap      |  57 +++
 .../second_task_fails.snap                    |  56 +++
 .../sixth_task_succeeds.snap                  |  58 +++
 .../third_task_succeeds.snap                  |  57 +++
 .../{3.snap => documents.snap}                |   0
 .../{1.snap => documents.snap}                |   0
 .../{3.snap => documents.snap}                |   0
 .../{1.snap => documents.snap}                |   0
 .../{1.snap => documents.snap}                |   0
 index-scheduler/src/utils.rs                  |  22 +-
 meilisearch/tests/documents/add_documents.rs  |   4 +-
 42 files changed, 1748 insertions(+), 31 deletions(-)
 rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/{1.snap => documents.snap} (100%)
 rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/{1.snap => documents.snap} (100%)
 rename index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/{1.snap => documents.snap} (100%)
 rename index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/{1.snap => documents.snap} (100%)
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/after_registering_the_5_tasks.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/documents.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/first_and_second_task_fails.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/third_task_succeeds.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/after_registering_the_3_tasks.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/documents.snap
 create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/only_first_task_succeed.snap
 create mode 100644
index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/after_registering_the_3_tasks.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/documents.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/only_first_task_succeed.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/after_registering_the_6_tasks.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/documents.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/first_task_fails.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/second_task_fails.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/third_task_succeeds.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/after_registering_the_6_tasks.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/documents.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/first_task_succeed.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap rename index-scheduler/src/snapshots/lib.rs/test_document_replace/{3.snap => documents.snap} (100%) rename index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/{1.snap => documents.snap} (100%) rename index-scheduler/src/snapshots/lib.rs/test_document_update/{3.snap => documents.snap} (100%) rename index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/{1.snap => documents.snap} (100%) rename index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/{1.snap => documents.snap} (100%) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index 423e2f23d..d364081a8 100644 --- a/index-scheduler/src/batch.rs +++ 
b/index-scheduler/src/batch.rs
@@ -207,7 +207,7 @@ impl IndexScheduler {
                 must_create_index,
             })),
             BatchKind::DocumentImport { method, import_ids, .. } => {
-                let tasks = self.get_existing_tasks(rtxn, import_ids)?;
+                let mut tasks = self.get_existing_tasks(rtxn, import_ids)?;
                 let primary_key = match &tasks[0].kind {
                     KindWithContent::DocumentAdditionOrUpdate { primary_key, .. } => {
                         primary_key.clone()
@@ -217,8 +217,18 @@ impl IndexScheduler {
                 let mut documents_counts = Vec::new();
                 let mut content_files = Vec::new();
 
-                for task in &tasks {
+                let mut drain_after = tasks.len();
+
+                for (i, task) in tasks.iter().enumerate() {
                     match task.kind {
+                        KindWithContent::DocumentAdditionOrUpdate {
+                            primary_key: ref pk, ..
+                        } if pk != &primary_key => {
+                            // we can't autobatch document additions that don't share the same
+                            // primary key, because that would make the whole batch fail.
+                            drain_after = i;
+                            break;
+                        }
                         KindWithContent::DocumentAdditionOrUpdate {
                             content_file,
                             documents_count,
@@ -231,6 +241,8 @@ impl IndexScheduler {
                     }
                 }
 
+                tasks.drain(drain_after..);
+
                 Ok(Some(Batch::IndexOperation {
                     op: IndexOperation::DocumentImport {
                         index_uid,
@@ -985,17 +997,32 @@ impl IndexScheduler {
                 let mut primary_key_has_been_set = false;
                 let must_stop_processing = self.must_stop_processing.clone();
                 let indexer_config = self.index_mapper.indexer_config();
-                // TODO use the code from the IndexCreate operation
+
                 if let Some(primary_key) = primary_key {
-                    if index.primary_key(index_wtxn)?.is_none() {
-                        let mut builder =
-                            milli::update::Settings::new(index_wtxn, index, indexer_config);
-                        builder.set_primary_key(primary_key);
-                        builder.execute(
-                            |indexing_step| debug!("update: {:?}", indexing_step),
-                            || must_stop_processing.clone().get(),
-                        )?;
-                        primary_key_has_been_set = true;
+                    match index.primary_key(index_wtxn)? {
+                        // if a primary key was set AND had already been defined in the index
+                        // but to a different value, then we make the whole batch fail.
+                        Some(pk) if primary_key != pk => {
+                            return Err(milli::Error::from(
+                                milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()),
+                            )
+                            .into());
+                        }
+                        // if the primary key was set and equal to the one already set for
+                        // the index, then there is nothing to do.
+                        Some(_) => (),
+                        // if the primary key was set and there was no primary key set for this index,
+                        // we set it to the received value before starting the indexing process.
+                        None => {
+                            let mut builder =
+                                milli::update::Settings::new(index_wtxn, index, indexer_config);
+                            builder.set_primary_key(primary_key);
+                            builder.execute(
+                                |indexing_step| debug!("update: {:?}", indexing_step),
+                                || must_stop_processing.clone().get(),
+                            )?;
+                            primary_key_has_been_set = true;
+                        }
+                    }
                 }
 
@@ -1059,7 +1086,8 @@ impl IndexScheduler {
                     task.status = Status::Failed;
                     task.details = Some(Details::DocumentAdditionOrUpdate {
                         received_documents: count,
-                        indexed_documents: Some(count),
+                        // if there was an error, we indexed 0 documents.
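+                        // (received_documents is left at the full count so the task details
+                        // still report how many documents were in the failed payload)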
+                        indexed_documents: Some(0),
                     });
                     task.error = Some(error.into())
                 }
diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs
index 4374a0612..6e0cea644 100644
--- a/index-scheduler/src/lib.rs
+++ b/index-scheduler/src/lib.rs
@@ -2000,7 +2000,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2047,7 +2047,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2099,7 +2099,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2150,7 +2150,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2201,7 +2201,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[macro_export]
@@ -2840,7 +2840,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2903,7 +2903,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2963,7 +2963,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -3020,7 +3020,369 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_multiple_primary_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in ["id", "bork", "bloup"].iter().enumerate() {
+            let content = format!(
+                r#"{{
+                    "id": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key:
Some(S(primary_key)),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
+
+        // A first batch should be processed with only the first documentAddition.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
+
+        // The second batch should fail.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
+
+        // The third batch should fail.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_fails");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // Is the document still the one we expect?
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_multiple_primary_key_batch_wrong_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in ["id", "bork", "bork"].iter().enumerate() {
+            let content = format!(
+                r#"{{
+                    "id": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: Some(S(primary_key)),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
+
+        // A first batch should be processed with only the first documentAddition.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
+
+        // The second batch should fail and contain two tasks.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_and_third_tasks_fails");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // Is the document still the one we expect?
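+        // (only the first addition went through, so a single document with id 0 is expected)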
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_bad_primary_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in ["bork", "bork", "id", "bork", "id"].iter().enumerate() {
+            let content = format!(
+                r#"{{
+                    "id": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: Some(S(primary_key)),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_5_tasks");
+
+        // A first batch should be processed with only the first two documentAdditions.
+        // They should fail because the documents don't contain any `bork` field.
+        // NOTE: it's marked as successful because the batch didn't fail; it's the individual tasks that failed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_and_second_task_fails");
+
+        // The primary key should be set to none since we failed the batch.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap();
+        snapshot!(primary_key.is_none(), @"true");
+
+        // The second batch should succeed and contain only one task.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
+
+        // The primary key should be set to `id` since this batch succeeded.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // We're trying to `bork` again, but now there is already a primary key set for this index.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_task_fails");
+
+        // Finally the last task should succeed since its primary key is the same as the valid one.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fifth_task_succeeds");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // Is the document still the one we expect?
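+        // (only the two tasks that used the `id` primary key succeeded, so the documents with ids 2 and 4 are expected)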
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_set_and_null_primary_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in
+            [None, Some("bork"), Some("paw"), None, None, Some("paw")].into_iter().enumerate()
+        {
+            let content = format!(
+                r#"{{
+                    "paw": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: primary_key.map(|pk| pk.to_string()),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");
+
+        // A first batch should contain only one task, which fails because we can't infer the primary key.
+        // NOTE: it's marked as successful because the batch didn't fail; it's the individual tasks that failed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_fails");
+
+        // The second batch should contain only one task, which fails because `bork` is not a valid primary key.
+        // NOTE: it's marked as successful because the batch didn't fail; it's the individual tasks that failed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
+
+        // No primary key should be set at this point.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap();
+        snapshot!(primary_key.is_none(), @"true");
+
+        // The third batch should succeed and contain only one task.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
+
+        // The primary key should be set to `paw` since this batch succeeded.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"paw");
+
+        // We should be able to batch together the next two tasks that don't specify any primary key
+        // and it should succeed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_and_fifth_tasks_succeeds");
+
+        // Finally the last task should succeed since its primary key is the same as the valid one.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "sixth_task_succeeds");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"paw");
+
+        // Is the document still the one we expect?
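+        // (the third through sixth tasks succeeded, so four documents keyed by `paw` are expected)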
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_set_and_null_primary_key_inference_works() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in [None, Some("bork"), Some("doggoid"), None, None, Some("doggoid")]
+            .into_iter()
+            .enumerate()
+        {
+            let content = format!(
+                r#"{{
+                    "doggoid": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: primary_key.map(|pk| pk.to_string()),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");
+
+        // A first batch should contain only one task, which succeeds and sets the primary key to `doggoid`.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_succeed");
+
+        // Checking the primary key.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap();
+        snapshot!(primary_key.is_none(), @"false");
+
+        // The second batch should contain only one task, which fails because it tries to update the primary key to `bork`.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
+
+        // The third batch should succeed and contain only one task.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
+
+        // We should be able to batch together the next two tasks that don't specify any primary key
+        // and it should succeed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_and_fifth_tasks_succeeds");
+
+        // Finally the last task should succeed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "sixth_task_succeeds");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"doggoid");
+
+        // Is the document still the one we expect?
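+        // (every task except the second succeeded, so five documents keyed by `doggoid` are expected)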
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/documents.snap
similarity index 100%
rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/1.snap
rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/documents.snap
diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/documents.snap
similarity index 100%
rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/1.snap
rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/documents.snap
diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/documents.snap
similarity index 100%
rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/1.snap
rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/documents.snap
diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/documents.snap
similarity index 100%
rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/1.snap
rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/documents.snap
diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/after_registering_the_5_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/after_registering_the_5_tasks.snap
new file mode 100644
index 000000000..53d3d28da
--- /dev/null
+++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/after_registering_the_5_tasks.snap
@@ -0,0 +1,49 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count:
1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,3,4,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/documents.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/documents.snap new file mode 100644 index 000000000..dd1bbf8b0 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/documents.snap @@ -0,0 +1,13 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "id": 2, + "doggo": "jean bob" + }, + { + "id": 4, + "doggo": "jean bob" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap new file mode 100644 index 000000000..58e16fa55 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap @@ -0,0 +1,54 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: 
"https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [2,4,] +failed [0,1,3,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/first_and_second_task_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/first_and_second_task_fails.snap new file mode 100644 
index 000000000..98bd2b580 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/first_and_second_task_fails.snap @@ -0,0 +1,50 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [2,3,4,] +failed [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,1,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 + 
+---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap new file mode 100644 index 000000000..279040fdb --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap @@ -0,0 +1,53 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [4,] +succeeded [2,] +failed [0,1,3,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + 
+---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000004 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/third_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/third_task_succeeds.snap new file mode 100644 index 000000000..441bb59e2 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/third_task_succeeds.snap @@ -0,0 +1,52 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [3,4,] +succeeded [2,] +failed [0,1,] +---------------------------------------------------------------------- +### Kind: 
+"documentAdditionOrUpdate" [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/after_registering_the_3_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/after_registering_the_3_tasks.snap new file mode 100644 index 000000000..cff9b0bd9 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/after_registering_the_3_tasks.snap @@ -0,0 +1,43 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 
+00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/documents.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/documents.snap new file mode 100644 index 000000000..96f9d447f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/documents.snap @@ -0,0 +1,9 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "id": 0, + "doggo": "jean bob" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/only_first_task_succeed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/only_first_task_succeed.snap new file mode 100644 index 000000000..d3888af01 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/only_first_task_succeed.snap @@ -0,0 +1,45 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap new file mode 100644 index 000000000..84baeca92 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap @@ -0,0 +1,47 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [2,] +succeeded [0,] +failed [1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000002 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap new file mode 100644 index 000000000..6b92f91d1 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap @@ -0,0 +1,48 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { 
index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,] +failed [1,2,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/after_registering_the_3_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/after_registering_the_3_tasks.snap new file mode 100644 index 000000000..4c4f88a30 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/after_registering_the_3_tasks.snap @@ -0,0 +1,43 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { 
index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/documents.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/documents.snap new file mode 100644 index 000000000..96f9d447f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/documents.snap @@ -0,0 +1,9 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "id": 0, + "doggo": "jean bob" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/only_first_task_succeed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/only_first_task_succeed.snap new file mode 100644 index 000000000..76b814eab --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/only_first_task_succeed.snap @@ -0,0 +1,45 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { 
index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap new file mode 100644 index 000000000..e27e95b51 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap @@ -0,0 +1,46 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,] +failed [1,2,] 
+---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,2,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/after_registering_the_6_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/after_registering_the_6_tasks.snap new file mode 100644 index 000000000..078ba06d3 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/after_registering_the_6_tasks.snap @@ -0,0 +1,52 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,3,4,5,] 
+---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/documents.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/documents.snap new file mode 100644 index 000000000..a73c52da5 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/documents.snap @@ -0,0 +1,21 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "paw": 2, + "doggo": "jean bob" + }, + { + "paw": 3, + "doggo": "jean bob" + }, + { + "paw": 4, + "doggo": "jean bob" + }, + { + "paw": 5, + "doggo": "jean bob" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/first_task_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/first_task_fails.snap new file mode 100644 index 000000000..ac63f3b58 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/first_task_fails.snap @@ -0,0 +1,54 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. 
Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,4,5,] +failed [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap new file mode 100644 index 000000000..af131b74a --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap @@ -0,0 +1,57 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [5,] +succeeded [2,3,4,] +failed [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: 
+ +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/second_task_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/second_task_fails.snap new file mode 100644 index 000000000..538b4af93 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/second_task_fails.snap @@ -0,0 +1,55 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { 
received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [2,3,4,5,] +failed [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap new file mode 100644 index 000000000..efbabec0d --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap @@ -0,0 +1,58 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. 
Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [2,3,4,5,] +failed [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### File Store: + 
+---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/third_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/third_task_succeeds.snap new file mode 100644 index 000000000..1271b6f92 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/third_task_succeeds.snap @@ -0,0 +1,57 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [3,4,5,] +succeeded [2,] +failed [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] 
+---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/after_registering_the_6_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/after_registering_the_6_tasks.snap new file mode 100644 index 000000000..0e9ecb81a --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/after_registering_the_6_tasks.snap @@ -0,0 +1,52 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued 
[0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/documents.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/documents.snap new file mode 100644 index 000000000..9c79853fa --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/documents.snap @@ -0,0 +1,25 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "doggoid": 0, + "doggo": "jean bob" + }, + { + "doggoid": 2, + "doggo": "jean bob" + }, + { + "doggoid": 3, + "doggo": "jean bob" + }, + { + "doggoid": 4, + "doggo": "jean bob" + }, + { + "doggoid": 5, + "doggo": "jean bob" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/first_task_succeed.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/first_task_succeed.snap new file mode 100644 index 000000000..fd480420a --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/first_task_succeed.snap @@ -0,0 +1,54 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, 
status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,4,5,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap new file mode 100644 index 000000000..6d61b01df --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap @@ -0,0 +1,57 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, 
indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [5,] +succeeded [0,2,3,4,] +failed [1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap new file mode 100644 index 000000000..99001edb0 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap @@ -0,0 +1,56 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { 
index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [2,3,4,5,] +succeeded [0,] +failed [1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap new file mode 100644 index 
000000000..e6b003712 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap @@ -0,0 +1,58 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,2,3,4,5,] +failed [1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,4,] 
+[timestamp] [5,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap new file mode 100644 index 000000000..64625ca90 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap @@ -0,0 +1,57 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [3,4,5,] +succeeded [0,2,] +failed [1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + 
+---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace/documents.snap similarity index 100% rename from index-scheduler/src/snapshots/lib.rs/test_document_replace/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_replace/documents.snap diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/documents.snap similarity index 100% rename from index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/1.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/documents.snap diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update/documents.snap similarity index 100% rename from index-scheduler/src/snapshots/lib.rs/test_document_update/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_update/documents.snap diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/documents.snap similarity index 100% rename from index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/1.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/documents.snap diff --git a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/1.snap b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/documents.snap similarity index 100% rename from index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/1.snap rename to index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/documents.snap diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs index 0018ae1d0..e13d0e375 100644 --- a/index-scheduler/src/utils.rs +++ b/index-scheduler/src/utils.rs @@ -404,15 +404,19 @@ impl IndexScheduler { Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => { assert_eq!(kind.as_kind(), Kind::DocumentAdditionOrUpdate); match indexed_documents { - Some(0) => assert_ne!(status, Status::Enqueued), Some(indexed_documents) => { - assert_eq!(status, Status::Succeeded); - assert!(indexed_documents <= received_documents); + assert!(matches!( + status, + Status::Succeeded | Status::Failed | Status::Canceled + )); + match status { + Status::Succeeded => assert!(indexed_documents <= received_documents), + Status::Failed | Status::Canceled => assert_eq!(indexed_documents, 0), + status => panic!("DocumentAddition can't have an indexed_document set if it's {}", 
status), + } } None => { - assert_ne!(status, Status::Succeeded); - assert_ne!(status, Status::Canceled); - assert_ne!(status, Status::Failed); + assert!(matches!(status, Status::Enqueued | Status::Processing)) } } } @@ -504,7 +508,11 @@ impl IndexScheduler { if let KindWithContent::DocumentAdditionOrUpdate { content_file, .. } = kind { match status { Status::Enqueued | Status::Processing => { - assert!(self.file_store.__all_uuids().contains(&content_file)); + assert!( + self.file_store.__all_uuids().contains(&content_file), + "Could not find uuid `{content_file}` in the file_store. Available uuids are {:?}.", + self.file_store.__all_uuids(), + ); } Status::Succeeded | Status::Failed | Status::Canceled => { assert!(!self.file_store.__all_uuids().contains(&content_file)); diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 64bdcac78..83fcb6e83 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -923,7 +923,7 @@ async fn error_primary_key_inference() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", @@ -963,7 +963,7 @@ async fn error_primary_key_inference() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.", From 13c2cd700d128ab58f729987d1ec7cc8ed597b07 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 19 Jan 2023 14:02:42 +0100 Subject: [PATCH 048/186] Update error message about negative integer --- meilisearch-types/src/deserr/error_messages.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/meilisearch-types/src/deserr/error_messages.rs b/meilisearch-types/src/deserr/error_messages.rs index 9b46201c4..7e288085d 100644 --- a/meilisearch-types/src/deserr/error_messages.rs +++ b/meilisearch-types/src/deserr/error_messages.rs @@ -52,7 +52,7 @@ fn value_kinds_description_json(kinds: &[ValueKind]) -> String { ValueKind::Null => "null", ValueKind::Boolean => "a boolean", ValueKind::Integer => "a positive integer", - ValueKind::NegativeInteger => "an integer", + ValueKind::NegativeInteger => "a negative integer", ValueKind::Float => "a number", ValueKind::String => "a string", ValueKind::Sequence => "an array", @@ -310,7 +310,7 @@ mod tests { insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean"); insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"an integer"); + insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"a negative integer"); insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string"); insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array"); @@ -318,7 +318,7 @@ mod tests { 
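// The snapshots here pin down two wording rules: integer kinds collapse with
// `Float` into "a number", and a lone `NegativeInteger` now reads "a negative
// integer". What follows is a self-contained sketch of those rules
// (hypothetical names, not the actual `value_kinds_description_json`
// implementation), handy for experimenting with the expected strings:

#[derive(Clone, Copy)]
enum Kind {
    Null,
    Boolean,
    Integer,
    NegativeInteger,
    Float,
}

// Description of a single kind, mirroring the wording asserted in these tests.
fn single(kind: Kind) -> &'static str {
    match kind {
        Kind::Null => "null",
        Kind::Boolean => "a boolean",
        Kind::Integer => "a positive integer",
        Kind::NegativeInteger => "a negative integer",
        Kind::Float => "a number",
    }
}

// `kinds` is assumed already sorted (null < boolean < integer < negative integer < float).
fn describe(kinds: &[Kind]) -> String {
    match kinds {
        // Integer kinds followed by `Float` merge into a single "a number".
        [Kind::Integer | Kind::NegativeInteger, Kind::Float]
        | [Kind::Integer, Kind::NegativeInteger, Kind::Float] => "a number".to_owned(),
        [kind] => single(*kind).to_owned(),
        [first, rest @ ..] => format!("{} or {}", single(*first), describe(rest)),
        [] => String::new(),
    }
}

fn main() {
    assert_eq!(describe(&[Kind::NegativeInteger]), "a negative integer");
    assert_eq!(describe(&[Kind::Boolean, Kind::Integer]), "a boolean or a positive integer");
    assert_eq!(describe(&[Kind::Null, Kind::Integer, Kind::NegativeInteger, Kind::Float]), "null or a number");
}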
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer");
- insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"an integer or an array");
+ insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"a negative integer or an array");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number");

From 767cb725a5a354b18b47963ed3e3b9994f38ca81 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Mon, 23 Jan 2023 20:16:16 +0100
Subject: [PATCH 049/186] Reimplement the batching of tasks with or without a
 primary key in the autobatcher

---
 index-scheduler/src/autobatcher.rs | 256 ++++++++++++------
 index-scheduler/src/batch.rs | 31 ++-
 index-scheduler/src/lib.rs | 16 +-
 .../second_and_third_tasks_fails.snap | 11 +-
 ...eds.snap => all_other_tasks_succeeds.snap} | 6 +-
 .../fourth_and_fifth_tasks_succeeds.snap | 57 ----
 ...eds.snap => all_other_tasks_succeeds.snap} | 6 +-
 .../fourth_and_fifth_tasks_succeeds.snap | 57 ----
 8 files changed, 206 insertions(+), 234 deletions(-)
 rename index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/{sixth_task_succeeds.snap => all_other_tasks_succeeds.snap} (98%)
 delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap
 rename index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/{sixth_task_succeeds.snap => all_other_tasks_succeeds.snap} (97%)
 delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap

diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs
index d1ed691c6..28ae886ad 100644
--- a/index-scheduler/src/autobatcher.rs
+++ b/index-scheduler/src/autobatcher.rs
@@ -19,10 +19,16 @@ use crate::KindWithContent;
 ///
 /// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
 enum AutobatchKind {
-    DocumentImport { method: IndexDocumentsMethod, allow_index_creation: bool },
+    DocumentImport {
+        method: IndexDocumentsMethod,
+        allow_index_creation: bool,
+        primary_key: Option<String>,
+    },
     DocumentDeletion,
     DocumentClear,
-    Settings { allow_index_creation: bool },
+    Settings {
+        allow_index_creation: bool,
+    },
     IndexCreation,
     IndexDeletion,
     IndexUpdate,
@@ -38,14 +44,24 @@ impl AutobatchKind {
            _ => None,
        }
    }
+
+    fn primary_key(&self) -> Option<Option<&str>> {
+        match self {
+            AutobatchKind::DocumentImport { primary_key, .. } => Some(primary_key.as_deref()),
+            _ => None,
+        }
+    }
 }

 impl From<KindWithContent> for AutobatchKind {
    fn from(kind: KindWithContent) -> Self {
        match kind {
-            KindWithContent::DocumentAdditionOrUpdate { method, allow_index_creation, ..
} => {
-                AutobatchKind::DocumentImport { method, allow_index_creation }
-            }
+            KindWithContent::DocumentAdditionOrUpdate {
+                method,
+                allow_index_creation,
+                primary_key,
+                ..
+            } => AutobatchKind::DocumentImport { method, allow_index_creation, primary_key },
            KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
            KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
            KindWithContent::SettingsUpdate { allow_index_creation, is_deletion, .. } => {
@@ -75,6 +91,7 @@ pub enum BatchKind {
    DocumentImport {
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
+        primary_key: Option<String>,
        import_ids: Vec<TaskId>,
    },
    DocumentDeletion {
@@ -89,6 +106,7 @@ pub enum BatchKind {
        settings_ids: Vec<TaskId>,
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
+        primary_key: Option<String>,
        import_ids: Vec<TaskId>,
    },
    Settings {
@@ -120,6 +138,16 @@ impl BatchKind {
            _ => None,
        }
    }
+
+    fn primary_key(&self) -> Option<Option<&str>> {
+        match self {
+            BatchKind::DocumentImport { primary_key, .. }
+            | BatchKind::SettingsAndDocumentImport { primary_key, .. } => {
+                Some(primary_key.as_deref())
+            }
+            _ => None,
+        }
+    }
 }

 impl BatchKind {
@@ -131,6 +159,7 @@ impl BatchKind {
    pub fn new(
        task_id: TaskId,
        kind: KindWithContent,
+        primary_key: Option<&str>,
    ) -> (ControlFlow<BatchKind, BatchKind>, bool) {
        use AutobatchKind as K;

@@ -140,10 +169,28 @@ impl BatchKind {
        K::IndexUpdate => (Break(BatchKind::IndexUpdate { id: task_id }), false),
        K::IndexSwap => (Break(BatchKind::IndexSwap { id: task_id }), false),
        K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
-        K::DocumentImport { method, allow_index_creation } => (
-            Continue(BatchKind::DocumentImport {
+        K::DocumentImport { method, allow_index_creation, primary_key: pk }
+            if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
+        {
+            (
+                Continue(BatchKind::DocumentImport {
+                    method,
+                    allow_index_creation,
+                    primary_key: pk,
+                    import_ids: vec![task_id],
+                }),
+                allow_index_creation,
+            )
+        }
+        // If the primary key set in the task is different from ours, we should stop and make this batch fail ASAP.
+        // TODO: maybe we could continue to batch tasks that'll fail? But that would mean we need to be extra
+        // cautious with the index deletion and document clear because we should remember that these tasks were
+        // supposed to fail even if we don't execute them.
+        K::DocumentImport { method, allow_index_creation, primary_key } => (
+            Break(BatchKind::DocumentImport {
                method,
                allow_index_creation,
+                primary_key,
                import_ids: vec![task_id],
            }),
            allow_index_creation,
        ),
@@ -163,7 +210,7 @@ impl BatchKind {
    /// To ease the writing of the code. `true` can be returned when you don't need to create an index
    /// but `false` can't be returned if you need to create an index.
    #[rustfmt::skip]
-    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool) -> ControlFlow<BatchKind, BatchKind> {
+    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool, primary_key: Option<&str>) -> ControlFlow<BatchKind, BatchKind> {
        use AutobatchKind as K;

        match (self, kind) {
@@ -173,11 +220,51 @@ impl BatchKind {
            (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
                Break(this)
            },
+            //
+            // 1. If both tasks don't interact with the primary key -> we can continue
+            // 2.
Else ->
+            //    2.1 If we already have a primary key ->
+            //        2.1.1 If the task we're trying to accumulate has a pk -> it must be equal to our primary key to continue
+            //        2.1.2 If the task doesn't have a primary key -> we can continue
+            //              (We've already ensured that the current batch was correct according to our pk in a previous step of the autobatcher)
+            //    2.2 If we don't have a primary key ->
+            //        2.2.1 If both the batch and the task have a primary key, they should be equal
+            //        2.2.2 If the batch is set to Some(None), the task should be too
+            //        2.2.3 If the batch is set to None -> we can continue
+            //
+            // NOTE: We need to negate the whole condition since we're checking if we need to break instead of continue.
+            //       I wrote it this way because it's easier to understand than the other way around.
+            (this, kind) if !(
+                // 1. If both tasks don't interact with the primary key -> we can continue
+                (this.primary_key().is_none() && kind.primary_key().is_none()) ||
+                // 2. Else ->
+                (
+                    // 2.1 If we already have a primary key ->
+                    (
+                        primary_key.is_some() &&
+                        // 2.1.1 If the task we're trying to accumulate has a pk, it must be equal to our primary key
+                        // 2.1.2 If the task doesn't have a primary key -> we can continue
+                        kind.primary_key().map_or(true, |pk| pk == primary_key)
+                    ) ||
+                    // 2.2 If we don't have a primary key ->
+                    (
+                        // 2.2.1 If both the batch and the task have a primary key, they should be equal
+                        // 2.2.2 If the batch is set to Some(None), the task should be too
+                        // 2.2.3 If the batch is set to None -> we can continue
+                        this.primary_key().zip(kind.primary_key()).map_or(true, |(this, kind)| this == kind)
+                    )
+                )
+            ) // closing the negation
+            => {
+                Break(this)
+            },
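// Restated positively, the condition above amounts to the predicate below, a
// self-contained sketch for illustration only (the helper name is
// hypothetical; `accumulate` inlines this logic instead of calling a function):

fn pk_compatible(
    index_pk: Option<&str>,
    batch_pk: Option<Option<&str>>,
    task_pk: Option<Option<&str>>,
) -> bool {
    // 1. Neither the batch nor the task interacts with a primary key.
    (batch_pk.is_none() && task_pk.is_none())
        // 2.1 The index already has a primary key: a task with no primary-key
        //     notion at all passes; a task naming a key must name exactly that key.
        || (index_pk.is_some() && task_pk.map_or(true, |pk| pk == index_pk))
        // 2.2 Otherwise, batch and task must agree whenever both carry an
        //     opinion; `None` on either side lets the other through.
        || batch_pk.zip(task_pk).map_or(true, |(batch, task)| batch == task)
}

fn main() {
    // Two imports naming the same key keep accumulating.
    assert!(pk_compatible(None, Some(Some("id")), Some(Some("id"))));
    // Conflicting keys hit the `Break(this)` arm above.
    assert!(!pk_compatible(None, Some(Some("id")), Some(Some("bork"))));
}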
            // The index deletion can batch with everything but must stop after
            (
                BatchKind::DocumentClear { mut ids }
                | BatchKind::DocumentDeletion { deletion_ids: mut ids }
-                | BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids }
+                | BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids }
                | BatchKind::Settings { allow_index_creation: _, settings_ids: mut ids },
                K::IndexDeletion,
            ) => {
                ids.push(id);
@@ -186,7 +273,7 @@
            }
            (
                BatchKind::ClearAndSettings { settings_ids: mut ids, allow_index_creation: _, mut other }
-                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, settings_ids: mut other },
+                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, primary_key: _, settings_ids: mut other },
                K::IndexDeletion,
            ) => {
                ids.push(id);
@@ -206,7 +293,7 @@
                K::DocumentImport { .. } | K::Settings { .. },
            ) => Break(this),
            (
-                BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids },
+                BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids },
                K::DocumentClear,
            ) => {
                ids.push(id);
@@ -215,24 +302,27 @@

            // we can autobatch the same kind of document additions / updates
            (
-                BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: ReplaceDocuments, .. },
+                BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                K::DocumentImport { method: ReplaceDocuments, primary_key: pk, .. },
            ) => {
                import_ids.push(id);
                Continue(BatchKind::DocumentImport {
                    method: ReplaceDocuments,
                    allow_index_creation,
                    import_ids,
+                    primary_key: pk,
                })
            }
            (
-                BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: UpdateDocuments, .. },
+                BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                K::DocumentImport { method: UpdateDocuments, primary_key: pk, .. },
            ) => {
+                import_ids.push(id);
                Continue(BatchKind::DocumentImport {
                    method: UpdateDocuments,
                    allow_index_creation,
+                    primary_key: pk,
                    import_ids,
                })
            }
@@ -245,12 +335,13 @@
            ) => Break(this),

            (
-                BatchKind::DocumentImport { method, allow_index_creation, import_ids },
+                BatchKind::DocumentImport { method, allow_index_creation, primary_key, import_ids },
                K::Settings { .. },
            ) => Continue(BatchKind::SettingsAndDocumentImport {
                settings_ids: vec![id],
                method,
                allow_index_creation,
+                primary_key,
                import_ids,
            }),
@@ -327,7 +418,7 @@
                })
            }
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation },
+                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation, primary_key: _ },
                K::DocumentClear,
            ) => {
                other.push(id);
@@ -339,26 +430,28 @@
            }

            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation },
-                K::DocumentImport { method: ReplaceDocuments, .. },
+                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation, primary_key: _ },
+                K::DocumentImport { method: ReplaceDocuments, primary_key: pk2, .. },
            ) => {
                import_ids.push(id);
                Continue(BatchKind::SettingsAndDocumentImport {
                    settings_ids,
                    method: ReplaceDocuments,
                    allow_index_creation,
+                    primary_key: pk2,
                    import_ids,
                })
            }
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: UpdateDocuments, .. },
+                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                K::DocumentImport { method: UpdateDocuments, primary_key: pk2, .. },
            ) => {
                import_ids.push(id);
                Continue(BatchKind::SettingsAndDocumentImport {
                    settings_ids,
                    method: UpdateDocuments,
                    allow_index_creation,
+                    primary_key: pk2,
                    import_ids,
                })
            }
@@ -369,7 +462,7 @@
                K::DocumentDeletion | K::DocumentImport { .. },
            ) => Break(this),
            (
-                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation, import_ids },
+                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation, primary_key, import_ids },
                K::Settings { .. },
            ) => {
                settings_ids.push(id);
@@ -377,6 +470,7 @@
                    settings_ids,
                    method,
                    allow_index_creation,
+                    primary_key,
                    import_ids,
                })
            }
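One detail of the hunks above that is easy to misread: both new `primary_key()` accessors return a nested `Option`, so three states flow through the autobatcher. A minimal standalone illustration of that encoding (a simplified, hypothetical enum, not the real `BatchKind`):

enum SketchKind {
    DocumentImport { primary_key: Option<String> },
    DocumentDeletion,
}

impl SketchKind {
    // `None`: this kind carries no notion of a primary key at all.
    // `Some(None)`: it does, but the task didn't specify one.
    // `Some(Some(pk))`: the task explicitly asked for `pk`.
    fn primary_key(&self) -> Option<Option<&str>> {
        match self {
            SketchKind::DocumentImport { primary_key } => Some(primary_key.as_deref()),
            SketchKind::DocumentDeletion => None,
        }
    }
}

fn main() {
    let import = SketchKind::DocumentImport { primary_key: Some("id".into()) };
    assert_eq!(import.primary_key(), Some(Some("id")));
    assert_eq!(SketchKind::DocumentDeletion.primary_key(), None);
}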
let mut index_exist = index_already_exists; - let (mut acc, must_create_index) = match BatchKind::new(id, kind) { + let (mut acc, must_create_index) = match BatchKind::new(id, kind, primary_key) { (Continue(acc), create) => (acc, create), (Break(acc), create) => return Some((acc, create)), }; @@ -422,7 +517,7 @@ pub fn autobatch( index_exist |= must_create_index; for (id, kind) in enqueued { - acc = match acc.accumulate(id, kind.into(), index_exist) { + acc = match acc.accumulate(id, kind.into(), index_exist, primary_key) { Continue(acc) => acc, Break(acc) => return Some((acc, must_create_index)), }; @@ -446,6 +541,7 @@ mod tests { autobatch( input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind)).collect(), index_already_exists, + None, ) } @@ -502,29 +598,29 @@ mod tests { fn autobatch_simple_operation_together() { // we can autobatch one or multiple `ReplaceDocuments` together. // if the index exists. - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, false ), doc_imp(ReplaceDocuments, false )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, false ), doc_imp(ReplaceDocuments, false )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))"); // if it doesn't exists. 
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); // we can autobatch one or multiple `UpdateDocuments` together. // if the index exists. 
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");

        // if it doesn't exist.
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))"); // we can autobatch one or multiple DocumentDeletion together debug_snapshot!(autobatch_from(true, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); @@ -547,51 +643,51 @@ mod tests { #[test] fn simple_document_operation_dont_autobatch_with_other() { // addition, updates and deletion can't batch together - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { 
method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); } #[test] fn document_addition_batch_with_settings() { // simple case - debug_snapshot!(autobatch_from(true, 
[doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); // multiple settings and doc addition - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); // addition and setting unordered - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); // We ensure this kind of batch doesn't batch with forbidden operations - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(UpdateDocuments, true)]), @"Some((SettingsAndDocumentImport 
{ settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(ReplaceDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(UpdateDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(ReplaceDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, 
allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); } #[test] @@ -703,25 +799,25 @@ mod tests { debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))"); // The third and final case is when the first task doesn't create an index but is directly followed by a task creating an index. In this case we can't batch whith what // follows because we first need to process the erronous batch. 
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
    }

    #[test]
    fn allowed_and_disallowed_index_creation() {
        // `DocumentImport` can't be mixed with tasks that are not allowed to create an index, except if the index already exists.
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), 
@"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); } } diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index d364081a8..b52d3d495 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -207,7 +207,7 @@ impl IndexScheduler { must_create_index, })), BatchKind::DocumentImport { method, import_ids, .. } => { - let mut tasks = self.get_existing_tasks(rtxn, import_ids)?; + let tasks = self.get_existing_tasks(rtxn, import_ids)?; let primary_key = match &tasks[0].kind { KindWithContent::DocumentAdditionOrUpdate { primary_key, .. } => { primary_key.clone() @@ -217,18 +217,9 @@ impl IndexScheduler { let mut documents_counts = Vec::new(); let mut content_files = Vec::new(); - let mut drain_after = tasks.len(); - for (i, task) in tasks.iter().enumerate() { + for task in tasks.iter() { match task.kind { - KindWithContent::DocumentAdditionOrUpdate { - primary_key: ref pk, .. - } if pk != &primary_key => { - // we can't autobatch document additions that don't share the same - // primary key because that would make the whole batch fails. - drain_after = i; - break; - } KindWithContent::DocumentAdditionOrUpdate { content_file, documents_count, @@ -241,8 +232,6 @@ impl IndexScheduler { } } - tasks.drain(drain_after..); - Ok(Some(Batch::IndexOperation { op: IndexOperation::DocumentImport { index_uid, @@ -337,6 +326,7 @@ impl IndexScheduler { settings_ids, method, allow_index_creation, + primary_key, import_ids, } => { let settings = self.create_next_batch_index( @@ -349,7 +339,12 @@ impl IndexScheduler { let document_import = self.create_next_batch_index( rtxn, index_uid.clone(), - BatchKind::DocumentImport { method, allow_index_creation, import_ids }, + BatchKind::DocumentImport { + method, + allow_index_creation, + primary_key, + import_ids, + }, must_create_index, )?; @@ -479,6 +474,12 @@ impl IndexScheduler { }; let index_already_exists = self.index_mapper.exists(rtxn, index_name)?; + let mut primary_key = None; + if index_already_exists { + let index = self.index_mapper.index(rtxn, index_name)?; + let rtxn = index.read_txn()?; + primary_key = index.primary_key(&rtxn)?.map(|pk| pk.to_string()); + } let index_tasks = self.index_tasks(rtxn, index_name)? 
& enqueued; @@ -496,7 +497,7 @@ impl IndexScheduler { .collect::>>()?; if let Some((batchkind, create_index)) = - autobatcher::autobatch(enqueued, index_already_exists) + autobatcher::autobatch(enqueued, index_already_exists, primary_key.as_deref()) { return self.create_next_batch_index( rtxn, diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 6e0cea644..895b97813 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -3281,13 +3281,9 @@ mod tests { snapshot!(primary_key, @"paw"); // We should be able to batch together the next two tasks that don't specify any primary key - // and it should succeed. + // + the last task that matches the current primary-key. Everything should succeed. handle.advance_one_successful_batch(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_and_fifth_tasks_succeeds"); - - // Finally the last task should succeed since its primary key is the same as the valid one. - handle.advance_one_successful_batch(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "sixth_task_succeeds"); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds"); // Is the primary key still what we expect? let index = index_scheduler.index("doggos").unwrap(); @@ -3360,13 +3356,9 @@ mod tests { snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds"); // We should be able to batch together the next two tasks that don't specify any primary key - // and it should succeed. + // + the last task that matches the current primary-key. Everything should succeed. handle.advance_one_successful_batch(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_and_fifth_tasks_succeeds"); - - // Finally the last task should succeed. - handle.advance_one_successful_batch(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "sixth_task_succeeds"); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds"); // Is the primary key still what we expect? 
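// (Batching tasks 3, 4 and 5 together must not have changed it.)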
let index = index_scheduler.index("doggos").unwrap(); diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap index e27e95b51..26b0f6584 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap @@ -8,12 +8,12 @@ source: index-scheduler/src/lib.rs ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: -enqueued [] +enqueued [2,] succeeded [0,] -failed [1,2,] +failed [1,] ---------------------------------------------------------------------- ### Kind: "documentAdditionOrUpdate" [0,1,2,] @@ -34,13 +34,14 @@ doggos [0,1,2,] ---------------------------------------------------------------------- ### Started At: [timestamp] [0,] -[timestamp] [1,2,] +[timestamp] [1,] ---------------------------------------------------------------------- ### Finished At: [timestamp] [0,] -[timestamp] [1,2,] +[timestamp] [1,] ---------------------------------------------------------------------- ### File Store: +00000000-0000-0000-0000-000000000002 ---------------------------------------------------------------------- diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/all_other_tasks_succeeds.snap similarity index 98% rename from 
index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/all_other_tasks_succeeds.snap index efbabec0d..69cbd3def 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/sixth_task_succeeds.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/all_other_tasks_succeeds.snap @@ -42,15 +42,13 @@ doggos [0,1,2,3,4,5,] [timestamp] [0,] [timestamp] [1,] [timestamp] [2,] -[timestamp] [3,4,] -[timestamp] [5,] +[timestamp] [3,4,5,] ---------------------------------------------------------------------- ### Finished At: [timestamp] [0,] [timestamp] [1,] [timestamp] [2,] -[timestamp] [3,4,] -[timestamp] [5,] +[timestamp] [3,4,5,] ---------------------------------------------------------------------- ### File Store: diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap deleted file mode 100644 index af131b74a..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key/fourth_and_fifth_tasks_succeeds.snap +++ /dev/null @@ -1,57 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. 
Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} -4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [5,] -succeeded [2,3,4,] -failed [0,1,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,1,2,3,4,5,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,] ----------------------------------------------------------------------- -### Index Mapper: -["doggos"] ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,4,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,4,] ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000005 - 
----------------------------------------------------------------------- - diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap similarity index 97% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap index e6b003712..437c6375e 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/sixth_task_succeeds.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap @@ -42,15 +42,13 @@ doggos [0,1,2,3,4,5,] [timestamp] [0,] [timestamp] [1,] [timestamp] [2,] -[timestamp] [3,4,] -[timestamp] [5,] +[timestamp] [3,4,5,] ---------------------------------------------------------------------- ### Finished At: [timestamp] [0,] [timestamp] [1,] [timestamp] [2,] -[timestamp] [3,4,] -[timestamp] [5,] +[timestamp] [3,4,5,] ---------------------------------------------------------------------- ### File Store: diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap deleted file mode 100644 index 6d61b01df..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/fourth_and_fifth_tasks_succeeds.snap +++ /dev/null @@ -1,57 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, 
documents_count: 1, allow_index_creation: true }} -4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [5,] -succeeded [0,2,3,4,] -failed [1,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,1,2,3,4,5,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,] ----------------------------------------------------------------------- -### Index Mapper: -["doggos"] ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,4,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,4,] ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000005 - ----------------------------------------------------------------------- - From 7d1ebb72955132927c6a23baf336e5fa79b54230 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 23 Jan 2023 20:56:12 +0100 Subject: [PATCH 050/186] add test on the autobatcher layer --- index-scheduler/src/autobatcher.rs | 343 ++++++++++++++++------------- 1 file changed, 195 insertions(+), 148 deletions(-) diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs index 28ae886ad..b5840c435 100644 --- a/index-scheduler/src/autobatcher.rs +++ b/index-scheduler/src/autobatcher.rs @@ -536,19 +536,24 @@ mod tests { fn autobatch_from( index_already_exists: bool, + primary_key: Option<&str>, input: impl IntoIterator, ) -> Option<(BatchKind, bool)> { autobatch( input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind)).collect(), index_already_exists, - None, + primary_key, ) } - fn doc_imp(method: IndexDocumentsMethod, allow_index_creation: bool) -> KindWithContent { + fn doc_imp( + method: IndexDocumentsMethod, + allow_index_creation: bool, + primary_key: Option<&str>, + ) -> KindWithContent { KindWithContent::DocumentAdditionOrUpdate { index_uid: String::from("doggo"), - primary_key: None, + primary_key: primary_key.map(|pk| pk.to_string()), method, content_file: Uuid::new_v4(), documents_count: 0, @@ -598,226 +603,268 @@ mod tests { fn autobatch_simple_operation_together() { // we can autobatch one or multiple `ReplaceDocuments` together. // if the index exists. 
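// (the first argument to `autobatch_from`; its second argument is the index's current
// primary key, `None` throughout this test, and `doc_imp` now also takes the primary
// key declared by the task itself)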
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, false ), doc_imp(ReplaceDocuments, false )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, false , None), doc_imp(ReplaceDocuments, false , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
// if it doesn't exist.
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); // we can autobatch one or multiple `UpdateDocuments` together. // if the index exists. 
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
// if it doesn't exist.
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))"); // we can autobatch one or multiple DocumentDeletion together - debug_snapshot!(autobatch_from(true, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))"); // we can autobatch one or multiple Settings together - debug_snapshot!(autobatch_from(true, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: 
[0] }, false))"); - debug_snapshot!(autobatch_from(true, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(true, None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))"); - debug_snapshot!(autobatch_from(false, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(false,None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false,None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))"); } #[test] fn simple_document_operation_dont_autobatch_with_other() { // addition, updates and deletion can't batch together - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, 
None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_swap()]), @"Some((DocumentImport { method: 
ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); } #[test] fn document_addition_batch_with_settings() { // simple case - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); // multiple settings and doc addition - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); // addition and setting unordered - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(ReplaceDocuments, true), settings(true)]), 
@"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); // We ensure this kind of batch doesn't batch with forbidden operations - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(UpdateDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(ReplaceDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - 
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
}

#[test]
fn clear_and_additions() {
// these two don't need to batch
- debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
- debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");
// Basic use case
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
// This batch kind doesn't mix with other document additions
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
// But you can batch multiple clears together
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
}

#[test]
fn clear_and_additions_and_settings() {
// A clear doesn't need to autobatch the settings that happen AFTER it: there are no documents left
- debug_snapshot!(autobatch_from(true, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");
- debug_snapshot!(autobatch_from(true, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
- debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: 
true, settings_ids: [1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))"); } #[test] fn anything_and_index_deletion() { // The `IndexDeletion` doesn't batch with anything that happens AFTER. - debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), doc_clr()]), @"Some((IndexDeletion 
{ ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))"); // The index deletion can accept almost any type of `BatchKind` and transform it to an `IndexDeletion`. // First, the basic cases - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))"); - 
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
- debug_snapshot!(autobatch_from(false, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
// Then the mixed cases.
// The index already exists; whatever the rights of the tasks are, it shouldn't change the result.
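// (concretely: `allow_index_creation` can be true or false on any of the tasks below,
// and every combination still collapses into a single `IndexDeletion` batch)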
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), 
idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+ debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");

// When the index doesn't exist yet, it's more complicated.
// Either the first task we encounter creates it, in which case we can create a big batch with everything.
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");

// The rights of the following tasks aren't really important.
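// (Regardless of those rights, everything below folds into the same `IndexDeletion` batch.)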
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");

// Or, the second case: the first task doesn't create the index, so we want to batch it only with tasks that can't create an index.
// That can be a second task that doesn't have the right to create an index, or anything else that can't create an index, like an index deletion, document deletion, document clear, etc.
// All these tasks are going to throw an error `Index doesn't exist` once the batch is processed.
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");

// The third and final case is when the first task doesn't create an index but is directly followed by a task creating an index. In this case we can't batch with what
// follows because we first need to process the erroneous batch.
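// Taken together, the three cases above suggest a simplified predicate for whether a task
// may join the current batch (`may_join_batch` is a hypothetical helper, not the real
// autobatcher, which tracks more state than this):
//
//     fn may_join_batch(index_exists: bool, batch_creates_index: bool, task_creates_index: bool) -> bool {
//         // an existing index, or a batch that already creates it, accepts any task;
//         // otherwise a task that would create the index must wait for the next batch
//         index_exists || batch_creates_index || !task_creates_index
//     }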
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
- debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+ debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
}

#[test]
fn allowed_and_disallowed_index_creation() {
// `DocumentImport` tasks that are allowed to create an index can't be mixed with ones that aren't, except if the index already exists.
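// Concretely, as the snapshots below show: with an existing index, a disallowed import
// followed by an allowed one still batches (`import_ids: [0, 1]`) but the whole batch
// keeps `allow_index_creation: false`; without the index, the allowed import is left
// out of the batch (`import_ids: [0]` only).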
- debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, 
false), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + } + + #[test] + fn autobatch_primary_key() { + // ==> If I have a pk + // With a single update + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + + // With a multiple updates + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, 
None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), 
[doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + + // ==> If I don't have a pk + // With a single update + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + + // With a multiple updates + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); } } From a4be4c49e8ca5d28c6ab6f7c22a56503f00f4475 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 23 Jan 2023 23:58:03 +0100 Subject: [PATCH 051/186] Update index-scheduler/src/batch.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Clément Renault --- index-scheduler/src/batch.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index b52d3d495..97fc14e44 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -1001,17 +1001,14 @@ impl IndexScheduler { if let Some(primary_key) = primary_key { match index.primary_key(index_wtxn)? { - // if a primary key was set AND had already be defined in the index - // but to a different value then we can make the whole batch fail. - Some(pk) if primary_key != pk => { + // if a primary key was set AND had already been defined in the index + // but to a different value, we can make the whole batch fail. + Some(pk) => if primary_key != pk { return Err(milli::Error::from( milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()), ) .into()); } - // if the primary key was set and equal to the one already set for - // the index then there is nothing to do. 
- Some(_) => (), // if the primary key was set and there was no primary key set for this index // we set it to the received value before starting the indexing process. None => { From ea3b269b7722b7e1f1b459b7b35487ad1d0c4eb4 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 23 Jan 2023 23:59:34 +0100 Subject: [PATCH 052/186] reformat --- index-scheduler/src/batch.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index 97fc14e44..bae92c37f 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -1003,11 +1003,13 @@ impl IndexScheduler { match index.primary_key(index_wtxn)? { // if a primary key was set AND had already been defined in the index // but to a different value, we can make the whole batch fail. - Some(pk) => if primary_key != pk { - return Err(milli::Error::from( - milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()), - ) - .into()); + Some(pk) => { + if primary_key != pk { + return Err(milli::Error::from( + milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()), + ) + .into()); + } } // if the primary key was set and there was no primary key set for this index // we set it to the received value before starting the indexing process. From de3c4f1986f0a87b93911ed8ceb76bec16d4b28e Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 12:20:50 +0100 Subject: [PATCH 053/186] throw an error on unknown fields specified in the _geo field --- milli/src/error.rs | 2 ++ milli/src/index.rs | 31 +++++++++++++++++++ .../geo_faceted_documents_ids.snap | 4 +++ milli/src/update/index_documents/enrich.rs | 4 +++ 4 files changed, 41 insertions(+) create mode 100644 milli/src/snapshots/index.rs/unexpected_extra_fields_in_geo_field/geo_faceted_documents_ids.snap diff --git a/milli/src/error.rs b/milli/src/error.rs index 87cb3f360..f96c633f2 100644 --- a/milli/src/error.rs +++ b/milli/src/error.rs @@ -154,6 +154,8 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco pub enum GeoError { #[error("The `_geo` field in the document with the id: `{document_id}` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `{value}`.")] NotAnObject { document_id: Value, value: Value }, + #[error("The `_geo` field in the document with the id: `{document_id}` contains the following unexpected fields: `{value}`.")] + UnexpectedExtraFields { document_id: Value, value: Value }, #[error("Could not find latitude nor longitude in the document with the id: `{document_id}`. Was expecting `_geo.lat` and `_geo.lng` fields.")] MissingLatitudeAndLongitude { document_id: Value }, #[error("Could not find latitude in the document with the id: `{document_id}`. 
Was expecting a `_geo.lat` field.")] diff --git a/milli/src/index.rs b/milli/src/index.rs index 0ab596fa9..8a17cebf4 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -2326,4 +2326,35 @@ pub(crate) mod tests { db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted } + + #[test] + fn unexpected_extra_fields_in_geo_field() { + let index = TempIndex::new(); + + index + .update_settings(|settings| { + settings.set_primary_key("id".to_string()); + settings.set_filterable_fields(HashSet::from(["_geo".to_string()])); + }) + .unwrap(); + + let err = index + .add_documents( + documents!({ "id" : "doggo", "_geo": { "lat": 1, "lng": 2, "doggo": "are the best" }}), + ) + .unwrap_err(); + insta::assert_display_snapshot!(err, @r###"The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"doggo":"are the best"}`."###); + + db_snap!(index, geo_faceted_documents_ids); // ensure that no documents were inserted + + // multiple fields and complex values + let err = index + .add_documents( + documents!({ "id" : "doggo", "_geo": { "lat": 1, "lng": 2, "doggo": "are the best", "and": { "all": ["cats", { "are": "beautiful" } ] } } }), + ) + .unwrap_err(); + insta::assert_display_snapshot!(err, @r###"The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"and":{"all":["cats",{"are":"beautiful"}]},"doggo":"are the best"}`."###); + + db_snap!(index, geo_faceted_documents_ids); // ensure that no documents were inserted + } } diff --git a/milli/src/snapshots/index.rs/unexpected_extra_fields_in_geo_field/geo_faceted_documents_ids.snap b/milli/src/snapshots/index.rs/unexpected_extra_fields_in_geo_field/geo_faceted_documents_ids.snap new file mode 100644 index 000000000..89fb1856a --- /dev/null +++ b/milli/src/snapshots/index.rs/unexpected_extra_fields_in_geo_field/geo_faceted_documents_ids.snap @@ -0,0 +1,4 @@ +--- +source: milli/src/index.rs +--- +[] diff --git a/milli/src/update/index_documents/enrich.rs b/milli/src/update/index_documents/enrich.rs index 4c735856d..ed04e9962 100644 --- a/milli/src/update/index_documents/enrich.rs +++ b/milli/src/update/index_documents/enrich.rs @@ -379,6 +379,10 @@ pub fn validate_geo_from_json(id: &DocumentId, bytes: &[u8]) -> Result match (object.remove("lat"), object.remove("lng")) { (Some(lat), Some(lng)) => { match (extract_finite_float_from_value(lat), extract_finite_float_from_value(lng)) { + (Ok(_), Ok(_)) if !object.is_empty() => Ok(Err(UnexpectedExtraFields { + document_id: debug_id(), + value: object.into(), + })), (Ok(_), Ok(_)) => Ok(Ok(())), (Err(value), Ok(_)) => Ok(Err(BadLatitude { document_id: debug_id(), value })), (Ok(_), Err(value)) => Ok(Err(BadLongitude { document_id: debug_id(), value })), From 32364e9919cdc63e5bb0a81a7b647db59e952c42 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 13:20:20 +0100 Subject: [PATCH 054/186] add tests on the index resource --- meilisearch/tests/common/index.rs | 5 + meilisearch/tests/common/server.rs | 8 + meilisearch/tests/index/errors.rs | 265 +++++++++++++++++++++++++++++ meilisearch/tests/index/mod.rs | 1 + 4 files changed, 279 insertions(+) create mode 100644 meilisearch/tests/index/errors.rs diff --git a/meilisearch/tests/common/index.rs b/meilisearch/tests/common/index.rs index 8e3f5b72a..c127af921 100644 --- a/meilisearch/tests/common/index.rs +++ b/meilisearch/tests/common/index.rs @@ -63,6 +63,11 @@ impl Index<'_> { self.service.post_encoded("/indexes", body, 
self.encoder).await } + pub async fn update_raw(&self, body: Value) -> (Value, StatusCode) { + let url = format!("/indexes/{}", urlencode(self.uid.as_ref())); + self.service.patch_encoded(url, body, self.encoder).await + } + pub async fn update(&self, primary_key: Option<&str>) -> (Value, StatusCode) { let body = json!({ "primaryKey": primary_key, diff --git a/meilisearch/tests/common/server.rs b/meilisearch/tests/common/server.rs index e325da0cb..f2d645563 100644 --- a/meilisearch/tests/common/server.rs +++ b/meilisearch/tests/common/server.rs @@ -95,10 +95,18 @@ impl Server { self.index_with_encoder(uid, Encoder::Plain) } + pub async fn create_index(&self, body: Value) -> (Value, StatusCode) { + self.service.post("/indexes", body).await + } + pub fn index_with_encoder(&self, uid: impl AsRef, encoder: Encoder) -> Index<'_> { Index { uid: uid.as_ref().to_string(), service: &self.service, encoder } } + pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) { + self.service.get(format!("/indexes{parameters}")).await + } + pub async fn list_indexes( &self, offset: Option, diff --git a/meilisearch/tests/index/errors.rs b/meilisearch/tests/index/errors.rs new file mode 100644 index 000000000..ae17a68f1 --- /dev/null +++ b/meilisearch/tests/index/errors.rs @@ -0,0 +1,265 @@ +use meili_snap::*; +use serde_json::json; + +use crate::common::Server; + +#[actix_rt::test] +async fn get_indexes_bad_offset() { + let server = Server::new().await; + + let (response, code) = server.list_indexes_raw("?offset=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer", + "code": "invalid_index_offset", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_offset" + } + "###); +} + +#[actix_rt::test] +async fn get_indexes_bad_limit() { + let server = Server::new().await; + + let (response, code) = server.list_indexes_raw("?limit=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer", + "code": "invalid_index_limit", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_limit" + } + "###); +} + +#[actix_rt::test] +async fn get_indexes_unknown_field() { + let server = Server::new().await; + + let (response, code) = server.list_indexes_raw("?doggo=nolimit").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown parameter `doggo`: expected one of `offset`, `limit`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn create_index_missing_uid() { + let server = Server::new().await; + + let (response, code) = server.create_index(json!({ "primaryKey": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Missing field `uid`", + "code": "missing_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn create_index_bad_uid() { + let server = Server::new().await; + + let (response, code) = server.create_index(json!({ "uid": "the best doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), 
@r###" + { + "message": "Invalid value at `.uid`: `the best doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); + + let (response, code) = server.create_index(json!({ "uid": true })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.uid`: expected a string, but found a boolean: `true`", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn create_index_bad_primary_key() { + let server = Server::new().await; + + let (response, code) = server + .create_index(json!({ "uid": "doggo", "primaryKey": ["the", "best", "doggo"] })) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.primaryKey`: expected a string, but found an array: `[\"the\",\"best\",\"doggo\"]`", + "code": "invalid_index_primary_key", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_primary_key" + } + "###); +} + +#[actix_rt::test] +async fn create_index_unknown_field() { + let server = Server::new().await; + + let (response, code) = server.create_index(json!({ "uid": "doggo", "doggo": "bernese" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `doggo`: expected one of `uid`, `primaryKey`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn get_index_bad_uid() { + let server = Server::new().await; + let index = server.index("the good doggo"); + + let (response, code) = index.get().await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "`the good doggo` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn update_index_bad_primary_key() { + let server = Server::new().await; + let index = server.index("doggo"); + + let (response, code) = index.update_raw(json!({ "primaryKey": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.primaryKey`: expected a string, but found an array: `[\"doggo\"]`", + "code": "invalid_index_primary_key", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_primary_key" + } + "###); +} + +#[actix_rt::test] +async fn update_index_immutable_uid() { + let server = Server::new().await; + let index = server.index("doggo"); + + let (response, code) = index.update_raw(json!({ "uid": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `uid`: expected one of `primaryKey`", + "code": "immutable_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn update_index_immutable_created_at() { + let server = Server::new().await; + let index = server.index("doggo"); + + let (response, code) = index.update_raw(json!({ "createdAt": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `createdAt`: expected one of `primaryKey`", + "code": "immutable_index_created_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_index_created_at" + } + "###); +} + +#[actix_rt::test] +async fn update_index_immutable_updated_at() { + let server = Server::new().await; + let index = server.index("doggo"); + + let (response, code) = index.update_raw(json!({ "updatedAt": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `updatedAt`: expected one of `primaryKey`", + "code": "immutable_index_updated_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_index_updated_at" + } + "###); +} + +#[actix_rt::test] +async fn update_index_unknown_field() { + let server = Server::new().await; + let index = server.index("doggo"); + + let (response, code) = index.update_raw(json!({ "doggo": "bork" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `doggo`: expected one of `primaryKey`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn update_index_bad_uid() { + let server = Server::new().await; + let index = server.index("the good doggo"); + + let (response, code) = index.update_raw(json!({ "primaryKey": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "`the good doggo` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn delete_index_bad_uid() { + let server = Server::new().await; + let index = server.index("the good doggo"); + + let (response, code) = index.delete().await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "`the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); +} diff --git a/meilisearch/tests/index/mod.rs b/meilisearch/tests/index/mod.rs index 9996df2e7..5df5e7e97 100644 --- a/meilisearch/tests/index/mod.rs +++ b/meilisearch/tests/index/mod.rs @@ -1,5 +1,6 @@ mod create_index; mod delete_index; +mod errors; mod get_index; mod stats; mod update_index; From 55e80465510ed6ea2f527406ab4c25a722517bff Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 13:52:21 +0100 Subject: [PATCH 055/186] bump milli --- benchmarks/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- filter-parser/Cargo.toml | 2 +- flatten-serde-json/Cargo.toml | 2 +- json-depth-checker/Cargo.toml | 2 +- milli/Cargo.toml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 73ca8ec33..bf96ca84c 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "benchmarks" -version = "0.40.0" +version = "0.41.0" edition = "2018" publish = false diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 5acbbc632..c8e63a764 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.40.0" +version = "0.41.0" edition = "2018" description = "A CLI to interact with a milli index" publish = false diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index d7e96cebf..8f47bf2bc 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "filter-parser" -version = "0.40.0" +version = "0.41.0" edition = "2021" description = "The parser for the Meilisearch filter syntax" publish = false diff --git a/flatten-serde-json/Cargo.toml b/flatten-serde-json/Cargo.toml index 802bf5f7c..9191364ae 100644 --- a/flatten-serde-json/Cargo.toml +++ b/flatten-serde-json/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flatten-serde-json" -version = "0.40.0" +version = "0.41.0" edition = "2021" description = "Flatten serde-json objects like elastic search" readme = "README.md" diff --git a/json-depth-checker/Cargo.toml b/json-depth-checker/Cargo.toml index 85e52c4fd..63906a276 100644 --- a/json-depth-checker/Cargo.toml +++ b/json-depth-checker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "json-depth-checker" -version = "0.40.0" +version = "0.41.0" edition = "2021" description = "A library that indicates if a JSON must be flattened" publish = false diff --git a/milli/Cargo.toml b/milli/Cargo.toml index b64d96368..c3fccc9e2 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "milli" -version = "0.40.0" +version = "0.41.0" authors = ["Kerollmops "] edition = "2018" From 3d8a3d22d15becfad9ed17c324180fed42784b3a Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Tue, 24 Jan 2023 15:58:34 +0100 Subject: [PATCH 056/186] Update README.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Clément Renault --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ea80410a5..a992b9440 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ DO NOT CONTRIBUTE TO THIS REPOSITORY ANYMORE, IT WILL BE ARCHIVED SOON. ONLY MEILI TEAM IS ALLOWED TO CONTRIBUTE. -The content of this repository is now available in the [Meilisearch repository](https://github.com/meilisearch/meilisearch) in the workspace `milli`. +The content of this repository is now available in the [Meilisearch repository in the workspace `milli`](https://github.com/meilisearch/meilisearch/tree/main/milli). --- From f7ae8bc0650a52515d4dc45f3e5d174715fb460c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Tue, 24 Jan 2023 15:58:41 +0100 Subject: [PATCH 057/186] Update README.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Clément Renault --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a992b9440..9450d5824 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ --- -DO NOT CONTRIBUTE TO THIS REPOSITORY ANYMORE, IT WILL BE ARCHIVED SOON. ONLY MEILI TEAM IS ALLOWED TO CONTRIBUTE. +DO NOT CONTRIBUTE TO THIS REPOSITORY ANYMORE. IT WILL BE ARCHIVED SOON. ONLY THE MEILISEARCH TEAM IS ALLOWED TO CONTRIBUTE. The content of this repository is now available in the [Meilisearch repository in the workspace `milli`](https://github.com/meilisearch/meilisearch/tree/main/milli). From aa17a54feb0508ab5d94171a431b22bc37632877 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 17:30:35 +0100 Subject: [PATCH 058/186] test all the api key error codes --- meilisearch/tests/auth/api_keys.rs | 10 +- meilisearch/tests/auth/errors.rs | 438 +++++++++++++++++++++++++++++ meilisearch/tests/auth/mod.rs | 7 +- meilisearch/tests/dumps/mod.rs | 2 +- 4 files changed, 448 insertions(+), 9 deletions(-) create mode 100644 meilisearch/tests/auth/errors.rs diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index d33610d0a..0ae57d726 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -790,7 +790,7 @@ async fn list_api_keys() { "###); meili_snap::snapshot!(code, @"201 Created"); - let (response, code) = server.list_api_keys().await; + let (response, code) = server.list_api_keys("").await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".results[].createdAt" => "[ignored]", ".results[].updatedAt" => "[ignored]", ".results[].uid" => "[ignored]", ".results[].key" => "[ignored]" }), @r###" { "results": [ @@ -864,7 +864,7 @@ async fn list_api_keys() { async fn error_list_api_keys_no_header() { let server = Server::new_auth().await; - let (response, code) = server.list_api_keys().await; + let (response, code) = server.list_api_keys("").await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { "message": "The Authorization header is missing. 
It must use the bearer authorization method.", @@ -881,7 +881,7 @@ async fn error_list_api_keys_bad_key() { let mut server = Server::new_auth().await; server.use_api_key("d4000bd7225f77d1eb22cc706ed36772bbc36767c016a27f76def7537b68600d"); - let (response, code) = server.list_api_keys().await; + let (response, code) = server.list_api_keys("").await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { "message": "The provided API key is invalid.", @@ -1723,7 +1723,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "###); meili_snap::snapshot!(code, @"401 Unauthorized"); - let (response, code) = server.list_api_keys().await; + let (response, code) = server.list_api_keys("").await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", @@ -1769,7 +1769,7 @@ async fn error_access_api_key_routes_no_master_key_set() { "###); meili_snap::snapshot!(code, @"401 Unauthorized"); - let (response, code) = server.list_api_keys().await; + let (response, code) = server.list_api_keys("").await; meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { "message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.", diff --git a/meilisearch/tests/auth/errors.rs b/meilisearch/tests/auth/errors.rs new file mode 100644 index 000000000..2ef853d72 --- /dev/null +++ b/meilisearch/tests/auth/errors.rs @@ -0,0 +1,438 @@ +use meili_snap::*; +use serde_json::json; +use uuid::Uuid; + +use crate::common::Server; + +#[actix_rt::test] +async fn create_api_key_bad_description() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.add_api_key(json!({ "description": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.description`: expected a string, but found an array: `[\"doggo\"]`", + "code": "invalid_api_key_description", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_description" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_bad_name() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.add_api_key(json!({ "name": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.name`: expected a string, but found an array: `[\"doggo\"]`", + "code": "invalid_api_key_name", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_name" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_bad_uid() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + // bad type + let (response, code) = server.add_api_key(json!({ "uid": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.uid`: expected a string, but found an array: `[\"doggo\"]`", + "code": "invalid_api_key_uid", + "type": "invalid_request", + "link": 
"https://docs.meilisearch.com/errors#invalid_api_key_uid" + } + "###); + + // can't parse + let (response, code) = server.add_api_key(json!({ "uid": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value at `.uid`: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-zA-Z], found `o` at 2", + "code": "invalid_api_key_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_bad_actions() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + // bad type + let (response, code) = server.add_api_key(json!({ "actions": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.actions`: expected an array, but found a string: `\"doggo\"`", + "code": "invalid_api_key_actions", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" + } + "###); + + // can't parse + let (response, code) = server.add_api_key(json!({ "actions": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`", + "code": "invalid_api_key_actions", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_bad_indexes() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + // bad type + let (response, code) = server.add_api_key(json!({ "indexes": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.indexes`: expected an array, but found a string: `\"doggo\"`", + "code": "invalid_api_key_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" + } + "###); + + // can't parse + let (response, code) = server.add_api_key(json!({ "indexes": ["good doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value at `.indexes[0]`: `good doggo` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_api_key_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_bad_expires_at() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + // bad type + let (response, code) = server.add_api_key(json!({ "expires_at": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); + + // can't parse + let (response, code) = server.add_api_key(json!({ "expires_at": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_missing_action() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = + server.add_api_key(json!({ "indexes": ["doggo"], "expiresAt": null })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Missing field `actions`", + "code": "missing_api_key_actions", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_api_key_actions" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_missing_indexes() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server + .add_api_key(json!({ "uid": Uuid::nil() , "actions": ["*"], "expiresAt": null })) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Missing field `indexes`", + "code": "missing_api_key_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_api_key_indexes" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_missing_expires_at() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server + .add_api_key(json!({ "uid": Uuid::nil(), "actions": ["*"], "indexes": ["doggo"] })) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Missing field `expiresAt`", + "code": "missing_api_key_expires_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_api_key_expires_at" + } + "###); +} + +#[actix_rt::test] +async fn create_api_key_unexpected_field() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server + .add_api_key(json!({ "uid": Uuid::nil(), "actions": ["*"], "indexes": ["doggo"], "expiresAt": null, "doggo": "bork" })) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `doggo`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", + "code": "bad_request", + "type": 
"invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn list_api_keys_bad_offset() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.list_api_keys("?offset=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer", + "code": "invalid_api_key_offset", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_offset" + } + "###); +} + +#[actix_rt::test] +async fn list_api_keys_bad_limit() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.list_api_keys("?limit=doggo").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer", + "code": "invalid_api_key_limit", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_limit" + } + "###); +} + +#[actix_rt::test] +async fn list_api_keys_unexpected_field() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.list_api_keys("?doggo=no_limit").await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown parameter `doggo`: expected one of `offset`, `limit`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_bad_description() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "description": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.description`: expected a string, but found an array: `[\"doggo\"]`", + "code": "invalid_api_key_description", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_description" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_bad_name() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "name": ["doggo"] })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.name`: expected a string, but found an array: `[\"doggo\"]`", + "code": "invalid_api_key_name", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_name" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_immutable_uid() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "uid": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `uid`: expected one of `description`, `name`", + "code": "immutable_api_key_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_api_key_uid" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_immutable_actions() { + let mut 
server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "actions": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `actions`: expected one of `description`, `name`", + "code": "immutable_api_key_actions", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_api_key_actions" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_immutable_indexes() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "indexes": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `indexes`: expected one of `description`, `name`", + "code": "immutable_api_key_indexes", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_api_key_indexes" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_immutable_expires_at() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "expiresAt": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `expiresAt`: expected one of `description`, `name`", + "code": "immutable_api_key_expires_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_api_key_expires_at" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_immutable_created_at() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "createdAt": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `createdAt`: expected one of `description`, `name`", + "code": "immutable_api_key_created_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_api_key_created_at" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_immutable_updated_at() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "updatedAt": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Immutable field `updatedAt`: expected one of `description`, `name`", + "code": "immutable_api_key_updated_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#immutable_api_key_updated_at" + } + "###); +} + +#[actix_rt::test] +async fn patch_api_keys_unknown_field() { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + + let (response, code) = server.patch_api_key("doggo", json!({ "doggo": "bork" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Unknown field `doggo`: expected one of `description`, `name`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} diff --git a/meilisearch/tests/auth/mod.rs b/meilisearch/tests/auth/mod.rs index dec02cf1f..422f92d6e 100644 --- a/meilisearch/tests/auth/mod.rs +++ 
b/meilisearch/tests/auth/mod.rs @@ -1,5 +1,6 @@ mod api_keys; mod authorization; +mod errors; mod payload; mod tenant_token; @@ -16,7 +17,7 @@ impl Server { /// Fetch and use the default admin key for nexts http requests. pub async fn use_admin_key(&mut self, master_key: impl AsRef) { self.use_api_key(master_key); - let (response, code) = self.list_api_keys().await; + let (response, code) = self.list_api_keys("").await; assert_eq!(200, code, "{:?}", response); let admin_key = &response["results"][1]["key"]; self.use_api_key(admin_key.as_str().unwrap()); @@ -37,8 +38,8 @@ impl Server { self.service.patch(url, content).await } - pub async fn list_api_keys(&self) -> (Value, StatusCode) { - let url = "/keys"; + pub async fn list_api_keys(&self, params: &str) -> (Value, StatusCode) { + let url = format!("/keys{params}"); self.service.get(url).await } diff --git a/meilisearch/tests/dumps/mod.rs b/meilisearch/tests/dumps/mod.rs index 0759454e8..06cb21f20 100644 --- a/meilisearch/tests/dumps/mod.rs +++ b/meilisearch/tests/dumps/mod.rs @@ -811,7 +811,7 @@ async fn import_dump_v5() { assert_eq!(code, 200); assert_eq!(stats, expected_stats); - let (keys, code) = server.list_api_keys().await; + let (keys, code) = server.list_api_keys("").await; assert_eq!(code, 200); let key = &keys["results"][0]; From c7b2e3be873b4ae40a523de772c09c253e1d2074 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 17:45:53 +0100 Subject: [PATCH 059/186] apply review comments --- index-scheduler/src/autobatcher.rs | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs index b5840c435..e1e48ab90 100644 --- a/index-scheduler/src/autobatcher.rs +++ b/index-scheduler/src/autobatcher.rs @@ -183,9 +183,6 @@ impl BatchKind { ) } // if the primary key set in the task was different than ours we should stop and make this batch fail asap. - // TODO: maybe we could continue to batch tasks that'll fail? But that would mean we need to be extra - // cautious with the index deletion and document clear because we should remember that these tasks were - // supposed to fail even if we don't execute them. K::DocumentImport { method, allow_index_creation, primary_key } => ( Break(BatchKind::DocumentImport { method, @@ -220,18 +217,6 @@ impl BatchKind { (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => { Break(this) }, - // - // 1. If both task don't interact with primary key -> we can continue - // 2. Else -> - // 2.1 If we already have a primary-key -> - // 2.1.1 If the task we're trying to accumulate have a pk -> it must be equal to our primary key to continue - // 2.1.2 If the task don't have a primary-key -> we can continue - // (We've already ensured that the current batch was correct according to our pk in a previous step of the autobatcher) - // 2.2 If we don't have a primary-key -> - // 2.2.1 If both the batch and the task have a primary key they should be equal - // 2.2.2 If the batch is set to Some(None), the task should be too - // 2.2.3 If the batch is set to None -> we can continue - // // NOTE: We need to negate the whole condition since we're checking if we need to break instead of continue. // I wrote it this way because it's easier to understand than the other way around. 
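             // A worked illustration of the rule below, using hypothetical values that
             // are not part of this patch: with a batch primary key of Some("id"), a
             // task whose primary key is Some("id") or None keeps the batch going,
             // while a task whose primary key is Some("uuid") falsifies the condition,
             // so the guard matches and we Break.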
             (this, kind) if !(
@@ -244,14 +229,14 @@ impl BatchKind {
                         primary_key.is_some() &&
                         // 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
                         // 2.1.2 If the task don't have a primary-key -> we can continue
-                        dbg!(kind.primary_key()).map_or(true, |pk| pk == primary_key)
+                        kind.primary_key().map_or(true, |pk| pk == primary_key)
                     ) ||
                     // 2.2 If we don't have a primary-key ->
                     (
                         // 2.2.1 If both the batch and the task have a primary key they should be equal
                         // 2.2.2 If the batch is set to Some(None), the task should be too
                         // 2.2.3 If the batch is set to None -> we can continue
-                        dbg!(&this.primary_key()).zip(dbg!(kind.primary_key())).map_or(true, |(this, kind)| this == kind)
+                        this.primary_key().zip(kind.primary_key()).map_or(true, |(this, kind)| this == kind)
                     )
                 )

From c92948b143dfd19b79c8a4cc61ccd3a198d168df Mon Sep 17 00:00:00 2001
From: Tamo
Date: Tue, 24 Jan 2023 16:17:23 +0100
Subject: [PATCH 060/186] Compute the size of the auth-controller, index-scheduler and all update files in the global stats

---
 file-store/src/lib.rs                 | 33 +++++++++++++++------------
 index-scheduler/src/insta_snapshot.rs |  5 +++-
 index-scheduler/src/lib.rs            |  9 ++++++++
 index-scheduler/src/utils.rs          | 19 +++++++++++----
 meilisearch-auth/src/lib.rs           |  5 ++++
 meilisearch-auth/src/store.rs         |  5 ++++
 meilisearch-types/src/error.rs        |  1 +
 meilisearch/src/routes/mod.rs         | 12 +++++++++-
 8 files changed, 67 insertions(+), 22 deletions(-)

diff --git a/file-store/src/lib.rs b/file-store/src/lib.rs
index e05694c92..ed36f3a91 100644
--- a/file-store/src/lib.rs
+++ b/file-store/src/lib.rs
@@ -1,4 +1,3 @@
-use std::collections::BTreeSet;
 use std::fs::File as StdFile;
 use std::ops::{Deref, DerefMut};
 use std::path::{Path, PathBuf};
@@ -11,10 +10,14 @@ const UPDATE_FILES_PATH: &str = "updates/updates_files";
 
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
+    #[error("Could not parse file name as utf-8")]
+    CouldNotParseFileNameAsUtf8,
     #[error(transparent)]
     IoError(#[from] std::io::Error),
     #[error(transparent)]
     PersistError(#[from] tempfile::PersistError),
+    #[error(transparent)]
+    UuidError(#[from] uuid::Error),
 }
 
 pub type Result<T> = std::result::Result<T, Error>;
@@ -33,13 +36,11 @@ impl DerefMut for File {
     }
 }
 
-#[cfg_attr(test, faux::create)]
 #[derive(Clone, Debug)]
 pub struct FileStore {
     path: PathBuf,
 }
 
-#[cfg(not(test))]
 impl FileStore {
     pub fn new(path: impl AsRef<Path>) -> Result<FileStore> {
         let path = path.as_ref().to_path_buf();
@@ -48,7 +49,6 @@ impl FileStore {
     }
 }
 
-#[cfg_attr(test, faux::methods)]
 impl FileStore {
     /// Creates a new temporary update file.
     /// A call to `persist` is needed to persist the file in the database.
@@ -94,6 +94,14 @@ impl FileStore {
         Ok(())
     }
 
+    pub fn update_total_size(&self) -> Result<u64> {
+        let mut total = 0;
+        for uuid in self.all_uuids()? {
+            total += self.get_size(uuid?)?;
+        }
+        Ok(total)
+    }
+
     pub fn get_size(&self, uuid: Uuid) -> Result<u64> {
         Ok(self.get_update(uuid)?.metadata()?.len())
     }
@@ -105,17 +113,12 @@ impl FileStore {
     }
 
     /// List the Uuids of the files in the FileStore
-    ///
-    /// This function is meant to be used by tests only.
-    #[doc(hidden)]
-    pub fn __all_uuids(&self) -> BTreeSet<Uuid> {
-        let mut uuids = BTreeSet::new();
-        for entry in self.path.read_dir().unwrap() {
-            let entry = entry.unwrap();
-            let uuid = Uuid::from_str(entry.file_name().to_str().unwrap()).unwrap();
-            uuids.insert(uuid);
-        }
-        uuids
+    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>> + '_> {
+        Ok(self.path.read_dir()?.map(|entry| {
+            Ok(Uuid::from_str(
+                entry?.file_name().to_str().ok_or(Error::CouldNotParseFileNameAsUtf8)?,
+            )?)
+        }))
     }
 }
diff --git a/index-scheduler/src/insta_snapshot.rs b/index-scheduler/src/insta_snapshot.rs
index 0f0c9953a..e8d07ee63 100644
--- a/index-scheduler/src/insta_snapshot.rs
+++ b/index-scheduler/src/insta_snapshot.rs
@@ -1,3 +1,4 @@
+use std::collections::BTreeSet;
 use std::fmt::Write;
 
 use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
@@ -92,7 +93,9 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
 
 pub fn snapshot_file_store(file_store: &file_store::FileStore) -> String {
     let mut snap = String::new();
-    for uuid in file_store.__all_uuids() {
+    // we store the uuids in a `BTreeSet` to keep them ordered.
+    let all_uuids = file_store.all_uuids().unwrap().collect::<Result<BTreeSet<_>, _>>().unwrap();
+    for uuid in all_uuids {
         snap.push_str(&format!("{uuid}\n"));
     }
     snap
diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs
index 895b97813..1cc28c260 100644
--- a/index-scheduler/src/lib.rs
+++ b/index-scheduler/src/lib.rs
@@ -452,6 +452,10 @@ impl IndexScheduler {
         &self.index_mapper.indexer_config
     }
 
+    pub fn size(&self) -> Result<u64> {
+        Ok(self.env.real_disk_size()?)
+    }
+
     /// Return the index corresponding to the name.
     ///
     /// * If the index wasn't opened before, the index will be opened.
@@ -898,6 +902,11 @@ impl IndexScheduler {
         Ok(self.file_store.new_update_with_uuid(uuid)?)
     }
 
+    /// List the update files contained in the IndexScheduler.
+    pub fn update_file_size(&self) -> Result<u64> {
+        Ok(self.file_store.update_total_size()?)
+    }
+
     /// Delete a file from the index scheduler.
     ///
     /// Counterpart to the [`create_update_file`](IndexScheduler::create_update_file) method.
diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs
index e13d0e375..8c50e00c7 100644
--- a/index-scheduler/src/utils.rs
+++ b/index-scheduler/src/utils.rs
@@ -508,14 +508,23 @@ impl IndexScheduler {
         if let KindWithContent::DocumentAdditionOrUpdate { content_file, .. } = kind {
             match status {
                 Status::Enqueued | Status::Processing => {
-                    assert!(
-                        self.file_store.__all_uuids().contains(&content_file),
-                        "Could not find uuid `{content_file}` in the file_store. Available uuids are {:?}.",
-                        self.file_store.__all_uuids(),
+                    assert!(self
+                        .file_store
+                        .all_uuids()
+                        .unwrap()
+                        .find(|uuid| uuid.as_ref().unwrap() == &content_file)
+                        .is_some(),
+                        "Could not find uuid `{content_file}` in the file_store. Available uuids are {:?}.",
+                        self.file_store.all_uuids().unwrap().collect::<Result<Vec<_>>>(),
                     );
                 }
                 Status::Succeeded | Status::Failed | Status::Canceled => {
-                    assert!(!self.file_store.__all_uuids().contains(&content_file));
+                    assert!(self
+                        .file_store
+                        .all_uuids()
+                        .unwrap()
+                        .find(|uuid| uuid.as_ref().unwrap() == &content_file)
+                        .is_none());
                 }
             }
         }
diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs
index 609409cf7..adfd00ce5 100644
--- a/meilisearch-auth/src/lib.rs
+++ b/meilisearch-auth/src/lib.rs
@@ -33,6 +33,11 @@ impl AuthController {
         Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
     }
 
+    /// Return the size of the `AuthController` database in bytes.
+    pub fn size(&self) -> Result<u64> {
+        self.store.size()
+    }
+
     pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
         match self.store.get_api_key(create_key.uid)? {
             Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs
index 2574572be..c1cec0ede 100644
--- a/meilisearch-auth/src/store.rs
+++ b/meilisearch-auth/src/store.rs
@@ -60,6 +60,11 @@ impl HeedAuthStore {
         Ok(Self { env, keys, action_keyid_index_expiration, should_close_on_drop: true })
     }
 
+    /// Return the size in bytes of the database
+    pub fn size(&self) -> Result<u64> {
+        Ok(self.env.real_disk_size()?)
+    }
+
     pub fn set_drop_on_close(&mut self, v: bool) {
         self.should_close_on_drop = v;
     }
diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs
index 0f551d584..39d9a1551 100644
--- a/meilisearch-types/src/error.rs
+++ b/meilisearch-types/src/error.rs
@@ -343,6 +343,7 @@ impl ErrorCode for file_store::Error {
         match self {
             Self::IoError(e) => e.error_code(),
             Self::PersistError(e) => e.error_code(),
+            Self::CouldNotParseFileNameAsUtf8 | Self::UuidError(_) => Code::Internal,
         }
     }
 }
diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs
index 9ef036554..eaf014a81 100644
--- a/meilisearch/src/routes/mod.rs
+++ b/meilisearch/src/routes/mod.rs
@@ -4,6 +4,7 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::{IndexScheduler, Query};
 use log::debug;
+use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::settings::{Settings, Unchecked};
 use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
@@ -230,13 +231,15 @@ pub struct Stats {
 
 async fn get_stats(
     index_scheduler: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<IndexScheduler>>,
+    auth_controller: GuardedData<ActionPolicy<{ actions::STATS_GET }>, AuthController>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
     let search_rules = &index_scheduler.filters().search_rules;
-    let stats = create_all_stats((*index_scheduler).clone(), search_rules)?;
+    let stats =
+        create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), search_rules)?;
 
     debug!("returns: {:?}", stats);
     Ok(HttpResponse::Ok().json(stats))
 }
@@ -244,6 +247,7 @@ async fn get_stats(
 
 pub fn create_all_stats(
     index_scheduler: Data<IndexScheduler>,
+    auth_controller: AuthController,
     search_rules: &meilisearch_auth::SearchRules,
 ) -> Result<Stats, ResponseError> {
     let mut last_task: Option<OffsetDateTime> = None;
@@ -253,6 +257,7 @@ pub fn create_all_stats(
         Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() },
         search_rules.authorized_indexes(),
     )?;
+    // accumulate the size of each index
     let processing_index = processing_task.first().and_then(|task| task.index_uid());
     for (name, index) in index_scheduler.indexes()? {
         if !search_rules.is_index_authorized(&name) {
@@ -273,6 +278,11 @@ pub fn create_all_stats(
 
         indexes.insert(name, stats);
     }
+
+    database_size += index_scheduler.size()?;
+    database_size += auth_controller.size()?;
+    database_size += index_scheduler.update_file_size()?;
+
     let stats = Stats { database_size, last_update: last_task, indexes };
     Ok(stats)
 }

From 04c4487660487e9e62d312e26e556cb563c5a10e Mon Sep 17 00:00:00 2001
From: Tamo
Date: Tue, 24 Jan 2023 18:09:03 +0100
Subject: [PATCH 061/186] update the analytics with the new stats method

---
 .../src/analytics/segment_analytics.rs | 24 +++++++++++++------
 meilisearch/src/main.rs                |  3 ++-
 2 files changed, 19 insertions(+), 8 deletions(-)

diff --git a/meilisearch/src/analytics/segment_analytics.rs b/meilisearch/src/analytics/segment_analytics.rs
index 1b5a1d73f..21b6696e7 100644
--- a/meilisearch/src/analytics/segment_analytics.rs
+++ b/meilisearch/src/analytics/segment_analytics.rs
@@ -9,7 +9,7 @@ use actix_web::HttpRequest;
 use byte_unit::Byte;
 use http::header::CONTENT_TYPE;
 use index_scheduler::IndexScheduler;
-use meilisearch_auth::SearchRules;
+use meilisearch_auth::{AuthController, SearchRules};
 use meilisearch_types::InstanceUid;
 use once_cell::sync::Lazy;
 use regex::Regex;
@@ -82,7 +82,11 @@ pub struct SegmentAnalytics {
 }
 
 impl SegmentAnalytics {
-    pub async fn new(opt: &Opt, index_scheduler: Arc<IndexScheduler>) -> Arc<dyn Analytics> {
+    pub async fn new(
+        opt: &Opt,
+        index_scheduler: Arc<IndexScheduler>,
+        auth_controller: AuthController,
+    ) -> Arc<dyn Analytics> {
         let instance_uid = super::find_user_id(&opt.db_path);
         let first_time_run = instance_uid.is_none();
         let instance_uid = instance_uid.unwrap_or_else(|| Uuid::new_v4());
@@ -136,7 +140,7 @@ impl SegmentAnalytics {
             get_tasks_aggregator: TasksAggregator::default(),
             health_aggregator: HealthAggregator::default(),
         });
-        tokio::spawn(segment.run(index_scheduler.clone()));
+        tokio::spawn(segment.run(index_scheduler.clone(), auth_controller.clone()));
 
         let this = Self { instance_uid, sender, user: user.clone() };
 
@@ -361,7 +365,7 @@ impl Segment {
         })
     }
 
-    async fn run(mut self, index_scheduler: Arc<IndexScheduler>) {
+    async fn run(mut self, index_scheduler: Arc<IndexScheduler>, auth_controller: AuthController) {
         const INTERVAL: Duration = Duration::from_secs(60 * 60); // one hour
         // The first batch must be sent after one hour.
         let mut interval =
@@ -370,7 +374,7 @@ impl Segment {
 
         loop {
             select! {
                 _ = interval.tick() => {
-                    self.tick(index_scheduler.clone()).await;
+                    self.tick(index_scheduler.clone(), auth_controller.clone()).await;
                 },
                 msg = self.inbox.recv() => {
                     match msg {
@@ -389,8 +393,14 @@ impl Segment {
         }
     }
 
-    async fn tick(&mut self, index_scheduler: Arc<IndexScheduler>) {
-        if let Ok(stats) = create_all_stats(index_scheduler.into(), &SearchRules::default()) {
+    async fn tick(
+        &mut self,
+        index_scheduler: Arc<IndexScheduler>,
+        auth_controller: AuthController,
+    ) {
+        if let Ok(stats) =
+            create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default())
+        {
             let _ = self
                 .batcher
                 .push(Identify {
diff --git a/meilisearch/src/main.rs b/meilisearch/src/main.rs
index 2841dd52c..b78362ec1 100644
--- a/meilisearch/src/main.rs
+++ b/meilisearch/src/main.rs
@@ -57,7 +57,8 @@ async fn main() -> anyhow::Result<()> {
 
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     let analytics = if !opt.no_analytics {
-        analytics::SegmentAnalytics::new(&opt, index_scheduler.clone()).await
+        analytics::SegmentAnalytics::new(&opt, index_scheduler.clone(), auth_controller.clone())
+            .await
     } else {
         analytics::MockAnalytics::new(&opt)
     };

From 3bcff60d1ce504ed35b9015ea5e8ec4432301dfc Mon Sep 17 00:00:00 2001
From: Tamo
Date: Tue, 24 Jan 2023 18:34:36 +0100
Subject: [PATCH 062/186] makes clippy happy

---
 index-scheduler/src/utils.rs | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs
index 8c50e00c7..b3982c19a 100644
--- a/index-scheduler/src/utils.rs
+++ b/index-scheduler/src/utils.rs
@@ -509,13 +509,12 @@ impl IndexScheduler {
             match status {
                 Status::Enqueued | Status::Processing => {
                     assert!(self
-                        .file_store
-                        .all_uuids()
-                        .unwrap()
-                        .find(|uuid| uuid.as_ref().unwrap() == &content_file)
-                        .is_some(),
+                        .file_store
+                        .all_uuids()
+                        .unwrap()
+                        .any(|uuid| uuid.as_ref().unwrap() == &content_file),
                         "Could not find uuid `{content_file}` in the file_store. Available uuids are {:?}.",
-                        self.file_store.all_uuids().unwrap().collect::<Result<Vec<_>>>(),
+                        self.file_store.all_uuids().unwrap().collect::<Result<Vec<_>>>().unwrap(),
                     );
                 }
                 Status::Succeeded | Status::Failed | Status::Canceled => {
                     assert!(self
                         .file_store
                         .all_uuids()
                         .unwrap()
-                        .find(|uuid| uuid.as_ref().unwrap() == &content_file)
-                        .is_none());
+                        .all(|uuid| uuid.as_ref().unwrap() != &content_file));
                 }
             }
         }

From bf94f8903588662b895663ef64d9038460283cc7 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Wed, 25 Jan 2023 11:04:29 +0100
Subject: [PATCH 063/186] Update index-scheduler/src/lib.rs

Co-authored-by: Louis Dureuil
---
 index-scheduler/src/lib.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs
index 1cc28c260..f880979be 100644
--- a/index-scheduler/src/lib.rs
+++ b/index-scheduler/src/lib.rs
@@ -902,7 +902,7 @@ impl IndexScheduler {
         Ok(self.file_store.new_update_with_uuid(uuid)?)
     }
 
-    /// List the update files contained in the IndexScheduler.
+    /// The size on disk taken by all the updates files contained in the `IndexScheduler`, in bytes.
     pub fn update_file_size(&self) -> Result<u64> {
         Ok(self.file_store.update_total_size()?)
} From 474d4ec498860b489cf226b17d4b63d1b0df99e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 1 Dec 2022 15:09:43 +0100 Subject: [PATCH 064/186] Add tests for the index patterns --- meilisearch/tests/auth/authorization.rs | 347 ++++++++++++++++++++++-- 1 file changed, 330 insertions(+), 17 deletions(-) diff --git a/meilisearch/tests/auth/authorization.rs b/meilisearch/tests/auth/authorization.rs index 8ef2d108d..6013274ef 100644 --- a/meilisearch/tests/auth/authorization.rs +++ b/meilisearch/tests/auth/authorization.rs @@ -77,12 +77,14 @@ static INVALID_RESPONSE: Lazy = Lazy::new(|| { }) }); +const MASTER_KEY: &str = "MASTER_KEY"; + #[actix_rt::test] async fn error_access_expired_key() { use std::{thread, time}; let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); let content = json!({ "indexes": ["products"], @@ -111,7 +113,7 @@ async fn error_access_expired_key() { #[actix_rt::test] async fn error_access_unauthorized_index() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); let content = json!({ "indexes": ["sales"], @@ -144,7 +146,7 @@ async fn error_access_unauthorized_action() { for ((method, route), action) in AUTHORIZATIONS.iter() { // create a new API key letting only the needed action. - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); let content = json!({ "indexes": ["products"], @@ -168,7 +170,7 @@ async fn error_access_unauthorized_action() { #[actix_rt::test] async fn access_authorized_master_key() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); // master key must have access to all routes. for ((method, route), _) in AUTHORIZATIONS.iter() { @@ -185,7 +187,7 @@ async fn access_authorized_restricted_index() { for ((method, route), actions) in AUTHORIZATIONS.iter() { for action in actions { // create a new API key letting only the needed action. - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); let content = json!({ "indexes": ["products"], @@ -222,7 +224,7 @@ async fn access_authorized_no_index_restriction() { for ((method, route), actions) in AUTHORIZATIONS.iter() { for action in actions { // create a new API key letting only the needed action. 
- server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); let content = json!({ "indexes": ["*"], @@ -255,7 +257,7 @@ async fn access_authorized_no_index_restriction() { #[actix_rt::test] async fn access_authorized_stats_restricted_index() { let mut server = Server::new_auth().await; - server.use_admin_key("MASTER_KEY").await; + server.use_admin_key(MASTER_KEY).await; // create index `test` let index = server.index("test"); @@ -295,7 +297,7 @@ async fn access_authorized_stats_restricted_index() { #[actix_rt::test] async fn access_authorized_stats_no_index_restriction() { let mut server = Server::new_auth().await; - server.use_admin_key("MASTER_KEY").await; + server.use_admin_key(MASTER_KEY).await; // create index `test` let index = server.index("test"); @@ -335,7 +337,7 @@ async fn access_authorized_stats_no_index_restriction() { #[actix_rt::test] async fn list_authorized_indexes_restricted_index() { let mut server = Server::new_auth().await; - server.use_admin_key("MASTER_KEY").await; + server.use_admin_key(MASTER_KEY).await; // create index `test` let index = server.index("test"); @@ -376,7 +378,7 @@ async fn list_authorized_indexes_restricted_index() { #[actix_rt::test] async fn list_authorized_indexes_no_index_restriction() { let mut server = Server::new_auth().await; - server.use_admin_key("MASTER_KEY").await; + server.use_admin_key(MASTER_KEY).await; // create index `test` let index = server.index("test"); @@ -414,10 +416,194 @@ async fn list_authorized_indexes_no_index_restriction() { assert!(response.iter().any(|index| index["uid"] == "test")); } +#[actix_rt::test] +async fn access_authorized_index_patterns() { + let mut server = Server::new_auth().await; + server.use_admin_key(MASTER_KEY).await; + + // create products_1 index + let index_1 = server.index("products_1"); + let (response, code) = index_1.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); + + // create products index + let index_ = server.index("products"); + let (response, code) = index_.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); + + // create key with all document access on indices with product_* pattern. + let content = json!({ + "indexes": ["products_*"], + "actions": ["documents.*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), + }); + + // Register the key + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + + // use created key. + let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + // refer to products_1 and products with modified api key. + let index_1 = server.index("products_1"); + + let index_ = server.index("products"); + + // try to create a index via add documents route + let documents = json!([ + { + "id": 1, + "content": "foo", + } + ]); + + // Adding document to products_1 index. Should succeed with 202 + let (response, code) = index_1.add_documents(documents.clone(), None).await; + assert_eq!(202, code, "{:?}", &response); + let task_id = response["taskUid"].as_u64().unwrap(); + + // Adding document to products index. Should Fail with 403 -- invalid_api_key + let (response, code) = index_.add_documents(documents, None).await; + assert_eq!(403, code, "{:?}", &response); + + server.use_api_key(MASTER_KEY); + + // refer to products_1 with modified api key. 
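+    // (a sketch of why we switch keys here: the key generated above only
+    // carries `documents.*` actions, so it is not allowed to read task
+    // statuses; the `get_task` calls below therefore need the master key)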
+ let index_1 = server.index("products_1"); + + index_1.wait_task(task_id).await; + + let (response, code) = index_1.get_task(task_id).await; + assert_eq!(200, code, "{:?}", &response); + assert_eq!(response["status"], "succeeded"); +} + +#[actix_rt::test] +async fn raise_error_non_authorized_index_patterns() { + let mut server = Server::new_auth().await; + server.use_admin_key(MASTER_KEY).await; + + // create products_1 index + let product_1_index = server.index("products_1"); + let (response, code) = product_1_index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); + + // create products_2 index + let product_2_index = server.index("products_2"); + let (response, code) = product_2_index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); + + // create test index + let test_index = server.index("test"); + let (response, code) = test_index.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); + + // create key with all document access on indices with product_* pattern. + let content = json!({ + "indexes": ["products_*"], + "actions": ["documents.*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), + }); + + // Register the key + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + + // use created key. + let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + // refer to products_1 and products_2 with modified api key. + let product_1_index = server.index("products_1"); + let product_2_index = server.index("products_2"); + + // refer to test index + let test_index = server.index("test"); + + // try to create a index via add documents route + let documents = json!([ + { + "id": 1, + "content": "foo", + } + ]); + + // Adding document to products_1 index. Should succeed with 202 + let (response, code) = product_1_index.add_documents(documents.clone(), None).await; + assert_eq!(202, code, "{:?}", &response); + let task1_id = response["taskUid"].as_u64().unwrap(); + + // Adding document to products_2 index. Should succeed with 202 + let (response, code) = product_2_index.add_documents(documents.clone(), None).await; + assert_eq!(202, code, "{:?}", &response); + let task2_id = response["taskUid"].as_u64().unwrap(); + + // Adding document to test index. Should Fail with 403 -- invalid_api_key + let (response, code) = test_index.add_documents(documents, None).await; + assert_eq!(403, code, "{:?}", &response); + + server.use_api_key(MASTER_KEY); + + // refer to products_1 with modified api key. + let product_1_index = server.index("products_1"); + // refer to products_2 with modified api key. 
+ let product_2_index = server.index("products_2"); + + product_1_index.wait_task(task1_id).await; + product_2_index.wait_task(task2_id).await; + + let (response, code) = product_1_index.get_task(task1_id).await; + assert_eq!(200, code, "{:?}", &response); + assert_eq!(response["status"], "succeeded"); + + let (response, code) = product_1_index.get_task(task2_id).await; + assert_eq!(200, code, "{:?}", &response); + assert_eq!(response["status"], "succeeded"); +} + +#[actix_rt::test] +async fn pattern_indexes() { + // Create server with master key + let mut server = Server::new_auth().await; + server.use_admin_key(MASTER_KEY).await; + + // index.* constraints on products_* index pattern + let content = json!({ + "indexes": ["products_*"], + "actions": ["indexes.*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), + }); + + // Generate and use the api key + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + let key = response["key"].as_str().expect("Key is not string"); + server.use_api_key(&key); + + // Create Index products_1 using generated api key + let products_1 = server.index("products_1"); + let (response, code) = products_1.create(Some("id")).await; + assert_eq!(202, code, "{:?}", &response); + + // Fail to create products_* using generated api key + let products_1 = server.index("products_*"); + let (response, code) = products_1.create(Some("id")).await; + assert_eq!(400, code, "{:?}", &response); + + // Fail to create test_1 using generated api key + let products_1 = server.index("test_1"); + let (response, code) = products_1.create(Some("id")).await; + assert_eq!(403, code, "{:?}", &response); +} + #[actix_rt::test] async fn list_authorized_tasks_restricted_index() { let mut server = Server::new_auth().await; - server.use_admin_key("MASTER_KEY").await; + server.use_admin_key(MASTER_KEY).await; // create index `test` let index = server.index("test"); @@ -446,7 +632,6 @@ async fn list_authorized_tasks_restricted_index() { let (response, code) = server.service.get("/tasks").await; assert_eq!(200, code, "{:?}", &response); - println!("{}", response); let response = response["results"].as_array().unwrap(); // key should have access on `products` index. assert!(response.iter().any(|task| task["indexUid"] == "products")); @@ -458,7 +643,7 @@ async fn list_authorized_tasks_restricted_index() { #[actix_rt::test] async fn list_authorized_tasks_no_index_restriction() { let mut server = Server::new_auth().await; - server.use_admin_key("MASTER_KEY").await; + server.use_admin_key(MASTER_KEY).await; // create index `test` let index = server.index("test"); @@ -499,7 +684,7 @@ async fn list_authorized_tasks_no_index_restriction() { #[actix_rt::test] async fn error_creating_index_without_action() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); // create key with access on all indexes. let content = json!({ @@ -587,7 +772,7 @@ async fn lazy_create_index() { ]; for content in contents { - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); let (response, code) = server.add_api_key(content).await; assert_eq!(201, code, "{:?}", &response); assert!(response["key"].is_string()); @@ -643,14 +828,114 @@ async fn lazy_create_index() { } } +#[actix_rt::test] +async fn lazy_create_index_from_pattern() { + let mut server = Server::new_auth().await; + + // create key with access on all indexes. 
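+    // (each key below is in fact scoped to the `products_*` pattern, so only
+    // indexes matching that pattern can be lazily created with them)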
+ let contents = vec![ + json!({ + "indexes": ["products_*"], + "actions": ["*"], + "expiresAt": "2050-11-13T00:00:00Z" + }), + json!({ + "indexes": ["products_*"], + "actions": ["indexes.*", "documents.*", "settings.*", "tasks.*"], + "expiresAt": "2050-11-13T00:00:00Z" + }), + json!({ + "indexes": ["products_*"], + "actions": ["indexes.create", "documents.add", "settings.update", "tasks.get"], + "expiresAt": "2050-11-13T00:00:00Z" + }), + ]; + + for content in contents { + server.use_api_key(MASTER_KEY); + let (response, code) = server.add_api_key(content).await; + assert_eq!(201, code, "{:?}", &response); + assert!(response["key"].is_string()); + + // use created key. + let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + // try to create a index via add documents route + let index = server.index("products_1"); + let test = server.index("test"); + let documents = json!([ + { + "id": 1, + "content": "foo", + } + ]); + + let (response, code) = index.add_documents(documents.clone(), None).await; + assert_eq!(202, code, "{:?}", &response); + let task_id = response["taskUid"].as_u64().unwrap(); + + index.wait_task(task_id).await; + + let (response, code) = index.get_task(task_id).await; + assert_eq!(200, code, "{:?}", &response); + assert_eq!(response["status"], "succeeded"); + + // Fail to create test index + let (response, code) = test.add_documents(documents, None).await; + assert_eq!(403, code, "{:?}", &response); + + // try to create a index via add settings route + let index = server.index("products_2"); + let settings = json!({ "distinctAttribute": "test"}); + + let (response, code) = index.update_settings(settings).await; + assert_eq!(202, code, "{:?}", &response); + let task_id = response["taskUid"].as_u64().unwrap(); + + index.wait_task(task_id).await; + + let (response, code) = index.get_task(task_id).await; + assert_eq!(200, code, "{:?}", &response); + assert_eq!(response["status"], "succeeded"); + + // Fail to create test index + + let index = server.index("test"); + let settings = json!({ "distinctAttribute": "test"}); + + let (response, code) = index.update_settings(settings).await; + assert_eq!(403, code, "{:?}", &response); + + // try to create a index via add specialized settings route + let index = server.index("products_3"); + let (response, code) = index.update_distinct_attribute(json!("test")).await; + assert_eq!(202, code, "{:?}", &response); + let task_id = response["taskUid"].as_u64().unwrap(); + + index.wait_task(task_id).await; + + let (response, code) = index.get_task(task_id).await; + assert_eq!(200, code, "{:?}", &response); + assert_eq!(response["status"], "succeeded"); + + // Fail to create test index + let index = server.index("test"); + let settings = json!({ "distinctAttribute": "test"}); + + let (response, code) = index.update_settings(settings).await; + assert_eq!(403, code, "{:?}", &response); + } +} + #[actix_rt::test] async fn error_creating_index_without_index() { let mut server = Server::new_auth().await; - server.use_api_key("MASTER_KEY"); + server.use_api_key(MASTER_KEY); // create key with access on all indexes. 
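     // (the `products_*` pattern only covers uids starting with the `products_`
     // prefix, so the bare `products` index used further below is not authorized
     // either, which is why every route on it returns 403)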
let content = json!({ - "indexes": ["unexpected"], + "indexes": ["unexpected","products_*"], "actions": ["*"], "expiresAt": "2050-11-13T00:00:00Z" }); @@ -690,4 +975,32 @@ async fn error_creating_index_without_index() { let index = server.index("test3"); let (response, code) = index.create(None).await; assert_eq!(403, code, "{:?}", &response); + + // try to create a index via add documents route + let index = server.index("products"); + let documents = json!([ + { + "id": 1, + "content": "foo", + } + ]); + + let (response, code) = index.add_documents(documents, None).await; + assert_eq!(403, code, "{:?}", &response); + + // try to create a index via add settings route + let index = server.index("products"); + let settings = json!({ "distinctAttribute": "test"}); + let (response, code) = index.update_settings(settings).await; + assert_eq!(403, code, "{:?}", &response); + + // try to create a index via add specialized settings route + let index = server.index("products"); + let (response, code) = index.update_distinct_attribute(json!("test")).await; + assert_eq!(403, code, "{:?}", &response); + + // try to create a index via create index route + let index = server.index("products"); + let (response, code) = index.create(None).await; + assert_eq!(403, code, "{:?}", &response); } From 0b08413c98cee79798de9c2a59c677832a29820f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 1 Dec 2022 15:48:23 +0100 Subject: [PATCH 065/186] Introduce the IndexUidPattern type --- meilisearch-types/src/index_uid_pattern.rs | 97 ++++++++++++++++++++++ meilisearch-types/src/lib.rs | 1 + 2 files changed, 98 insertions(+) create mode 100644 meilisearch-types/src/index_uid_pattern.rs diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs new file mode 100644 index 000000000..1f82e83db --- /dev/null +++ b/meilisearch-types/src/index_uid_pattern.rs @@ -0,0 +1,97 @@ +use std::error::Error; +use std::fmt; +use std::str::FromStr; + +use serde::{Deserialize, Serialize}; + +use crate::error::{Code, ErrorCode}; +use crate::index_uid::{IndexUid, IndexUidFormatError}; + +/// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400 +/// bytes long and optionally ending with a *. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +pub struct IndexUidPattern( + #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}\\*?")))] String, +); + +impl IndexUidPattern { + /// Returns wether this index uid matches this index uid pattern. + pub fn matches(&self, uid: &IndexUid) -> bool { + self.matches_str(uid.as_str()) + } + + /// Returns wether this string matches this index uid pattern. 
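+    ///
+    /// A sketch of the expected behaviour, with hypothetical uids:
+    ///
+    /// ```ignore
+    /// use std::str::FromStr;
+    /// let pattern = IndexUidPattern::from_str("products_*").unwrap();
+    /// assert!(pattern.matches_str("products_1"));
+    /// assert!(pattern.matches_str("products_"));
+    /// assert!(!pattern.matches_str("movies"));
+    /// ```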
+    pub fn matches_str(&self, uid: &str) -> bool {
+        match self.0.strip_suffix('*') {
+            Some(prefix) => uid.starts_with(prefix),
+            None => self.0 == uid,
+        }
+    }
+}
+
+impl std::ops::Deref for IndexUidPattern {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl TryFrom<String> for IndexUidPattern {
+    type Error = IndexUidPatternFormatError;
+
+    fn try_from(uid: String) -> Result<Self, Self::Error> {
+        let result = match uid.strip_suffix('*') {
+            Some(prefix) => IndexUid::from_str(prefix).map(|_| IndexUidPattern(uid)),
+            None => IndexUid::try_from(uid).map(IndexUid::into_inner).map(IndexUidPattern),
+        };
+
+        match result {
+            Ok(index_uid_pattern) => Ok(index_uid_pattern),
+            Err(IndexUidFormatError { invalid_uid }) => {
+                Err(IndexUidPatternFormatError { invalid_uid })
+            }
+        }
+    }
+}
+
+impl FromStr for IndexUidPattern {
+    type Err = IndexUidPatternFormatError;
+
+    fn from_str(uid: &str) -> Result<Self, Self::Err> {
+        uid.to_string().try_into()
+    }
+}
+
+impl From<IndexUidPattern> for String {
+    fn from(IndexUidPattern(uid): IndexUidPattern) -> Self {
+        uid
+    }
+}
+
+#[derive(Debug)]
+pub struct IndexUidPatternFormatError {
+    pub invalid_uid: String,
+}
+
+impl fmt::Display for IndexUidPatternFormatError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "`{}` is not a valid index uid pattern. Index uid patterns \
+            can be an integer or a string containing only alphanumeric \
+            characters, hyphens (-), underscores (_), and \
+            optionally end with a star (*).",
+            self.invalid_uid,
+        )
+    }
+}
+
+impl Error for IndexUidPatternFormatError {}
+
+impl ErrorCode for IndexUidPatternFormatError {
+    fn error_code(&self) -> Code {
+        // TODO should I return a new error code?
+        Code::InvalidIndexUid
+    }
+}
diff --git a/meilisearch-types/src/lib.rs b/meilisearch-types/src/lib.rs
index c7f7ca7f5..22caa8114 100644
--- a/meilisearch-types/src/lib.rs
+++ b/meilisearch-types/src/lib.rs
@@ -2,6 +2,7 @@ pub mod compression;
 pub mod document_formats;
 pub mod error;
 pub mod index_uid;
+pub mod index_uid_pattern;
 pub mod keys;
 pub mod settings;
 pub mod star_or;

From 29961b8c6b5cb3104aae407cb1949d9301b83dc6 Mon Sep 17 00:00:00 2001
From: Kerollmops
Date: Wed, 25 Jan 2023 14:41:36 +0100
Subject: [PATCH 066/186] Make it work with the dumps

---
 dump/src/reader/compat/v5_to_v6.rs         | 2 +-
 dump/src/reader/v6/mod.rs                  | 2 +-
 meilisearch-types/src/index_uid_pattern.rs | 5 +++++
 3 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs
index 237381414..51858450e 100644
--- a/dump/src/reader/compat/v5_to_v6.rs
+++ b/dump/src/reader/compat/v5_to_v6.rs
@@ -183,7 +183,7 @@ impl CompatV5ToV6 {
                     .map(|index| match index {
                         v5::StarOr::Star => v6::StarOr::Star,
                         v5::StarOr::Other(uid) => {
-                            v6::StarOr::Other(v6::IndexUid::new_unchecked(uid.as_str()))
+                            v6::StarOr::Other(v6::IndexUidPattern::new_unchecked(uid.as_str()))
                         }
                     })
                     .collect(),
diff --git a/dump/src/reader/v6/mod.rs b/dump/src/reader/v6/mod.rs
index edf552452..77d7a52bc 100644
--- a/dump/src/reader/v6/mod.rs
+++ b/dump/src/reader/v6/mod.rs
@@ -35,7 +35,7 @@ pub type PaginationSettings = meilisearch_types::settings::PaginationSettings;
 
 // everything related to the api keys
 pub type Action = meilisearch_types::keys::Action;
 pub type StarOr<T> = meilisearch_types::star_or::StarOr<T>;
-pub type IndexUid = meilisearch_types::index_uid::IndexUid;
+pub type IndexUidPattern = meilisearch_types::index_uid_pattern::IndexUidPattern;
 
 // everything related to the errors
 pub type ResponseError =
meilisearch_types::error::ResponseError; diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs index 1f82e83db..8cb50fee9 100644 --- a/meilisearch-types/src/index_uid_pattern.rs +++ b/meilisearch-types/src/index_uid_pattern.rs @@ -10,11 +10,16 @@ use crate::index_uid::{IndexUid, IndexUidFormatError}; /// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long and optionally ending with a *. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] pub struct IndexUidPattern( #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}\\*?")))] String, ); impl IndexUidPattern { + pub fn new_unchecked(s: impl AsRef) -> Self { + Self(s.as_ref().to_string()) + } + /// Returns wether this index uid matches this index uid pattern. pub fn matches(&self, uid: &IndexUid) -> bool { self.matches_str(uid.as_str()) From a858531574f45a1b740c0d610e6d9fd2d1b1d824 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 25 Jan 2023 11:20:15 +0100 Subject: [PATCH 067/186] apply review comments --- file-store/src/lib.rs | 8 +++++--- index-scheduler/src/lib.rs | 4 ++-- index-scheduler/src/utils.rs | 2 +- meilisearch/src/routes/mod.rs | 2 +- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/file-store/src/lib.rs b/file-store/src/lib.rs index ed36f3a91..4b7e52e5d 100644 --- a/file-store/src/lib.rs +++ b/file-store/src/lib.rs @@ -94,15 +94,17 @@ impl FileStore { Ok(()) } - pub fn update_total_size(&self) -> Result { + /// Compute the size of all the updates contained in the file store. + pub fn compute_total_size(&self) -> Result { let mut total = 0; for uuid in self.all_uuids()? { - total += self.get_size(uuid?)?; + total += self.compute_size(uuid?).unwrap_or_default(); } Ok(total) } - pub fn get_size(&self, uuid: Uuid) -> Result { + /// Compute the size of one update + pub fn compute_size(&self, uuid: Uuid) -> Result { Ok(self.get_update(uuid)?.metadata()?.len()) } diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index f880979be..387dac2d0 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -903,8 +903,8 @@ impl IndexScheduler { } /// The size on disk taken by all the updates files contained in the `IndexScheduler`, in bytes. - pub fn update_file_size(&self) -> Result { - Ok(self.file_store.update_total_size()?) + pub fn compute_update_file_size(&self) -> Result { + Ok(self.file_store.compute_total_size()?) } /// Delete a file from the index scheduler. diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs index b3982c19a..c9b71b523 100644 --- a/index-scheduler/src/utils.rs +++ b/index-scheduler/src/utils.rs @@ -514,7 +514,7 @@ impl IndexScheduler { .unwrap() .any(|uuid| uuid.as_ref().unwrap() == &content_file), "Could not find uuid `{content_file}` in the file_store. 
Available uuids are {:?}.", - self.file_store.all_uuids().unwrap().collect::>>().unwrap(), + self.file_store.all_uuids().unwrap().collect::, file_store::Error>>().unwrap(), ); } Status::Succeeded | Status::Failed | Status::Canceled => { diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs index eaf014a81..7aaad7125 100644 --- a/meilisearch/src/routes/mod.rs +++ b/meilisearch/src/routes/mod.rs @@ -281,7 +281,7 @@ pub fn create_all_stats( database_size += index_scheduler.size()?; database_size += auth_controller.size()?; - database_size += index_scheduler.update_file_size()?; + database_size += index_scheduler.compute_update_file_size()?; let stats = Stats { database_size, last_update: last_task, indexes }; Ok(stats) From 184b8afd9e479651cc6063e7fab2d6bc3f3a0d9a Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 25 Jan 2023 14:42:03 +0100 Subject: [PATCH 068/186] Make it work in the CreateApiKey struct --- meilisearch-types/src/keys.rs | 11 ++++++----- meilisearch/src/routes/api_key.rs | 1 + meilisearch/src/routes/mod.rs | 2 ++ 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index b41bb06b6..50afa755c 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -12,15 +12,15 @@ use uuid::Uuid; use crate::error::deserr_codes::*; use crate::error::{unwrap_any, Code, DeserrError, ErrorCode, TakeErrorMessage}; -use crate::index_uid::{IndexUid, IndexUidFormatError}; +use crate::index_uid_pattern::{IndexUidPattern, IndexUidPatternFormatError}; use crate::star_or::StarOr; pub type KeyId = Uuid; -impl MergeWithError for DeserrError { +impl MergeWithError for DeserrError { fn merge( _self_: Option, - other: IndexUidFormatError, + other: IndexUidPatternFormatError, merge_location: deserr::ValuePointerRef, ) -> std::result::Result { DeserrError::error::( @@ -47,10 +47,11 @@ pub struct CreateApiKey { #[deserr(error = DeserrError)] pub actions: Vec, #[deserr(error = DeserrError)] - pub indexes: Vec>, + pub indexes: Vec>, #[deserr(error = DeserrError, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage)] pub expires_at: Option, } + impl CreateApiKey { pub fn to_key(self) -> Key { let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self; @@ -108,7 +109,7 @@ pub struct Key { pub name: Option, pub uid: KeyId, pub actions: Vec, - pub indexes: Vec>, + pub indexes: Vec>, #[serde(with = "time::serde::rfc3339::option")] pub expires_at: Option, #[serde(with = "time::serde::rfc3339")] diff --git a/meilisearch/src/routes/api_key.rs b/meilisearch/src/routes/api_key.rs index 76912bbaa..b43398da1 100644 --- a/meilisearch/src/routes/api_key.rs +++ b/meilisearch/src/routes/api_key.rs @@ -61,6 +61,7 @@ pub struct ListApiKeys { #[deserr(error = DeserrError, default = PAGINATION_DEFAULT_LIMIT(), from(&String) = parse_usize_take_error_message -> TakeErrorMessage)] pub limit: usize, } + impl ListApiKeys { fn as_pagination(self) -> Pagination { Pagination { offset: self.offset, limit: self.limit } diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs index 2e619540a..5111478c9 100644 --- a/meilisearch/src/routes/mod.rs +++ b/meilisearch/src/routes/mod.rs @@ -56,6 +56,7 @@ where { Ok(Some(input.parse()?)) } + pub fn from_string_to_option_take_error_message( input: &str, ) -> Result, TakeErrorMessage> @@ -90,6 +91,7 @@ impl From for SummarizedTaskView { } } } + pub struct Pagination { pub offset: usize, pub limit: usize, From 
ec7de4bae7730ad14921bbb6e00273fe62f50f5b Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 25 Jan 2023 16:12:40 +0100 Subject: [PATCH 069/186] Make it work for any all routes including stats and index swaps --- meilisearch-auth/src/lib.rs | 21 ++++++++++++++++----- meilisearch-auth/src/store.rs | 24 +++++++++++++++++++++--- 2 files changed, 37 insertions(+), 8 deletions(-) diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 8d4a7f2b7..c81f9f20b 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -8,6 +8,7 @@ use std::path::Path; use std::sync::Arc; use error::{AuthControllerError, Result}; +use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey}; use meilisearch_types::star_or::StarOr; use serde::{Deserialize, Serialize}; @@ -141,9 +142,7 @@ impl AuthController { .get_expiration_date(uid, action, None)? .or(match index { // else check if the key has access to the requested index. - Some(index) => { - self.store.get_expiration_date(uid, action, Some(index.as_bytes()))? - } + Some(index) => self.store.get_expiration_date(uid, action, Some(index))?, // or to any index if no index has been requested. None => self.store.prefix_first_expiration_date(uid, action)?, }) { @@ -196,8 +195,20 @@ impl Default for SearchRules { impl SearchRules { pub fn is_index_authorized(&self, index: &str) -> bool { match self { - Self::Set(set) => set.contains("*") || set.contains(index), - Self::Map(map) => map.contains_key("*") || map.contains_key(index), + Self::Set(set) => { + set.contains("*") + || set.contains(index) + || set + .iter() // We must store the IndexUidPattern in the Set + .any(|pattern| IndexUidPattern::new_unchecked(pattern).matches_str(index)) + } + Self::Map(map) => { + map.contains_key("*") + || map.contains_key(index) + || map + .keys() // We must store the IndexUidPattern in the Map + .any(|pattern| IndexUidPattern::new_unchecked(pattern).matches_str(index)) + } } } diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index b3f9ed672..d1c2562c1 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -9,6 +9,7 @@ use std::str; use std::sync::Arc; use hmac::{Hmac, Mac}; +use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::keys::KeyId; use meilisearch_types::milli; use meilisearch_types::milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; @@ -210,11 +211,28 @@ impl HeedAuthStore { &self, uid: Uuid, action: Action, - index: Option<&[u8]>, + index: Option<&str>, ) -> Result>> { let rtxn = self.env.read_txn()?; - let tuple = (&uid, &action, index); - Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?) + let tuple = (&uid, &action, index.map(|s| s.as_bytes())); + match self.action_keyid_index_expiration.get(&rtxn, &tuple)? { + Some(expiration) => Ok(Some(expiration)), + None => { + let tuple = (&uid, &action, None); + for result in self.action_keyid_index_expiration.prefix_iter(&rtxn, &tuple)? 
{ + let ((_, _, index_uid_pattern), expiration) = result?; + if let Some((pattern, index)) = index_uid_pattern.zip(index) { + let index_uid_pattern = str::from_utf8(pattern)?.to_string(); + // TODO I shouldn't unwrap here but rather return an internal error + let pattern = IndexUidPattern::try_from(index_uid_pattern).unwrap(); + if pattern.matches_str(index) { + return Ok(Some(expiration)); + } + } + } + Ok(None) + } + } } pub fn prefix_first_expiration_date( From a3f1b8fdb949019cb8d6b191b93496d3dc91bcbd Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 11:27:51 +0100 Subject: [PATCH 070/186] refactorize the test suite of the add_documents module to use snapshot tests when possible --- meilisearch/tests/documents/add_documents.rs | 704 ++++++++++++------- 1 file changed, 453 insertions(+), 251 deletions(-) diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 83fcb6e83..cbce41e2e 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1,4 +1,5 @@ use actix_web::test; +use meili_snap::{json_string, snapshot}; use serde_json::{json, Value}; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; @@ -30,8 +31,17 @@ async fn add_documents_test_json_content_types() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], 0); + snapshot!(status_code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 0, + "indexUid": "dog", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); // put let req = test::TestRequest::put() @@ -43,8 +53,17 @@ async fn add_documents_test_json_content_types() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], 1); + snapshot!(status_code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 1, + "indexUid": "dog", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); } /// Here we try to send a single document instead of an array with a single document inside. 
@@ -69,8 +88,17 @@ async fn add_single_document_test_json_content_types() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], 0); + snapshot!(status_code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 0, + "indexUid": "dog", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); // put let req = test::TestRequest::put() @@ -82,8 +110,17 @@ async fn add_single_document_test_json_content_types() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], 1); + snapshot!(status_code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 1, + "indexUid": "dog", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); } /// Here we try sending encoded (compressed) document request @@ -110,8 +147,17 @@ async fn add_single_document_gzip_encoded() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], 0); + snapshot!(status_code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 0, + "indexUid": "dog", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); // put let req = test::TestRequest::put() @@ -124,8 +170,17 @@ async fn add_single_document_gzip_encoded() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 202); - assert_eq!(response["taskUid"], 1); + snapshot!(status_code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 1, + "indexUid": "dog", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); } /// Here we try document request with every encoding @@ -184,16 +239,16 @@ async fn error_add_documents_test_bad_content_types() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 415); - assert_eq!( - response["message"], - json!( - r#"The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`"# - ) - ); - assert_eq!(response["code"], "invalid_content_type"); - assert_eq!(response["type"], "invalid_request"); - assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type"); + snapshot!(status_code, @"415 Unsupported Media Type"); + snapshot!(json_string!(response), + @r###" + { + "message": "The Content-Type `text/plain` is invalid. 
Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`", + "code": "invalid_content_type", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_content_type" + } + "###); // put let req = test::TestRequest::put() @@ -205,16 +260,16 @@ async fn error_add_documents_test_bad_content_types() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 415); - assert_eq!( - response["message"], - json!( - r#"The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`"# - ) - ); - assert_eq!(response["code"], "invalid_content_type"); - assert_eq!(response["type"], "invalid_request"); - assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type"); + snapshot!(status_code, @"415 Unsupported Media Type"); + snapshot!(json_string!(response), + @r###" + { + "message": "The Content-Type `text/plain` is invalid. Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`", + "code": "invalid_content_type", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_content_type" + } + "###); } /// missing content-type must be refused @@ -239,16 +294,16 @@ async fn error_add_documents_test_no_content_type() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 415); - assert_eq!( - response["message"], - json!( - r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`"# - ) - ); - assert_eq!(response["code"], "missing_content_type"); - assert_eq!(response["type"], "invalid_request"); - assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type"); + snapshot!(status_code, @"415 Unsupported Media Type"); + snapshot!(json_string!(response), + @r###" + { + "message": "A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`", + "code": "missing_content_type", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_content_type" + } + "###); // put let req = test::TestRequest::put() @@ -259,16 +314,16 @@ async fn error_add_documents_test_no_content_type() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 415); - assert_eq!( - response["message"], - json!( - r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`"# - ) - ); - assert_eq!(response["code"], "missing_content_type"); - assert_eq!(response["type"], "invalid_request"); - assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type"); + snapshot!(status_code, @"415 Unsupported Media Type"); + snapshot!(json_string!(response), + @r###" + { + "message": "A Content-Type header is missing. 
Accepted values for the Content-Type header are: `application/json`, `application/x-ndjson`, `text/csv`", + "code": "missing_content_type", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_content_type" + } + "###); } #[actix_rt::test] @@ -288,16 +343,16 @@ async fn error_add_malformed_csv_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!( - r#"The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."# - ) - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); // put let req = test::TestRequest::put() @@ -309,16 +364,16 @@ async fn error_add_malformed_csv_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!( - r#"The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."# - ) - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); } #[actix_rt::test] @@ -338,16 +393,16 @@ async fn error_add_malformed_json_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!( - r#"The `json` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 1 column 14`."# - ) - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `json` payload provided is malformed. 
`Couldn't serialize document value: key must be a string at line 1 column 14`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); // put let req = test::TestRequest::put() @@ -359,16 +414,16 @@ async fn error_add_malformed_json_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!( - r#"The `json` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 1 column 14`."# - ) - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `json` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 1 column 14`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); // truncate @@ -384,16 +439,16 @@ async fn error_add_malformed_json_documents() { let res = test::call_service(&app, req).await; let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!( - r#"The `json` payload provided is malformed. `Couldn't serialize document value: data are neither an object nor a list of objects`."# - ) - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `json` payload provided is malformed. `Couldn't serialize document value: data are neither an object nor a list of objects`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); // add one more char to the long string to test if the truncating works. let document = format!("\"{}m\"", long); @@ -405,14 +460,16 @@ async fn error_add_malformed_json_documents() { let res = test::call_service(&app, req).await; let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!("The `json` payload provided is malformed. `Couldn't serialize document value: data are neither an object nor a list of objects`.") - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `json` payload provided is malformed. 
`Couldn't serialize document value: data are neither an object nor a list of objects`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); } #[actix_rt::test] @@ -432,16 +489,16 @@ async fn error_add_malformed_ndjson_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!( - r#"The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`."# - ) - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); // put let req = test::TestRequest::put() @@ -453,14 +510,16 @@ async fn error_add_malformed_ndjson_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!( - response["message"], - json!("The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`.") - ); - assert_eq!(response["code"], json!("malformed_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "The `ndjson` payload provided is malformed. 
`Couldn't serialize document value: key must be a string at line 2 column 2`.", + "code": "malformed_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#malformed_payload" + } + "###); } #[actix_rt::test] @@ -480,11 +539,16 @@ async fn error_add_missing_payload_csv_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!(response["message"], json!(r#"A csv payload is missing."#)); - assert_eq!(response["code"], json!("missing_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "A csv payload is missing.", + "code": "missing_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_payload" + } + "###); // put let req = test::TestRequest::put() @@ -496,11 +560,16 @@ async fn error_add_missing_payload_csv_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!(response["message"], json!(r#"A csv payload is missing."#)); - assert_eq!(response["code"], json!("missing_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "A csv payload is missing.", + "code": "missing_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_payload" + } + "###); } #[actix_rt::test] @@ -520,11 +589,16 @@ async fn error_add_missing_payload_json_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!(response["message"], json!(r#"A json payload is missing."#)); - assert_eq!(response["code"], json!("missing_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "A json payload is missing.", + "code": "missing_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_payload" + } + "###); // put let req = test::TestRequest::put() @@ -536,11 +610,16 @@ async fn error_add_missing_payload_json_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!(response["message"], json!(r#"A json payload is missing."#)); - assert_eq!(response["code"], json!("missing_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "A json payload is missing.", + "code": "missing_payload", + "type": "invalid_request", + "link": 
"https://docs.meilisearch.com/errors#missing_payload" + } + "###); } #[actix_rt::test] @@ -560,11 +639,16 @@ async fn error_add_missing_payload_ndjson_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#)); - assert_eq!(response["code"], json!("missing_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "A ndjson payload is missing.", + "code": "missing_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_payload" + } + "###); // put let req = test::TestRequest::put() @@ -576,11 +660,16 @@ async fn error_add_missing_payload_ndjson_documents() { let status_code = res.status(); let body = test::read_body(res).await; let response: Value = serde_json::from_slice(&body).unwrap_or_default(); - assert_eq!(status_code, 400); - assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#)); - assert_eq!(response["code"], json!("missing_payload")); - assert_eq!(response["type"], json!("invalid_request")); - assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload")); + snapshot!(status_code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "A ndjson payload is missing.", + "code": "missing_payload", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_payload" + } + "###); } #[actix_rt::test] @@ -596,26 +685,32 @@ async fn add_documents_no_index_creation() { ]); let (response, code) = index.add_documents(documents, None).await; - assert_eq!(code, 202); + snapshot!(code, @"202 Accepted"); assert_eq!(response["taskUid"], 0); - /* - * currently we don’t check these field to stay ISO with meilisearch - * assert_eq!(response["status"], "pending"); - * assert_eq!(response["meta"]["type"], "DocumentsAddition"); - * assert_eq!(response["meta"]["format"], "Json"); - * assert_eq!(response["meta"]["primaryKey"], Value::Null); - * assert!(response.get("enqueuedAt").is_some()); - */ index.wait_task(0).await; let (response, code) = index.get_task(0).await; - assert_eq!(code, 200); - assert_eq!(response["status"], "succeeded"); - assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentAdditionOrUpdate"); - assert_eq!(response["details"]["receivedDocuments"], 1); - assert_eq!(response["details"]["indexedDocuments"], 1); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 0, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); let processed_at = OffsetDateTime::parse(response["finishedAt"].as_str().unwrap(), &Rfc3339).unwrap(); @@ -625,7 +720,7 @@ async fn add_documents_no_index_creation() { // index was created, and primary key was inferred. 
let (response, code) = index.get().await; - assert_eq!(code, 200); + snapshot!(code, @"200 OK"); assert_eq!(response["primaryKey"], "id"); } @@ -635,15 +730,16 @@ async fn error_document_add_create_index_bad_uid() { let index = server.index("883 fj!"); let (response, code) = index.add_documents(json!([{"id": 1}]), None).await; - let expected_response = json!({ - "message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", - "code": "invalid_index_uid", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#invalid_index_uid" - }); - - assert_eq!(code, 400); - assert_eq!(response, expected_response); + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), + @r###" + { + "message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); } #[actix_rt::test] @@ -658,21 +754,53 @@ async fn document_addition_with_primary_key() { } ]); let (response, code) = index.add_documents(documents, Some("primary")).await; - assert_eq!(code, 202, "response: {}", response); + snapshot!(code, @"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 0, + "indexUid": "test", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); index.wait_task(0).await; let (response, code) = index.get_task(0).await; - assert_eq!(code, 200); - assert_eq!(response["status"], "succeeded"); - assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentAdditionOrUpdate"); - assert_eq!(response["details"]["receivedDocuments"], 1); - assert_eq!(response["details"]["indexedDocuments"], 1); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 0, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); let (response, code) = index.get().await; - assert_eq!(code, 200); - assert_eq!(response["primaryKey"], "primary"); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]" }), + @r###" + { + "uid": "test", + "createdAt": "[date]", + "updatedAt": "[date]", + "primaryKey": "primary" + } + "###); } #[actix_rt::test] @@ -688,7 +816,17 @@ async fn replace_document() { ]); let (response, code) = index.add_documents(documents, None).await; - assert_eq!(code, 202, "response: {}", response); + snapshot!(code,@"202 Accepted"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "taskUid": 0, + "indexUid": "test", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "enqueuedAt": "[date]" + } + "###); index.wait_task(0).await; @@ -700,17 +838,41 @@ async fn replace_document() { ]); let (_response, code) = 
index.add_documents(documents, None).await; - assert_eq!(code, 202); + snapshot!(code,@"202 Accepted"); index.wait_task(1).await; let (response, code) = index.get_task(1).await; - assert_eq!(code, 200); - assert_eq!(response["status"], "succeeded"); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); let (response, code) = index.get_document(1, None).await; - assert_eq!(code, 200); - assert_eq!(response.to_string(), r##"{"doc_id":1,"other":"bar"}"##); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response), + @r###" + { + "doc_id": 1, + "other": "bar" + } + "###); } #[actix_rt::test] @@ -718,7 +880,7 @@ async fn add_no_documents() { let server = Server::new().await; let index = server.index("test"); let (_response, code) = index.add_documents(json!([]), None).await; - assert_eq!(code, 202); + snapshot!(code, @"202 Accepted"); } #[actix_rt::test] @@ -769,20 +931,31 @@ async fn error_add_documents_bad_document_id() { index.add_documents(documents, None).await; index.wait_task(1).await; let (response, code) = index.get_task(1).await; - assert_eq!(code, 200); - assert_eq!(response["status"], json!("failed")); - assert_eq!( - response["error"]["message"], - json!( - r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."# - ) - ); - assert_eq!(response["error"]["code"], json!("invalid_document_id")); - assert_eq!(response["error"]["type"], json!("invalid_request")); - assert_eq!( - response["error"]["link"], - json!("https://docs.meilisearch.com/errors#invalid_document_id") - ); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Document identifier `\"foo & bar\"` is invalid. 
A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).", + "code": "invalid_document_id", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_id" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); } #[actix_rt::test] @@ -799,18 +972,31 @@ async fn error_add_documents_missing_document_id() { index.add_documents(documents, None).await; index.wait_task(1).await; let (response, code) = index.get_task(1).await; - assert_eq!(code, 200); - assert_eq!(response["status"], "failed"); - assert_eq!( - response["error"]["message"], - json!(r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#) - ); - assert_eq!(response["error"]["code"], json!("missing_document_id")); - assert_eq!(response["error"]["type"], json!("invalid_request")); - assert_eq!( - response["error"]["link"], - json!("https://docs.meilisearch.com/errors#missing_document_id") - ); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Document doesn't have a `docid` attribute: `{\"id\":\"11\",\"content\":\"foobar\"}`.", + "code": "missing_document_id", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_document_id" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); } #[actix_rt::test] @@ -831,22 +1017,14 @@ async fn error_document_field_limit_reached() { let documents = json!([big_object]); let (_response, code) = index.update_documents(documents, Some("id")).await; - assert_eq!(code, 202); + snapshot!(code, @"202"); index.wait_task(0).await; let (response, code) = index.get_task(0).await; - assert_eq!(code, 200); + snapshot!(code, @"200"); // Documents without a primary key are not accepted. - assert_eq!(response["status"], "failed"); - - let expected_error = json!({ - "message": "A document cannot contain more than 65,535 fields.", - "code": "document_fields_limit_reached", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#document_fields_limit_reached" - }); - - assert_eq!(response["error"], expected_error); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @""); } #[actix_rt::test] @@ -866,8 +1044,31 @@ async fn add_documents_invalid_geo_field() { index.add_documents(documents, None).await; index.wait_task(2).await; let (response, code) = index.get_task(2).await; - assert_eq!(code, 200); - assert_eq!(response["status"], "failed"); + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 2, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "The `_geo` field in the document with the id: `11` is not an object. 
Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); } #[actix_rt::test] @@ -885,15 +1086,16 @@ async fn error_add_documents_payload_size() { let documents = json!(documents); let (response, code) = index.add_documents(documents, None).await; - let expected_response = json!({ - "message": "The provided payload reached the size limit.", - "code": "payload_too_large", - "type": "invalid_request", - "link": "https://docs.meilisearch.com/errors#payload_too_large" - }); - - assert_eq!(response, expected_response); - assert_eq!(code, 413); + snapshot!(code, @"413 Payload Too Large"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "message": "The provided payload reached the size limit.", + "code": "payload_too_large", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#payload_too_large" + } + "###); } #[actix_rt::test] @@ -913,7 +1115,7 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(0).await; assert_eq!(code, 200); - insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { "uid": 0, @@ -953,7 +1155,7 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(1).await; assert_eq!(code, 200); - insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { "uid": 1, @@ -991,7 +1193,7 @@ async fn error_primary_key_inference() { let (response, code) = index.get_task(2).await; assert_eq!(code, 200); - insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), @r###" { "uid": 2, From 934f2b3cb517e6ae53c47be252883faebdcfab28 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 11:51:05 +0100 Subject: [PATCH 071/186] exhaustively test all the errors that can arise from a bad geo field --- meilisearch/tests/documents/add_documents.rs | 403 +++++++++++++++++++ 1 file changed, 403 insertions(+) diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index cbce41e2e..301760701 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1034,6 +1034,7 @@ async fn add_documents_invalid_geo_field() { index.create(Some("id")).await; index.update_settings(json!({"sortableAttributes": ["_geo"]})).await; + // _geo is not an object let documents = json!([ { "id": "11", @@ -1069,6 +1070,408 @@ async fn add_documents_invalid_geo_field() { "finishedAt": "[date]" } "###); + + // _geo 
is an object but is missing both the lat and lng + let documents = json!([ + { + "id": "11", + "_geo": {} + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(3).await; + let (response, code) = index.get_task(3).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 3, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find latitude nor longitude in the document with the id: `11`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but is missing both the lat and lng and contain an unexpected field + let documents = json!([ + { + "id": "11", + "_geo": { "doggos": "are good" } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(4).await; + let (response, code) = index.get_task(4).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 4, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find latitude nor longitude in the document with the id: `11`. Was expecting `_geo.lat` and `_geo.lng` fields.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but only contains the lat + let documents = json!([ + { + "id": "11", + "_geo": { "lat": 12 } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(5).await; + let (response, code) = index.get_task(5).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 5, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find longitude in the document with the id: `11`. 
Was expecting a `_geo.lng` field.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but only contains the lng + let documents = json!([ + { + "id": "11", + "_geo": { "lng": 12 } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(6).await; + let (response, code) = index.get_task(6).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 6, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find latitude in the document with the id: `11`. Was expecting a `_geo.lat` field.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but the lat has a wrong type + let documents = json!([ + { + "id": "11", + "_geo": { "lat": true } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(7).await; + let (response, code) = index.get_task(7).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 7, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find longitude in the document with the id: `11`. Was expecting a `_geo.lng` field.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but the lng has a wrong type + let documents = json!([ + { + "id": "11", + "_geo": { "lng": true } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(8).await; + let (response, code) = index.get_task(8).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 8, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find latitude in the document with the id: `11`. 
Was expecting a `_geo.lat` field.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but the lat and lng have a wrong type + let documents = json!([ + { + "id": "11", + "_geo": { "lat": false, "lng": true } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(9).await; + let (response, code) = index.get_task(9).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 9, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not parse latitude nor longitude in the document with the id: `11`. Was expecting finite numbers but instead got `false` and `true`.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but the lat can't be parsed as a float + let documents = json!([ + { + "id": "11", + "_geo": { "lat": "doggo" } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(10).await; + let (response, code) = index.get_task(10).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 10, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find longitude in the document with the id: `11`. Was expecting a `_geo.lng` field.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but the lng can't be parsed as a float + let documents = json!([ + { + "id": "11", + "_geo": { "lng": "doggo" } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(11).await; + let (response, code) = index.get_task(11).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 11, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not find latitude in the document with the id: `11`. 
Was expecting a `_geo.lat` field.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is an object but the lat and lng can't be parsed as a float + let documents = json!([ + { + "id": "11", + "_geo": { "lat": "doggo", "lng": "doggo" } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(12).await; + let (response, code) = index.get_task(12).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 12, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": { + "message": "Could not parse latitude nor longitude in the document with the id: `11`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + // _geo is a valid object but contains one extra unknown field + let documents = json!([ + { + "id": "11", + "_geo": { "lat": 1, "lng": 2, "doggo": "are the best" } + } + ]); + + index.add_documents(documents, None).await; + index.wait_task(13).await; + let (response, code) = index.get_task(13).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), + @r###" + { + "uid": 13, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); } #[actix_rt::test] From 8356f109c138eab9e076c658a82feb1859ef8d6b Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 24 Jan 2023 13:25:31 +0100 Subject: [PATCH 072/186] bump milli to fix the last test --- Cargo.lock | 8 ++++---- meilisearch-types/Cargo.toml | 2 +- meilisearch/tests/documents/add_documents.rs | 9 +++++++-- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 35a2313a1..bbd41a5e3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1301,7 +1301,7 @@ dependencies = [ [[package]] name = "filter-parser" version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" dependencies = [ "nom", "nom_locate", @@ -1320,7 +1320,7 @@ dependencies = [ [[package]] name = "flatten-serde-json" version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" dependencies = [ "serde_json", ] @@ -1885,7 +1885,7 @@ dependencies = [ [[package]] name = "json-depth-checker" version = "0.40.0" -source = 
"git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" dependencies = [ "serde_json", ] @@ -2434,7 +2434,7 @@ dependencies = [ [[package]] name = "milli" version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.40.0#1c4b1b3b2dcd1b84da603a381c898da879c4adb5" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" dependencies = [ "bimap", "bincode", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 3bc43bee3..5c3c13af5 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -16,7 +16,7 @@ file-store = { path = "../file-store" } flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.40.0", default-features = false } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.41.0", default-features = false } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } serde-cs = "0.2.4" diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 301760701..0e1d33f47 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1458,14 +1458,19 @@ async fn add_documents_invalid_geo_field() { { "uid": 13, "indexUid": "test", - "status": "succeeded", + "status": "failed", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 1, "indexedDocuments": 1 }, - "error": null, + "error": { + "message": "The `_geo` field in the document with the id: `11` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", + "code": "invalid_document_geo_field", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" + }, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", From 481df7a8b66b1214881620c0028fd80ad602c618 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 25 Jan 2023 16:19:55 +0100 Subject: [PATCH 073/186] Update meilisearch/tests/documents/add_documents.rs Co-authored-by: Louis Dureuil --- meilisearch/tests/documents/add_documents.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 0e1d33f47..13cbd8a04 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1108,7 +1108,7 @@ async fn add_documents_invalid_geo_field() { } "###); - // _geo is an object but is missing both the lat and lng and contain an unexpected field + // _geo is an object but is missing both the lat and lng and contains an unexpected field let documents = json!([ { "id": "11", From cac93f149e45a7b9525b332b88d6f4cec53adfb6 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 25 Jan 2023 16:52:54 +0100 Subject: [PATCH 074/186] fix the tests after rebasing --- meilisearch/tests/documents/add_documents.rs | 28 ++++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 13cbd8a04..e553dcacd 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -942,7 +942,7 @@ async fn 
error_add_documents_bad_document_id() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Document identifier `\"foo & bar\"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).", @@ -983,7 +983,7 @@ async fn error_add_documents_missing_document_id() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Document doesn't have a `docid` attribute: `{\"id\":\"11\",\"content\":\"foobar\"}`.", @@ -1056,7 +1056,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "The `_geo` field in the document with the id: `11` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", @@ -1093,7 +1093,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find latitude nor longitude in the document with the id: `11`. Was expecting `_geo.lat` and `_geo.lng` fields.", @@ -1130,7 +1130,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find latitude nor longitude in the document with the id: `11`. Was expecting `_geo.lat` and `_geo.lng` fields.", @@ -1167,7 +1167,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find longitude in the document with the id: `11`. Was expecting a `_geo.lng` field.", @@ -1204,7 +1204,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find latitude in the document with the id: `11`. Was expecting a `_geo.lat` field.", @@ -1241,7 +1241,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find longitude in the document with the id: `11`. Was expecting a `_geo.lng` field.", @@ -1278,7 +1278,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find latitude in the document with the id: `11`. Was expecting a `_geo.lat` field.", @@ -1315,7 +1315,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not parse latitude nor longitude in the document with the id: `11`. Was expecting finite numbers but instead got `false` and `true`.", @@ -1352,7 +1352,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find longitude in the document with the id: `11`. 
Was expecting a `_geo.lng` field.", @@ -1389,7 +1389,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not find latitude in the document with the id: `11`. Was expecting a `_geo.lat` field.", @@ -1426,7 +1426,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "Could not parse latitude nor longitude in the document with the id: `11`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", @@ -1463,7 +1463,7 @@ async fn add_documents_invalid_geo_field() { "canceledBy": null, "details": { "receivedDocuments": 1, - "indexedDocuments": 1 + "indexedDocuments": 0 }, "error": { "message": "The `_geo` field in the document with the id: `11` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", From b45235812447c5f3dc33c2527430bfc00a7cf498 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 26 Jan 2023 18:12:56 +0100 Subject: [PATCH 075/186] Update README.md Co-authored-by: gui machiavelli --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 62d9ef241..4081f66bc 100644 --- a/README.md +++ b/README.md @@ -103,7 +103,7 @@ Thank you for your support! ## 👩‍💻 Contributing -Meilisearch is and always will be open-source! If you want to contribute to the project, please take a look at this [guidelines](CONTRIBUTING.md). +Meilisearch is, and will always be, open-source! If you want to contribute to the project, please take a look at [our contribution guidelines](CONTRIBUTING.md). We are looking forward to review your contribution! ❤️ From 8a6d5480410ada7bfeac2ba0aa76f95941d1bcd3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 26 Jan 2023 18:13:08 +0100 Subject: [PATCH 076/186] Update README.md Co-authored-by: gui machiavelli --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 4081f66bc..ae3a38818 100644 --- a/README.md +++ b/README.md @@ -105,8 +105,6 @@ Thank you for your support! Meilisearch is, and will always be, open-source! If you want to contribute to the project, please take a look at [our contribution guidelines](CONTRIBUTING.md). -We are looking forward to review your contribution! ❤️ - ## 📦 Technical information More information about technical details related to the Meilisearch project: From 8a66ba01d87f0a60736f9b0d6bdf528bd2a15680 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 26 Jan 2023 18:13:53 +0100 Subject: [PATCH 077/186] Update README.md Co-authored-by: gui machiavelli --- README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index ae3a38818..8cfcd4364 100644 --- a/README.md +++ b/README.md @@ -107,7 +107,8 @@ Meilisearch is, and will always be, open-source! If you want to contribute to th ## 📦 Technical information -More information about technical details related to the Meilisearch project: -- The releases of Meilisearch and their associated binaries are available [in this GitHub section](https://github.com/meilisearch/meilisearch/releases). 
-- We versionize the Meilisearch binaries following the [SemVer conventions](https://semver.org/), and we also provide our detailed [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md). -The crates in this repository are not currently available on [crates.io](https://crates.io/) and do not follow [SemVer conventions](https://semver.org). +Meilisearch releases and their associated binaries are available [on this GitHub page](https://github.com/meilisearch/meilisearch/releases). + +The binaries are versioned following [SemVer conventions](https://semver.org/). To learn more, read our [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md). + +Unlike the binaries, crates in this repository are not currently available on [crates.io](https://crates.io/) and do not follow [SemVer conventions](https://semver.org). From b9d8bd77fc3eef41b3815cce1d5fff2b7b4b9d92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 26 Jan 2023 18:14:00 +0100 Subject: [PATCH 078/186] Update README.md Co-authored-by: gui machiavelli --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8cfcd4364..e722e036e 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ Thank you for your support! Meilisearch is, and will always be, open-source! If you want to contribute to the project, please take a look at [our contribution guidelines](CONTRIBUTING.md). -## 📦 Technical information +## 📦 Versioning Meilisearch releases and their associated binaries are available [on this GitHub page](https://github.com/meilisearch/meilisearch/releases). From b2d25c07d79ba1e93692c1daada1359a46372eff Mon Sep 17 00:00:00 2001 From: curquiza Date: Mon, 30 Jan 2023 14:31:36 +0100 Subject: [PATCH 079/186] Add guide to create a prototype --- CONTRIBUTING.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4b849756b..ec268e2de 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -99,6 +99,28 @@ _[Read more about this](https://github.com/meilisearch/integration-guides/blob/m The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/engine-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release. +### How to publish a prototype + +Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier for users to test. + +The prototype name must follow this convention: `prototype-X-Y` where +- `X` is the feature name +- `Y` is the version of the prototype, starting from `0`. + +Example: `prototype-auto-resize-0`. + +Steps to create a prototype: + +1. In your terminal, go to the last commit of your branch (the one you want to provide as a prototype). +2. Create a tag following the convention: `git tag prototype-X-Y` +3. Push the tag: `git push origin prototype-X-Y` +4. Check that the [Docker CI](https://github.com/meilisearch/meilisearch/actions/workflows/publish-docker-images.yml) is now running. + +⚙️ Once the CI has finished running (~1h30), a Docker image named `prototype-X-Y` will be available on [DockerHub](https://hub.docker.com/repository/docker/getmeili/meilisearch/general). People can use it with the following command: `docker run -p 7700:7700 getmeili/meilisearch:prototype-X-Y`. +However, no binaries will be created.
If users do not use Docker, they can go to the `prototype-X-Y` tag in the Meilisearch repository and compile from source. + +⚠️ When sharing a prototype with users, warn them not to use it in production. Prototypes are for testing purposes only. + ### Release assets For each release, the following assets are created: From 3505ee47f84cfd069e85835cda83ad93b419ec7d Mon Sep 17 00:00:00 2001 From: curquiza Date: Mon, 30 Jan 2023 14:33:09 +0100 Subject: [PATCH 080/186] Add volume to docker command --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ec268e2de..50d8d13bc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -116,7 +116,7 @@ Steps to create a prototype: 3. Push the tag: `git push origin prototype-X-Y` 4. Check that the [Docker CI](https://github.com/meilisearch/meilisearch/actions/workflows/publish-docker-images.yml) is now running. -⚙️ Once the CI has finished running (~1h30), a Docker image named `prototype-X-Y` will be available on [DockerHub](https://hub.docker.com/repository/docker/getmeili/meilisearch/general). People can use it with the following command: `docker run -p 7700:7700 getmeili/meilisearch:prototype-X-Y`. +⚙️ Once the CI has finished running (~1h30), a Docker image named `prototype-X-Y` will be available on [DockerHub](https://hub.docker.com/repository/docker/getmeili/meilisearch/general). People can use it with the following command: `docker run -p 7700:7700 -v $(pwd)/meili_data:/meili_data getmeili/meilisearch:prototype-X-Y`. More information about [how to run Meilisearch with Docker](https://docs.meilisearch.com/learn/cookbooks/docker.html#download-meilisearch-with-docker). However, no binaries will be created. If users do not use Docker, they can go to the `prototype-X-Y` tag in the Meilisearch repository and compile from source. ⚠️ When sharing a prototype with users, warn them not to use it in production. Prototypes are for testing purposes only. From 982dd760429d41719f76e98c9c3fcdace080b07e Mon Sep 17 00:00:00 2001 From: curquiza Date: Mon, 30 Jan 2023 14:35:14 +0100 Subject: [PATCH 081/186] Improve readability --- CONTRIBUTING.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 50d8d13bc..2467f3aa8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -116,8 +116,10 @@ Steps to create a prototype: 3. Push the tag: `git push origin prototype-X-Y` 4. Check that the [Docker CI](https://github.com/meilisearch/meilisearch/actions/workflows/publish-docker-images.yml) is now running. -⚙️ Once the CI has finished running (~1h30), a Docker image named `prototype-X-Y` will be available on [DockerHub](https://hub.docker.com/repository/docker/getmeili/meilisearch/general). People can use it with the following command: `docker run -p 7700:7700 -v $(pwd)/meili_data:/meili_data getmeili/meilisearch:prototype-X-Y`. More information about [how to run Meilisearch with Docker](https://docs.meilisearch.com/learn/cookbooks/docker.html#download-meilisearch-with-docker). -However, no binaries will be created. If users do not use Docker, they can go to the `prototype-X-Y` tag in the Meilisearch repository and compile from source. +🐳 Once the CI has finished running (~1h30), a Docker image named `prototype-X-Y` will be available on [DockerHub](https://hub.docker.com/repository/docker/getmeili/meilisearch/general).
People can use it with the following command: `docker run -p 7700:7700 -v $(pwd)/meili_data:/meili_data getmeili/meilisearch:prototype-X-Y`.
+More information about [how to run Meilisearch with Docker](https://docs.meilisearch.com/learn/cookbooks/docker.html#download-meilisearch-with-docker). + +⚙️ However, no binaries will be created. If users do not use Docker, they can go to the `prototype-X-Y` tag in the Meilisearch repository and compile from source. ⚠️ When sharing a prototype with users, warn them not to use it in production. Prototypes are for testing purposes only. From 2ba4629938588fbc2d5e4fdb3d237773e33e8f72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Mon, 30 Jan 2023 15:56:30 +0100 Subject: [PATCH 082/186] Update CONTRIBUTING.md Co-authored-by: Many the fish --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2467f3aa8..158629ffc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -104,7 +104,7 @@ The full Meilisearch release process is described in [this guide](https://github ### How to publish a prototype Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier for users to test. The prototype name must follow this convention: `prototype-X-Y` where -- `X` is the feature name +- `X` is the feature name formatted in `kebab-case` - `Y` is the version of the prototype, starting from `0`. Example: `prototype-auto-resize-0`. From 47b7d515edd99d88c69baa4e75c0d4633a1aed8f Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 30 Jan 2023 17:39:05 +0100 Subject: [PATCH 083/186] Add more detailed contribution instructions for tests --- CONTRIBUTING.md | 17 +++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4b849756b..1a43e62b4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -52,6 +52,23 @@ cargo test ``` This command will be triggered on each PR as a requirement for merging it. +#### Snapshot-based tests + +We are using [insta](https://insta.rs) to perform snapshot-based testing. +We recommend using the insta tooling (such as `cargo-insta`) to update the snapshots if they change following a PR. + +New tests should use insta where possible rather than manual `assert` statements (a minimal example is sketched below). + +Furthermore, we provide some macros on top of insta, notably a way to use snapshot hashes instead of inline snapshots, saving a lot of space in the repository. + +To effectively debug snapshot-based hashes, we recommend you export the `MEILI_TEST_FULL_SNAPS` environment variable so that snapshots are fully created locally: + +``` +export MEILI_TEST_FULL_SNAPS=true # add this to your .bashrc, .zshrc, ...
+``` + +#### Test troubleshooting + If you get a "Too many open files" error you might want to increase the open file limit using this command: ```bash From 89675e5f15f2e287345dbe7d5a93c618ef6701b2 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 30 Jan 2023 17:17:35 +0100 Subject: [PATCH 084/186] clippy: Replace seek 0 by rewind --- dump/src/lib.rs | 4 ++-- dump/src/reader/v5/mod.rs | 4 ++-- .../update/index_documents/helpers/grenad_helpers.rs | 4 ++-- milli/src/update/index_documents/transform.rs | 10 +++++----- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/dump/src/lib.rs b/dump/src/lib.rs index 7a7b9a5b7..6ca3e000e 100644 --- a/dump/src/lib.rs +++ b/dump/src/lib.rs @@ -198,7 +198,7 @@ impl From for KindDump { #[cfg(test)] pub(crate) mod test { use std::fs::File; - use std::io::{Seek, SeekFrom}; + use std::io::Seek; use std::str::FromStr; use big_s::S; @@ -410,7 +410,7 @@ pub(crate) mod test { // create the dump let mut file = tempfile::tempfile().unwrap(); dump.persist_to(&mut file).unwrap(); - file.seek(SeekFrom::Start(0)).unwrap(); + file.rewind().unwrap(); file } diff --git a/dump/src/reader/v5/mod.rs b/dump/src/reader/v5/mod.rs index 35bdcb453..3a22ca0a9 100644 --- a/dump/src/reader/v5/mod.rs +++ b/dump/src/reader/v5/mod.rs @@ -33,7 +33,7 @@ //! use std::fs::{self, File}; -use std::io::{BufRead, BufReader, ErrorKind, Seek, SeekFrom}; +use std::io::{BufRead, BufReader, ErrorKind, Seek}; use std::path::Path; use serde::{Deserialize, Serialize}; @@ -178,7 +178,7 @@ impl V5Reader { } pub fn keys(&mut self) -> Result> + '_>> { - self.keys.seek(SeekFrom::Start(0))?; + self.keys.rewind()?; Ok(Box::new( (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }), )) diff --git a/milli/src/update/index_documents/helpers/grenad_helpers.rs b/milli/src/update/index_documents/helpers/grenad_helpers.rs index 03f15945a..eb66a28fe 100644 --- a/milli/src/update/index_documents/helpers/grenad_helpers.rs +++ b/milli/src/update/index_documents/helpers/grenad_helpers.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; use std::fs::File; -use std::io::{self, Seek, SeekFrom}; +use std::io::{self, Seek}; use std::time::Instant; use grenad::{CompressionType, Sorter}; @@ -66,7 +66,7 @@ pub fn sorter_into_reader( pub fn writer_into_reader(writer: grenad::Writer) -> Result> { let mut file = writer.into_inner()?; - file.seek(SeekFrom::Start(0))?; + file.rewind()?; grenad::Reader::new(file).map_err(Into::into) } diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 68ef2b7ee..9e07e78ad 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::fs::File; -use std::io::{Read, Seek, SeekFrom}; +use std::io::{Read, Seek}; use fxhash::FxHashMap; use heed::RoTxn; @@ -510,7 +510,7 @@ impl<'a, 'i> Transform<'a, 'i> { let mut original_documents = writer.into_inner()?; // We then extract the file and reset the seek to be able to read it again. - original_documents.seek(SeekFrom::Start(0))?; + original_documents.rewind()?; // We create a final writer to write the new documents in order from the sorter. let mut writer = create_writer( @@ -522,7 +522,7 @@ impl<'a, 'i> Transform<'a, 'i> { // into this writer, extract the file and reset the seek to be able to read it again. 
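To make the snapshot-based tests section above concrete, here is a minimal sketch of an inline insta test. The function output and the expected string are invented for illustration; the only assumption is `insta` as a dev-dependency:

```rust
#[cfg(test)]
mod tests {
    // Hypothetical example: snapshot the exact rendered text instead of
    // asserting on substrings. Running `cargo insta test --review` updates
    // the inline `@"..."` literal whenever the output changes.
    #[test]
    fn index_not_found_message() {
        let msg = format!("Index `{}` not found.", "movies");
        insta::assert_snapshot!(msg, @"Index `movies` not found.");
    }
}
```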
self.flattened_sorter.write_into_stream_writer(&mut writer)?; let mut flattened_documents = writer.into_inner()?; - flattened_documents.seek(SeekFrom::Start(0))?; + flattened_documents.rewind()?; let mut new_external_documents_ids_builder: Vec<_> = self.new_external_documents_ids_builder.into_iter().collect(); @@ -650,10 +650,10 @@ impl<'a, 'i> Transform<'a, 'i> { // Once we have written all the documents, we extract // the file and reset the seek to be able to read it again. let mut original_documents = original_writer.into_inner()?; - original_documents.seek(SeekFrom::Start(0))?; + original_documents.rewind()?; let mut flattened_documents = flattened_writer.into_inner()?; - flattened_documents.seek(SeekFrom::Start(0))?; + flattened_documents.rewind()?; let output = TransformOutput { primary_key, From 3296cf7ae6c36b4784cb14b70d49f41aaedd369e Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Mon, 30 Jan 2023 17:18:02 +0100 Subject: [PATCH 085/186] clippy: remove needless lifetimes --- index-scheduler/src/batch.rs | 4 ++-- meilisearch/src/search.rs | 8 ++++---- milli/src/index.rs | 6 +++--- milli/src/search/criteria/proximity.rs | 16 ++++++++-------- milli/src/search/criteria/typo.rs | 12 ++++++------ milli/src/update/delete_documents.rs | 6 +++--- milli/src/update/facet/incremental.rs | 24 ++++++++++++------------ 7 files changed, 38 insertions(+), 38 deletions(-) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index 423e2f23d..e7e52c7c3 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -947,9 +947,9 @@ impl IndexScheduler { /// /// ## Return /// The list of processed tasks. - fn apply_index_operation<'txn, 'i>( + fn apply_index_operation<'i>( &self, - index_wtxn: &'txn mut RwTxn<'i, '_>, + index_wtxn: &'_ mut RwTxn<'i, '_>, index: &'i Index, operation: IndexOperation, ) -> Result> { diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index a3972cd38..9e4d81ffd 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -474,10 +474,10 @@ fn make_document( Ok(document) } -fn format_fields<'a, A: AsRef<[u8]>>( +fn format_fields>( document: &Document, field_ids_map: &FieldsIdsMap, - builder: &MatcherBuilder<'a, A>, + builder: &MatcherBuilder<'_, A>, formatted_options: &BTreeMap, compute_matches: bool, displayable_ids: &BTreeSet, @@ -522,9 +522,9 @@ fn format_fields<'a, A: AsRef<[u8]>>( Ok((matches_position, document)) } -fn format_value<'a, A: AsRef<[u8]>>( +fn format_value>( value: Value, - builder: &MatcherBuilder<'a, A>, + builder: &MatcherBuilder<'_, A>, format_options: Option, infos: &mut Vec, compute_matches: bool, diff --git a/milli/src/index.rs b/milli/src/index.rs index 3f7ef14e6..31311d318 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -348,10 +348,10 @@ impl Index { /* external documents ids */ /// Writes the external documents ids and internal ids (i.e. `u32`). - pub(crate) fn put_external_documents_ids<'a>( + pub(crate) fn put_external_documents_ids( &self, wtxn: &mut RwTxn, - external_documents_ids: &ExternalDocumentsIds<'a>, + external_documents_ids: &ExternalDocumentsIds<'_>, ) -> heed::Result<()> { let ExternalDocumentsIds { hard, soft, .. } = external_documents_ids; let hard = hard.as_fst().as_bytes(); @@ -426,7 +426,7 @@ impl Index { } /// Returns the `rtree` which associates coordinates to documents ids. 
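The `seek(SeekFrom::Start(0))` to `rewind()` changes in the patch above are mechanical: `Seek::rewind()`, stable since Rust 1.55, is shorthand for seeking back to offset 0. A self-contained sketch (not repository code) of the idiom on an in-memory cursor:

```rust
use std::io::{Cursor, Read, Seek, Write};

fn main() -> std::io::Result<()> {
    let mut buffer = Cursor::new(Vec::new());
    buffer.write_all(b"dump contents")?;

    // Equivalent to `buffer.seek(SeekFrom::Start(0))?`, but without
    // needing `SeekFrom` in scope, which is why the patch trims the imports.
    buffer.rewind()?;

    let mut contents = String::new();
    buffer.read_to_string(&mut contents)?;
    assert_eq!(contents, "dump contents");
    Ok(())
}
```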
- pub fn geo_rtree<'t>(&self, rtxn: &'t RoTxn) -> Result>> { + pub fn geo_rtree(&self, rtxn: &'_ RoTxn) -> Result>> { match self .main .get::<_, Str, SerdeBincode>>(rtxn, main_key::GEO_RTREE_KEY)? diff --git a/milli/src/search/criteria/proximity.rs b/milli/src/search/criteria/proximity.rs index 66e5c95bf..0f5b340bf 100644 --- a/milli/src/search/criteria/proximity.rs +++ b/milli/src/search/criteria/proximity.rs @@ -182,15 +182,15 @@ impl<'t> Criterion for Proximity<'t> { } } -fn resolve_candidates<'t>( - ctx: &'t dyn Context, +fn resolve_candidates( + ctx: &'_ dyn Context, query_tree: &Operation, proximity: u8, cache: &mut Cache, wdcache: &mut WordDerivationsCache, ) -> Result { - fn resolve_operation<'t>( - ctx: &'t dyn Context, + fn resolve_operation( + ctx: &'_ dyn Context, query_tree: &Operation, proximity: u8, cache: &mut Cache, @@ -243,8 +243,8 @@ fn resolve_candidates<'t>( Ok(result) } - fn mdfs_pair<'t>( - ctx: &'t dyn Context, + fn mdfs_pair( + ctx: &'_ dyn Context, left: &Operation, right: &Operation, proximity: u8, @@ -298,8 +298,8 @@ fn resolve_candidates<'t>( Ok(output) } - fn mdfs<'t>( - ctx: &'t dyn Context, + fn mdfs( + ctx: &'_ dyn Context, branches: &[Operation], proximity: u8, cache: &mut Cache, diff --git a/milli/src/search/criteria/typo.rs b/milli/src/search/criteria/typo.rs index 911d46e3e..95e722074 100644 --- a/milli/src/search/criteria/typo.rs +++ b/milli/src/search/criteria/typo.rs @@ -239,15 +239,15 @@ fn alterate_query_tree( Ok(query_tree) } -fn resolve_candidates<'t>( - ctx: &'t dyn Context, +fn resolve_candidates( + ctx: &'_ dyn Context, query_tree: &Operation, number_typos: u8, cache: &mut HashMap<(Operation, u8), RoaringBitmap>, wdcache: &mut WordDerivationsCache, ) -> Result { - fn resolve_operation<'t>( - ctx: &'t dyn Context, + fn resolve_operation( + ctx: &'_ dyn Context, query_tree: &Operation, number_typos: u8, cache: &mut HashMap<(Operation, u8), RoaringBitmap>, @@ -276,8 +276,8 @@ fn resolve_candidates<'t>( } } - fn mdfs<'t>( - ctx: &'t dyn Context, + fn mdfs( + ctx: &'_ dyn Context, branches: &[Operation], mana: u8, cache: &mut HashMap<(Operation, u8), RoaringBitmap>, diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs index f4a6d396e..4ba7eb08f 100644 --- a/milli/src/update/delete_documents.rs +++ b/milli/src/update/delete_documents.rs @@ -574,9 +574,9 @@ fn remove_from_word_docids( Ok(()) } -fn remove_docids_from_field_id_docid_facet_value<'i, 'a>( - index: &'i Index, - wtxn: &'a mut heed::RwTxn, +fn remove_docids_from_field_id_docid_facet_value( + index: &'_ Index, + wtxn: &'_ mut heed::RwTxn, facet_type: FacetType, field_id: FieldId, to_remove: &RoaringBitmap, diff --git a/milli/src/update/facet/incremental.rs b/milli/src/update/facet/incremental.rs index abe83be7a..d5f09c783 100644 --- a/milli/src/update/facet/incremental.rs +++ b/milli/src/update/facet/incremental.rs @@ -157,9 +157,9 @@ impl FacetsUpdateIncrementalInner { /// /// ## Return /// See documentation of `insert_in_level` - fn insert_in_level_0<'t>( + fn insert_in_level_0( &self, - txn: &'t mut RwTxn, + txn: &'_ mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, @@ -211,9 +211,9 @@ impl FacetsUpdateIncrementalInner { /// - `InsertionResult::Insert` means that inserting the `facet_value` into the `level` resulted /// in the addition of a new key in that level, and that therefore the number of children /// of the parent node should be incremented. 
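For readers unfamiliar with the lint driving this patch, a tiny standalone illustration of `clippy::needless_lifetimes` with invented function names; both versions compile to the same signature, since elision infers the single lifetime:

```rust
// Before: the named lifetime only ties one input reference to the output,
// which lifetime elision already does, so clippy::needless_lifetimes fires.
fn first_word<'a>(text: &'a str) -> &'a str {
    text.split(' ').next().unwrap_or(text)
}

// After the suggested fix: identical meaning, no explicit lifetime.
// Writing `text: &'_ str`, as some hunks above do, also satisfies the lint.
fn first_word_elided(text: &str) -> &str {
    text.split(' ').next().unwrap_or(text)
}

fn main() {
    assert_eq!(first_word("hello world"), "hello");
    assert_eq!(first_word_elided("hello world"), "hello");
}
```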
- fn insert_in_level<'t>( + fn insert_in_level( &self, - txn: &'t mut RwTxn, + txn: &'_ mut RwTxn, field_id: u16, level: u8, facet_value: &[u8], @@ -348,9 +348,9 @@ impl FacetsUpdateIncrementalInner { } /// Insert the given facet value and corresponding document ids in the database. - pub fn insert<'t>( + pub fn insert( &self, - txn: &'t mut RwTxn, + txn: &'_ mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, @@ -470,9 +470,9 @@ impl FacetsUpdateIncrementalInner { /// in level 1, the key with the left bound `3` had to be changed to the next facet value (e.g. 4). /// In that case `DeletionResult::Reduce` is returned. The parent of the reduced key may need to adjust /// its left bound as well. - fn delete_in_level<'t>( + fn delete_in_level( &self, - txn: &'t mut RwTxn, + txn: &'_ mut RwTxn, field_id: u16, level: u8, facet_value: &[u8], @@ -529,9 +529,9 @@ impl FacetsUpdateIncrementalInner { } } - fn delete_in_level_0<'t>( + fn delete_in_level_0( &self, - txn: &'t mut RwTxn, + txn: &'_ mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, @@ -557,9 +557,9 @@ impl FacetsUpdateIncrementalInner { } } - pub fn delete<'t>( + pub fn delete( &self, - txn: &'t mut RwTxn, + txn: &'_ mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, From d91f8fc4933878f95b2f48291396c7c65d6cfbdb Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 09:36:57 +0100 Subject: [PATCH 086/186] clippy: Allow uninlined_format_args in CI --- .github/workflows/rust.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 3bbbd0752..cec364eef 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -109,7 +109,8 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: --all-targets -- --deny warnings + # allow uninlined_format_args https://github.com/rust-lang/rust-clippy/issues/10087 + args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args fmt: name: Run Rustfmt From 07603373f3bb921276f2a4232ffe82b26946c3f8 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 09:45:43 +0100 Subject: [PATCH 087/186] clippy: allow uninlined_format_args --- .github/workflows/rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index f1260124e..0ff5ffc1d 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -108,7 +108,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: --all-targets -- --deny warnings + args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args fmt: name: Run Rustfmt From 771a367b978c55368dac48712d3c3af0af6ef987 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 10:14:19 +0100 Subject: [PATCH 088/186] clippy: use rewind instead of seek 0 --- dump/src/lib.rs | 4 ++-- dump/src/reader/v5/mod.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dump/src/lib.rs b/dump/src/lib.rs index 7a7b9a5b7..6ca3e000e 100644 --- a/dump/src/lib.rs +++ b/dump/src/lib.rs @@ -198,7 +198,7 @@ impl From for KindDump { #[cfg(test)] pub(crate) mod test { use std::fs::File; - use std::io::{Seek, SeekFrom}; + use std::io::Seek; use std::str::FromStr; use big_s::S; @@ -410,7 +410,7 @@ pub(crate) mod test { // create the dump let mut file = tempfile::tempfile().unwrap(); dump.persist_to(&mut file).unwrap(); - file.seek(SeekFrom::Start(0)).unwrap(); + file.rewind().unwrap(); file } diff
--git a/dump/src/reader/v5/mod.rs b/dump/src/reader/v5/mod.rs index 35bdcb453..3a22ca0a9 100644 --- a/dump/src/reader/v5/mod.rs +++ b/dump/src/reader/v5/mod.rs @@ -33,7 +33,7 @@ //! use std::fs::{self, File}; -use std::io::{BufRead, BufReader, ErrorKind, Seek, SeekFrom}; +use std::io::{BufRead, BufReader, ErrorKind, Seek}; use std::path::Path; use serde::{Deserialize, Serialize}; @@ -178,7 +178,7 @@ impl V5Reader { } pub fn keys(&mut self) -> Result> + '_>> { - self.keys.seek(SeekFrom::Start(0))?; + self.keys.rewind()?; Ok(Box::new( (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }), )) From 924d5d4c11e8f0e8fd3b7c116924a17cd0d4d8f8 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 10:14:43 +0100 Subject: [PATCH 089/186] clippy: remove needless lifetimes --- index-scheduler/src/batch.rs | 4 ++-- meilisearch/src/search.rs | 8 ++++---- permissive-json-pointer/src/lib.rs | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index bae92c37f..8a479a12b 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -960,9 +960,9 @@ impl IndexScheduler { /// /// ## Return /// The list of processed tasks. - fn apply_index_operation<'txn, 'i>( + fn apply_index_operation<'i>( &self, - index_wtxn: &'txn mut RwTxn<'i, '_>, + index_wtxn: &mut RwTxn<'i, '_>, index: &'i Index, operation: IndexOperation, ) -> Result> { diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index c199944f1..5cd9acee7 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -473,10 +473,10 @@ fn make_document( Ok(document) } -fn format_fields<'a, A: AsRef<[u8]>>( +fn format_fields>( document: &Document, field_ids_map: &FieldsIdsMap, - builder: &MatcherBuilder<'a, A>, + builder: &MatcherBuilder<'_, A>, formatted_options: &BTreeMap, compute_matches: bool, displayable_ids: &BTreeSet, @@ -521,9 +521,9 @@ fn format_fields<'a, A: AsRef<[u8]>>( Ok((matches_position, document)) } -fn format_value<'a, A: AsRef<[u8]>>( +fn format_value>( value: Value, - builder: &MatcherBuilder<'a, A>, + builder: &MatcherBuilder<'_, A>, format_options: Option, infos: &mut Vec, compute_matches: bool, diff --git a/permissive-json-pointer/src/lib.rs b/permissive-json-pointer/src/lib.rs index 039bd3320..7e5b3371c 100644 --- a/permissive-json-pointer/src/lib.rs +++ b/permissive-json-pointer/src/lib.rs @@ -72,9 +72,9 @@ pub fn map_leaf_values<'a>( map_leaf_values_in_object(value, &selectors, "", &mut mapper); } -pub fn map_leaf_values_in_object<'a>( +pub fn map_leaf_values_in_object( value: &mut Map, - selectors: &[&'a str], + selectors: &[&str], base_key: &str, mapper: &mut impl FnMut(&str, &mut Value), ) { From bffabf9cc63f29a73f74faab15da11020a1329aa Mon Sep 17 00:00:00 2001 From: curquiza Date: Tue, 31 Jan 2023 09:56:22 +0000 Subject: [PATCH 090/186] Update version for the next release (v0.41.1) in Cargo.toml files --- benchmarks/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- filter-parser/Cargo.toml | 2 +- flatten-serde-json/Cargo.toml | 2 +- json-depth-checker/Cargo.toml | 2 +- milli/Cargo.toml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index bf96ca84c..6d3608a5e 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "benchmarks" -version = "0.41.0" +version = "0.41.1" edition = "2018" publish = false diff --git a/cli/Cargo.toml b/cli/Cargo.toml index c8e63a764..ede953756 
100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.41.0" +version = "0.41.1" edition = "2018" description = "A CLI to interact with a milli index" publish = false diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index 8f47bf2bc..6dd13c645 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "filter-parser" -version = "0.41.0" +version = "0.41.1" edition = "2021" description = "The parser for the Meilisearch filter syntax" publish = false diff --git a/flatten-serde-json/Cargo.toml b/flatten-serde-json/Cargo.toml index 9191364ae..136fd3f7e 100644 --- a/flatten-serde-json/Cargo.toml +++ b/flatten-serde-json/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flatten-serde-json" -version = "0.41.0" +version = "0.41.1" edition = "2021" description = "Flatten serde-json objects like elastic search" readme = "README.md" diff --git a/json-depth-checker/Cargo.toml b/json-depth-checker/Cargo.toml index 63906a276..50a66da4b 100644 --- a/json-depth-checker/Cargo.toml +++ b/json-depth-checker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "json-depth-checker" -version = "0.41.0" +version = "0.41.1" edition = "2021" description = "A library that indicates if a JSON must be flattened" publish = false diff --git a/milli/Cargo.toml b/milli/Cargo.toml index c3fccc9e2..b3d87304d 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "milli" -version = "0.41.0" +version = "0.41.1" authors = ["Kerollmops "] edition = "2018" From cbf029f64c6d014d29930d4a71a3b7b17763f306 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 11:06:43 +0100 Subject: [PATCH 091/186] clippy: --fix --- .../update/index_documents/helpers/grenad_helpers.rs | 4 ++-- milli/src/update/index_documents/transform.rs | 10 +++++----- milli/tests/search/phrase_search.rs | 6 +++--- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/milli/src/update/index_documents/helpers/grenad_helpers.rs b/milli/src/update/index_documents/helpers/grenad_helpers.rs index 03f15945a..eb66a28fe 100644 --- a/milli/src/update/index_documents/helpers/grenad_helpers.rs +++ b/milli/src/update/index_documents/helpers/grenad_helpers.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; use std::fs::File; -use std::io::{self, Seek, SeekFrom}; +use std::io::{self, Seek}; use std::time::Instant; use grenad::{CompressionType, Sorter}; @@ -66,7 +66,7 @@ pub fn sorter_into_reader( pub fn writer_into_reader(writer: grenad::Writer) -> Result> { let mut file = writer.into_inner()?; - file.seek(SeekFrom::Start(0))?; + file.rewind()?; grenad::Reader::new(file).map_err(Into::into) } diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 68ef2b7ee..9e07e78ad 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::fs::File; -use std::io::{Read, Seek, SeekFrom}; +use std::io::{Read, Seek}; use fxhash::FxHashMap; use heed::RoTxn; @@ -510,7 +510,7 @@ impl<'a, 'i> Transform<'a, 'i> { let mut original_documents = writer.into_inner()?; // We then extract the file and reset the seek to be able to read it again. - original_documents.seek(SeekFrom::Start(0))?; + original_documents.rewind()?; // We create a final writer to write the new documents in order from the sorter. 
let mut writer = create_writer( @@ -522,7 +522,7 @@ impl<'a, 'i> Transform<'a, 'i> { // into this writer, extract the file and reset the seek to be able to read it again. self.flattened_sorter.write_into_stream_writer(&mut writer)?; let mut flattened_documents = writer.into_inner()?; - flattened_documents.seek(SeekFrom::Start(0))?; + flattened_documents.rewind()?; let mut new_external_documents_ids_builder: Vec<_> = self.new_external_documents_ids_builder.into_iter().collect(); @@ -650,10 +650,10 @@ impl<'a, 'i> Transform<'a, 'i> { // Once we have written all the documents, we extract // the file and reset the seek to be able to read it again. let mut original_documents = original_writer.into_inner()?; - original_documents.seek(SeekFrom::Start(0))?; + original_documents.rewind()?; let mut flattened_documents = flattened_writer.into_inner()?; - flattened_documents.seek(SeekFrom::Start(0))?; + flattened_documents.rewind()?; let output = TransformOutput { primary_key, diff --git a/milli/tests/search/phrase_search.rs b/milli/tests/search/phrase_search.rs index ca5eaad48..2e63c96c4 100644 --- a/milli/tests/search/phrase_search.rs +++ b/milli/tests/search/phrase_search.rs @@ -7,15 +7,15 @@ fn set_stop_words(index: &Index, stop_words: &[&str]) { let mut wtxn = index.write_txn().unwrap(); let config = IndexerConfig::default(); - let mut builder = Settings::new(&mut wtxn, &index, &config); - let stop_words = stop_words.into_iter().map(|s| s.to_string()).collect(); + let mut builder = Settings::new(&mut wtxn, index, &config); + let stop_words = stop_words.iter().map(|s| s.to_string()).collect(); builder.set_stop_words(stop_words); builder.execute(|_| (), || false).unwrap(); wtxn.commit().unwrap(); } fn test_phrase_search_with_stop_words_given_criteria(criteria: &[Criterion]) { - let index = super::setup_search_index_with_criteria(&criteria); + let index = super::setup_search_index_with_criteria(criteria); // Add stop_words set_stop_words(&index, &["a", "an", "the", "of"]); From 20f05efb3c423a272124e650a54df0c68952abd4 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 11:11:49 +0100 Subject: [PATCH 092/186] clippy: needless_lifetimes --- milli/src/index.rs | 6 +++--- milli/src/search/criteria/proximity.rs | 16 ++++++++-------- milli/src/search/criteria/typo.rs | 12 ++++++------ milli/src/update/delete_documents.rs | 6 +++--- milli/src/update/facet/incremental.rs | 24 ++++++++++++------------ 5 files changed, 32 insertions(+), 32 deletions(-) diff --git a/milli/src/index.rs b/milli/src/index.rs index 8a17cebf4..32ffe45cf 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -348,10 +348,10 @@ impl Index { /* external documents ids */ /// Writes the external documents ids and internal ids (i.e. `u32`). - pub(crate) fn put_external_documents_ids<'a>( + pub(crate) fn put_external_documents_ids( &self, wtxn: &mut RwTxn, - external_documents_ids: &ExternalDocumentsIds<'a>, + external_documents_ids: &ExternalDocumentsIds<'_>, ) -> heed::Result<()> { let ExternalDocumentsIds { hard, soft, .. } = external_documents_ids; let hard = hard.as_fst().as_bytes(); @@ -426,7 +426,7 @@ impl Index { } /// Returns the `rtree` which associates coordinates to documents ids. - pub fn geo_rtree<'t>(&self, rtxn: &'t RoTxn) -> Result>> { + pub fn geo_rtree(&self, rtxn: &RoTxn) -> Result>> { match self .main .get::<_, Str, SerdeBincode>>(rtxn, main_key::GEO_RTREE_KEY)? 
diff --git a/milli/src/search/criteria/proximity.rs b/milli/src/search/criteria/proximity.rs index 66e5c95bf..182f9fbea 100644 --- a/milli/src/search/criteria/proximity.rs +++ b/milli/src/search/criteria/proximity.rs @@ -182,15 +182,15 @@ impl<'t> Criterion for Proximity<'t> { } } -fn resolve_candidates<'t>( - ctx: &'t dyn Context, +fn resolve_candidates( + ctx: &dyn Context, query_tree: &Operation, proximity: u8, cache: &mut Cache, wdcache: &mut WordDerivationsCache, ) -> Result { - fn resolve_operation<'t>( - ctx: &'t dyn Context, + fn resolve_operation( + ctx: &dyn Context, query_tree: &Operation, proximity: u8, cache: &mut Cache, @@ -243,8 +243,8 @@ fn resolve_candidates<'t>( Ok(result) } - fn mdfs_pair<'t>( - ctx: &'t dyn Context, + fn mdfs_pair( + ctx: &dyn Context, left: &Operation, right: &Operation, proximity: u8, @@ -298,8 +298,8 @@ fn resolve_candidates<'t>( Ok(output) } - fn mdfs<'t>( - ctx: &'t dyn Context, + fn mdfs( + ctx: &dyn Context, branches: &[Operation], proximity: u8, cache: &mut Cache, diff --git a/milli/src/search/criteria/typo.rs b/milli/src/search/criteria/typo.rs index 20bc718fd..ff2567304 100644 --- a/milli/src/search/criteria/typo.rs +++ b/milli/src/search/criteria/typo.rs @@ -239,15 +239,15 @@ fn alterate_query_tree( Ok(query_tree) } -fn resolve_candidates<'t>( - ctx: &'t dyn Context, +fn resolve_candidates( + ctx: &dyn Context, query_tree: &Operation, number_typos: u8, cache: &mut HashMap<(Operation, u8), RoaringBitmap>, wdcache: &mut WordDerivationsCache, ) -> Result { - fn resolve_operation<'t>( - ctx: &'t dyn Context, + fn resolve_operation( + ctx: &dyn Context, query_tree: &Operation, number_typos: u8, cache: &mut HashMap<(Operation, u8), RoaringBitmap>, @@ -276,8 +276,8 @@ fn resolve_candidates<'t>( } } - fn mdfs<'t>( - ctx: &'t dyn Context, + fn mdfs( + ctx: &dyn Context, branches: &[Operation], mana: u8, cache: &mut HashMap<(Operation, u8), RoaringBitmap>, diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs index 635ce85be..90118af18 100644 --- a/milli/src/update/delete_documents.rs +++ b/milli/src/update/delete_documents.rs @@ -574,9 +574,9 @@ fn remove_from_word_docids( Ok(()) } -fn remove_docids_from_field_id_docid_facet_value<'i, 'a>( - index: &'i Index, - wtxn: &'a mut heed::RwTxn, +fn remove_docids_from_field_id_docid_facet_value( + index: &Index, + wtxn: &mut heed::RwTxn, facet_type: FacetType, field_id: FieldId, to_remove: &RoaringBitmap, diff --git a/milli/src/update/facet/incremental.rs b/milli/src/update/facet/incremental.rs index cffce5525..a5840dc6e 100644 --- a/milli/src/update/facet/incremental.rs +++ b/milli/src/update/facet/incremental.rs @@ -157,9 +157,9 @@ impl FacetsUpdateIncrementalInner { /// /// ## Return /// See documentation of `insert_in_level` - fn insert_in_level_0<'t>( + fn insert_in_level_0( &self, - txn: &'t mut RwTxn, + txn: &mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, @@ -211,9 +211,9 @@ impl FacetsUpdateIncrementalInner { /// - `InsertionResult::Insert` means that inserting the `facet_value` into the `level` resulted /// in the addition of a new key in that level, and that therefore the number of children /// of the parent node should be incremented. - fn insert_in_level<'t>( + fn insert_in_level( &self, - txn: &'t mut RwTxn, + txn: &mut RwTxn, field_id: u16, level: u8, facet_value: &[u8], @@ -348,9 +348,9 @@ impl FacetsUpdateIncrementalInner { } /// Insert the given facet value and corresponding document ids in the database. 
- pub fn insert<'t>( + pub fn insert( &self, - txn: &'t mut RwTxn, + txn: &mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, @@ -470,9 +470,9 @@ impl FacetsUpdateIncrementalInner { /// in level 1, the key with the left bound `3` had to be changed to the next facet value (e.g. 4). /// In that case `DeletionResult::Reduce` is returned. The parent of the reduced key may need to adjust /// its left bound as well. - fn delete_in_level<'t>( + fn delete_in_level( &self, - txn: &'t mut RwTxn, + txn: &mut RwTxn, field_id: u16, level: u8, facet_value: &[u8], @@ -529,9 +529,9 @@ impl FacetsUpdateIncrementalInner { } } - fn delete_in_level_0<'t>( + fn delete_in_level_0( &self, - txn: &'t mut RwTxn, + txn: &mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, @@ -557,9 +557,9 @@ impl FacetsUpdateIncrementalInner { } } - pub fn delete<'t>( + pub fn delete( &self, - txn: &'t mut RwTxn, + txn: &mut RwTxn, field_id: u16, facet_value: &[u8], docids: &RoaringBitmap, From 5c0668afcfca87b525f6101dd4b5515c7423de1c Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 31 Jan 2023 11:13:47 +0100 Subject: [PATCH 093/186] clippy: allow uninlined_format_args --- .github/workflows/rust.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index abe227db0..119be47f9 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -65,6 +65,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy + args: -- --allow clippy::uninlined_format_args fmt: name: Run Rustfmt From a2690ea8d47d6104bf9e6a011b65e33ada822a2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Lecrenier?= Date: Tue, 31 Jan 2023 11:42:24 +0100 Subject: [PATCH 094/186] Reduce incremental indexing time of `words_prefix_position_docids` DB This database can easily contain millions of entries. Thus, iterating over it can be very expensive. For regular `documentAdditionOrUpdate` tasks, `del_prefix_fst_words` will always be empty. Thus, we can save a significant amount of time by adding this `if !del_prefix_fst_words.is_empty()` condition. The code's behaviour remains completely unchanged. --- .../src/update/words_prefix_position_docids.rs | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/milli/src/update/words_prefix_position_docids.rs b/milli/src/update/words_prefix_position_docids.rs index 5dbc9f89b..6f12dde38 100644 --- a/milli/src/update/words_prefix_position_docids.rs +++ b/milli/src/update/words_prefix_position_docids.rs @@ -140,16 +140,20 @@ impl<'t, 'u, 'i> WordPrefixPositionDocids<'t, 'u, 'i> { // We remove all the entries that are no more required in this word prefix position // docids database. - let mut iter = - self.index.word_prefix_position_docids.iter_mut(self.wtxn)?.lazily_decode_data(); - while let Some(((prefix, _), _)) = iter.next().transpose()? { - if del_prefix_fst_words.contains(prefix.as_bytes()) { - unsafe { iter.del_current()? }; + // We also avoid iterating over the whole `word_prefix_position_docids` database if we know in + // advance that the `if del_prefix_fst_words.contains(prefix.as_bytes()) {` condition below + // will always be false (i.e. if `del_prefix_fst_words` is empty). + if !del_prefix_fst_words.is_empty() { + let mut iter = + self.index.word_prefix_position_docids.iter_mut(self.wtxn)?.lazily_decode_data(); + while let Some(((prefix, _), _)) = iter.next().transpose()? { + if del_prefix_fst_words.contains(prefix.as_bytes()) { + unsafe { iter.del_current()? 
}; + } } + drop(iter); } - drop(iter); - // We finally write all the word prefix position docids into the LMDB database. sorter_into_lmdb_database( self.wtxn, From e269027cdd9bde88b012c5248c010a0a323c17e6 Mon Sep 17 00:00:00 2001 From: curquiza Date: Tue, 31 Jan 2023 12:04:41 +0100 Subject: [PATCH 095/186] Add git config about ownership in Docker CI --- .github/workflows/publish-docker-images.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index 3dd93b6eb..2e62630ee 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -52,6 +52,9 @@ jobs: - name: Set build-args for Docker buildx id: build-metadata run: | + # Define ownership + git config --global --add safe.directory /home/meili/actions-runner/_work/meilisearch/meilisearch + # Extract commit date commit_date=$(git show -s --format=%cd --date=iso-strict ${{ github.sha }}) From 4b7b2d6a906a5fc1f737bf014a4925b387873fef Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 31 Jan 2023 12:24:37 +0100 Subject: [PATCH 096/186] fix the import of dump v2 generated by meilisearch v0.22.0 --- ...ompat__v1_to_v2__test__compat_v1_v2-3.snap | 1 + ...ompat__v1_to_v2__test__compat_v1_v2-6.snap | 6 +- ...ompat__v1_to_v2__test__compat_v1_v2-9.snap | 1 + dump/src/reader/compat/v1_to_v2.rs | 96 ++++--- dump/src/reader/compat/v2_to_v3.rs | 30 ++- dump/src/reader/mod.rs | 78 +++++- ...dump__reader__test__import_dump_v1-11.snap | 1 + .../dump__reader__test__import_dump_v1-5.snap | 1 + .../dump__reader__test__import_dump_v1-8.snap | 4 + ...rom_meilisearch_v0_22_0_issue_3435-11.snap | 25 ++ ...from_meilisearch_v0_22_0_issue_3435-5.snap | 39 +++ ...from_meilisearch_v0_22_0_issue_3435-8.snap | 30 +++ dump/src/reader/v2/mod.rs | 78 ++++++ dump/src/reader/v2/settings.rs | 248 ++++++++++++------ ...rom_meilisearch_v0_22_0_issue_3435-10.snap | 25 ++ ...from_meilisearch_v0_22_0_issue_3435-4.snap | 39 +++ ...from_meilisearch_v0_22_0_issue_3435-7.snap | 30 +++ dump/tests/assets/v2-v0.22.0.dump | Bin 0 -> 9809 bytes meilisearch/tests/dumps/mod.rs | 6 +- 19 files changed, 584 insertions(+), 154 deletions(-) create mode 100644 dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-11.snap create mode 100644 dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-5.snap create mode 100644 dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-8.snap create mode 100644 dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-10.snap create mode 100644 dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-4.snap create mode 100644 dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-7.snap create mode 100644 dump/tests/assets/v2-v0.22.0.dump diff --git a/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-3.snap b/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-3.snap index f7e1736b1..8edf789d0 100644 --- a/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-3.snap +++ b/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-3.snap @@ -10,6 +10,7 @@ expression: products.settings().unwrap() "*" ], "filterableAttributes": [],
"sortableAttributes": [], "rankingRules": [ "typo", "words", diff --git a/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-6.snap b/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-6.snap index 8c36fe96c..80c26874b 100644 --- a/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-6.snap +++ b/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-6.snap @@ -13,13 +13,17 @@ expression: movies.settings().unwrap() "genres", "id" ], + "sortableAttributes": [ + "genres", + "id" + ], "rankingRules": [ "typo", "words", "proximity", "attribute", "exactness", - "asc(release_date)" + "release_date:asc" ], "stopWords": [], "synonyms": {}, diff --git a/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-9.snap b/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-9.snap index 1adf85e6a..89da27c25 100644 --- a/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-9.snap +++ b/dump/src/reader/compat/snapshots/dump__reader__compat__v1_to_v2__test__compat_v1_v2-9.snap @@ -10,6 +10,7 @@ expression: spells.settings().unwrap() "*" ], "filterableAttributes": [], + "sortableAttributes": [], "rankingRules": [ "typo", "words", diff --git a/dump/src/reader/compat/v1_to_v2.rs b/dump/src/reader/compat/v1_to_v2.rs index 741d18fa8..baadd2104 100644 --- a/dump/src/reader/compat/v1_to_v2.rs +++ b/dump/src/reader/compat/v1_to_v2.rs @@ -1,4 +1,3 @@ -use std::collections::BTreeSet; use std::str::FromStr; use super::v2_to_v3::CompatV2ToV3; @@ -102,14 +101,15 @@ impl CompatIndexV1ToV2 { impl From for v2::Settings { fn from(source: v1::settings::Settings) -> Self { - let displayed_attributes = source - .displayed_attributes - .map(|opt| opt.map(|displayed_attributes| displayed_attributes.into_iter().collect())); - let attributes_for_faceting = source.attributes_for_faceting.map(|opt| { - opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()) - }); - let ranking_rules = source.ranking_rules.map(|opt| { - opt.map(|ranking_rules| { + Self { + displayed_attributes: option_to_setting(source.displayed_attributes) + .map(|displayed| displayed.into_iter().collect()), + searchable_attributes: option_to_setting(source.searchable_attributes), + filterable_attributes: option_to_setting(source.attributes_for_faceting.clone()) + .map(|filterable| filterable.into_iter().collect()), + sortable_attributes: option_to_setting(source.attributes_for_faceting) + .map(|sortable| sortable.into_iter().collect()), + ranking_rules: option_to_setting(source.ranking_rules).map(|ranking_rules| { ranking_rules .into_iter() .filter_map(|ranking_rule| { @@ -119,26 +119,33 @@ impl From for v2::Settings { ranking_rule.into(); criterion.as_ref().map(ToString::to_string) } - Err(()) => Some(ranking_rule), + Err(()) => { + log::warn!( + "Could not import the following ranking rule: `{}`.", + ranking_rule + ); + None + } } }) .collect() - }) - }); - - Self { - displayed_attributes, - searchable_attributes: source.searchable_attributes, - filterable_attributes: attributes_for_faceting, - ranking_rules, - stop_words: source.stop_words, - synonyms: source.synonyms, - distinct_attribute: source.distinct_attribute, + }), + stop_words: option_to_setting(source.stop_words), + synonyms: option_to_setting(source.synonyms), + distinct_attribute: option_to_setting(source.distinct_attribute), _kind: std::marker::PhantomData, } } 
} +fn option_to_setting(opt: Option>) -> v2::Setting { + match opt { + Some(Some(t)) => v2::Setting::Set(t), + None => v2::Setting::NotSet, + Some(None) => v2::Setting::Reset, + } +} + impl From for Option { fn from(source: v1::update::UpdateStatus) -> Self { use v1::update::UpdateStatus as UpdateStatusV1; @@ -251,38 +258,27 @@ impl From for Option { impl From for v2::Settings { fn from(source: v1::settings::SettingsUpdate) -> Self { - let displayed_attributes: Option>> = - source.displayed_attributes.into(); - - let attributes_for_faceting: Option>> = - source.attributes_for_faceting.into(); - - let ranking_rules: Option>> = - source.ranking_rules.into(); + let ranking_rules = v2::Setting::from(source.ranking_rules); // go from the concrete types of v1 (RankingRule) to the concrete type of v2 (Criterion), // and then back to string as this is what the settings manipulate - let ranking_rules = ranking_rules.map(|opt| { - opt.map(|ranking_rules| { - ranking_rules - .into_iter() - // filter out the WordsPosition ranking rule that exists in v1 but not v2 - .filter_map(|ranking_rule| { - Option::::from(ranking_rule) - }) - .map(|criterion| criterion.to_string()) - .collect() - }) + let ranking_rules = ranking_rules.map(|ranking_rules| { + ranking_rules + .into_iter() + // filter out the WordsPosition ranking rule that exists in v1 but not v2 + .filter_map(|ranking_rule| Option::::from(ranking_rule)) + .map(|criterion| criterion.to_string()) + .collect() }); Self { - displayed_attributes: displayed_attributes.map(|opt| { - opt.map(|displayed_attributes| displayed_attributes.into_iter().collect()) - }), + displayed_attributes: v2::Setting::from(source.displayed_attributes) + .map(|displayed_attributes| displayed_attributes.into_iter().collect()), searchable_attributes: source.searchable_attributes.into(), - filterable_attributes: attributes_for_faceting.map(|opt| { - opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()) - }), + filterable_attributes: v2::Setting::from(source.attributes_for_faceting.clone()) + .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()), + sortable_attributes: v2::Setting::from(source.attributes_for_faceting) + .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()), ranking_rules, stop_words: source.stop_words.into(), synonyms: source.synonyms.into(), @@ -314,12 +310,12 @@ impl From for Option { } } -impl From> for Option> { +impl From> for v2::Setting { fn from(source: v1::settings::UpdateState) -> Self { match source { - v1::settings::UpdateState::Update(new_value) => Some(Some(new_value)), - v1::settings::UpdateState::Clear => Some(None), - v1::settings::UpdateState::Nothing => None, + v1::settings::UpdateState::Update(new_value) => v2::Setting::Set(new_value), + v1::settings::UpdateState::Clear => v2::Setting::Reset, + v1::settings::UpdateState::Nothing => v2::Setting::NotSet, } } } diff --git a/dump/src/reader/compat/v2_to_v3.rs b/dump/src/reader/compat/v2_to_v3.rs index 8574e04b4..14fc0ee4d 100644 --- a/dump/src/reader/compat/v2_to_v3.rs +++ b/dump/src/reader/compat/v2_to_v3.rs @@ -361,28 +361,29 @@ impl From for v3::Code { } } -fn option_to_setting(opt: Option>) -> v3::Setting { - match opt { - Some(Some(t)) => v3::Setting::Set(t), - None => v3::Setting::NotSet, - Some(None) => v3::Setting::Reset, +impl From> for v3::Setting { + fn from(setting: v2::Setting) -> Self { + match setting { + v2::settings::Setting::Set(a) => v3::settings::Setting::Set(a), + v2::settings::Setting::Reset => 
v3::settings::Setting::Reset, + v2::settings::Setting::NotSet => v3::settings::Setting::NotSet, + } } } impl From> for v3::Settings { fn from(settings: v2::Settings) -> Self { v3::Settings { - displayed_attributes: option_to_setting(settings.displayed_attributes), - searchable_attributes: option_to_setting(settings.searchable_attributes), - filterable_attributes: option_to_setting(settings.filterable_attributes) - .map(|f| f.into_iter().collect()), - sortable_attributes: v3::Setting::NotSet, - ranking_rules: option_to_setting(settings.ranking_rules).map(|criteria| { + displayed_attributes: settings.displayed_attributes.into(), + searchable_attributes: settings.searchable_attributes.into(), + filterable_attributes: settings.filterable_attributes.into(), + sortable_attributes: settings.sortable_attributes.into(), + ranking_rules: v3::Setting::from(settings.ranking_rules).map(|criteria| { criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect() }), - stop_words: option_to_setting(settings.stop_words), - synonyms: option_to_setting(settings.synonyms), - distinct_attribute: option_to_setting(settings.distinct_attribute), + stop_words: settings.stop_words.into(), + synonyms: settings.synonyms.into(), + distinct_attribute: settings.distinct_attribute.into(), _kind: std::marker::PhantomData, } } @@ -394,6 +395,7 @@ fn patch_ranking_rules(ranking_rule: &str) -> String { Ok(v2::settings::Criterion::Typo) => String::from("typo"), Ok(v2::settings::Criterion::Proximity) => String::from("proximity"), Ok(v2::settings::Criterion::Attribute) => String::from("attribute"), + Ok(v2::settings::Criterion::Sort) => String::from("sort"), Ok(v2::settings::Criterion::Exactness) => String::from("exactness"), Ok(v2::settings::Criterion::Asc(name)) => format!("{name}:asc"), Ok(v2::settings::Criterion::Desc(name)) => format!("{name}:desc"), diff --git a/dump/src/reader/mod.rs b/dump/src/reader/mod.rs index cf671ea45..a5a66591b 100644 --- a/dump/src/reader/mod.rs +++ b/dump/src/reader/mod.rs @@ -530,6 +530,82 @@ pub(crate) mod test { meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce"); } + #[test] + fn import_dump_v2_from_meilisearch_v0_22_0_issue_3435() { + let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap(); + let mut dump = DumpReader::open(dump).unwrap(); + + // top level infos + insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00"); + assert_eq!(dump.instance_uid().unwrap(), None); + + // tasks + let tasks = dump.tasks().unwrap().collect::>>().unwrap(); + let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2db37756d8af1fb7623436b76e8956a6"); + assert_eq!(update_files.len(), 8); + assert!(update_files[0..].iter().all(|u| u.is_none())); // everything already processed + + // keys + let keys = dump.keys().unwrap().collect::>>().unwrap(); + meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"d751713988987e9331980363e24189ce"); + + // indexes + let mut indexes = dump.indexes().unwrap().collect::>>().unwrap(); + // the index are not ordered in any way by default + indexes.sort_by_key(|index| index.metadata().uid.to_string()); + + let mut products = indexes.pop().unwrap(); + let mut movies = indexes.pop().unwrap(); + let mut spells = indexes.pop().unwrap(); + assert!(indexes.is_empty()); + + // products + insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + 
{ + "uid": "products", + "primaryKey": "sku", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_json_snapshot!(products.settings().unwrap()); + let documents = products.documents().unwrap().collect::>>().unwrap(); + assert_eq!(documents.len(), 10); + meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5"); + + // movies + insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "movies", + "primaryKey": "id", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_json_snapshot!(movies.settings().unwrap()); + let documents = movies.documents().unwrap().collect::>>().unwrap(); + assert_eq!(documents.len(), 10); + meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720"); + + // spells + insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "dnd_spells", + "primaryKey": "index", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_json_snapshot!(spells.settings().unwrap()); + let documents = spells.documents().unwrap().collect::>>().unwrap(); + assert_eq!(documents.len(), 10); + meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce"); + } + #[test] fn import_dump_v1() { let dump = File::open("tests/assets/v1.dump").unwrap(); @@ -542,7 +618,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b3e3652bfc10a76670be157d2507d761"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"8df6eab075a44b3c1af6b726f9fd9a43"); assert_eq!(update_files.len(), 9); assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1 diff --git a/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-11.snap b/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-11.snap index 997d303e7..92fc61d72 100644 --- a/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-11.snap +++ b/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-11.snap @@ -10,6 +10,7 @@ expression: spells.settings().unwrap() "*" ], "filterableAttributes": [], + "sortableAttributes": [], "rankingRules": [ "typo", "words", diff --git a/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-5.snap b/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-5.snap index 282cd6ba7..b0b54c136 100644 --- a/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-5.snap +++ b/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-5.snap @@ -10,6 +10,7 @@ expression: products.settings().unwrap() "*" ], "filterableAttributes": [], + "sortableAttributes": [], "rankingRules": [ "typo", "words", diff --git a/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-8.snap b/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-8.snap index d20fdc77e..5c12a0438 100644 --- a/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-8.snap +++ b/dump/src/reader/snapshots/dump__reader__test__import_dump_v1-8.snap @@ -13,6 +13,10 @@ expression: movies.settings().unwrap() "genres", "id" ], + "sortableAttributes": [ + "genres", + "id" + ], "rankingRules": [ "typo", "words", diff --git a/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-11.snap 
b/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-11.snap new file mode 100644 index 000000000..d8a5bafbe --- /dev/null +++ b/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-11.snap @@ -0,0 +1,25 @@ +--- +source: dump/src/reader/mod.rs +expression: spells.settings().unwrap() +--- +{ + "displayedAttributes": [ + "*" + ], + "searchableAttributes": [ + "*" + ], + "filterableAttributes": [], + "sortableAttributes": [], + "rankingRules": [ + "words", + "typo", + "proximity", + "attribute", + "sort", + "exactness" + ], + "stopWords": [], + "synonyms": {}, + "distinctAttribute": null +} diff --git a/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-5.snap b/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-5.snap new file mode 100644 index 000000000..abf97a8ab --- /dev/null +++ b/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-5.snap @@ -0,0 +1,39 @@ +--- +source: dump/src/reader/mod.rs +expression: products.settings().unwrap() +--- +{ + "displayedAttributes": [ + "*" + ], + "searchableAttributes": [ + "*" + ], + "filterableAttributes": [], + "sortableAttributes": [], + "rankingRules": [ + "words", + "typo", + "proximity", + "attribute", + "sort", + "exactness" + ], + "stopWords": [], + "synonyms": { + "android": [ + "phone", + "smartphone" + ], + "iphone": [ + "phone", + "smartphone" + ], + "phone": [ + "android", + "iphone", + "smartphone" + ] + }, + "distinctAttribute": null +} diff --git a/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-8.snap b/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-8.snap new file mode 100644 index 000000000..f02a3685e --- /dev/null +++ b/dump/src/reader/snapshots/dump__reader__test__import_dump_v2_from_meilisearch_v0_22_0_issue_3435-8.snap @@ -0,0 +1,30 @@ +--- +source: dump/src/reader/mod.rs +expression: movies.settings().unwrap() +--- +{ + "displayedAttributes": [ + "*" + ], + "searchableAttributes": [ + "*" + ], + "filterableAttributes": [ + "genres", + "id" + ], + "sortableAttributes": [ + "release_date" + ], + "rankingRules": [ + "words", + "typo", + "proximity", + "attribute", + "exactness", + "release_date:asc" + ], + "stopWords": [], + "synonyms": {}, + "distinctAttribute": null +} diff --git a/dump/src/reader/v2/mod.rs b/dump/src/reader/v2/mod.rs index befebbdb3..4016e6341 100644 --- a/dump/src/reader/v2/mod.rs +++ b/dump/src/reader/v2/mod.rs @@ -41,6 +41,7 @@ use super::Document; use crate::{IndexMetadata, Result, Version}; pub type Settings = settings::Settings; +pub type Setting = settings::Setting; pub type Checked = settings::Checked; pub type Unchecked = settings::Unchecked; @@ -306,4 +307,81 @@ pub(crate) mod test { assert_eq!(documents.len(), 10); meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce"); } + + #[test] + fn read_dump_v2_from_meilisearch_v0_22_0_issue_3435() { + let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap(); + let dir = TempDir::new().unwrap(); + let mut dump = BufReader::new(dump); + let gz = GzDecoder::new(&mut dump); + let mut archive = tar::Archive::new(gz); + archive.unpack(dir.path()).unwrap(); + + let mut dump = V2Reader::open(dir).unwrap(); + + // top level infos + insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 
16:26:09.247261 +00:00:00"); + + // tasks + let tasks = dump.tasks().collect::>>().unwrap(); + let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"aca8ba13046272664eb3ea2da3031633"); + assert_eq!(update_files.len(), 8); + assert!(update_files[0..].iter().all(|u| u.is_none())); // everything has already been processed + + // indexes + let mut indexes = dump.indexes().unwrap().collect::>>().unwrap(); + // the index are not ordered in any way by default + indexes.sort_by_key(|index| index.metadata().uid.to_string()); + + let mut products = indexes.pop().unwrap(); + let mut movies = indexes.pop().unwrap(); + let mut spells = indexes.pop().unwrap(); + assert!(indexes.is_empty()); + + // products + insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "products", + "primaryKey": "sku", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_json_snapshot!(products.settings().unwrap()); + let documents = products.documents().unwrap().collect::>>().unwrap(); + assert_eq!(documents.len(), 10); + meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5"); + + // movies + insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "movies", + "primaryKey": "id", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_json_snapshot!(movies.settings().unwrap()); + let documents = movies.documents().unwrap().collect::>>().unwrap(); + assert_eq!(documents.len(), 10); + meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720"); + + // spells + insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###" + { + "uid": "dnd_spells", + "primaryKey": "index", + "createdAt": "[now]", + "updatedAt": "[now]" + } + "###); + + insta::assert_json_snapshot!(spells.settings().unwrap()); + let documents = spells.documents().unwrap().collect::>>().unwrap(); + assert_eq!(documents.len(), 10); + meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce"); + } } diff --git a/dump/src/reader/v2/settings.rs b/dump/src/reader/v2/settings.rs index 1a7935b56..9cd363ca5 100644 --- a/dump/src/reader/v2/settings.rs +++ b/dump/src/reader/v2/settings.rs @@ -1,35 +1,33 @@ use std::collections::{BTreeMap, BTreeSet}; -use std::fmt::Display; +use std::fmt; use std::marker::PhantomData; use std::str::FromStr; -use once_cell::sync::Lazy; -use regex::Regex; use serde::{Deserialize, Deserializer}; #[cfg(test)] fn serialize_with_wildcard( - field: &Option>>, + field: &Setting>, s: S, ) -> std::result::Result where S: serde::Serializer, { - let wildcard = vec!["*".to_string()]; - s.serialize_some(&field.as_ref().map(|o| o.as_ref().unwrap_or(&wildcard))) -} + use serde::Serialize; -fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result, D::Error> -where - T: Deserialize<'de>, - D: Deserializer<'de>, -{ - Deserialize::deserialize(deserializer).map(Some) + let wildcard = vec!["*".to_string()]; + match field { + Setting::Set(value) => Some(value), + Setting::Reset => Some(&wildcard), + Setting::NotSet => None, + } + .serialize(s) } #[derive(Clone, Default, Debug)] #[cfg_attr(test, derive(serde::Serialize))] pub struct Checked; + #[derive(Clone, Default, Debug, Deserialize)] #[cfg_attr(test, derive(serde::Serialize))] pub struct 
Unchecked; @@ -42,75 +40,54 @@ pub struct Unchecked; pub struct Settings { #[serde( default, - deserialize_with = "deserialize_some", serialize_with = "serialize_with_wildcard", - skip_serializing_if = "Option::is_none" + skip_serializing_if = "Setting::is_not_set" )] - pub displayed_attributes: Option>>, + pub displayed_attributes: Setting>, #[serde( default, - deserialize_with = "deserialize_some", serialize_with = "serialize_with_wildcard", - skip_serializing_if = "Option::is_none" + skip_serializing_if = "Setting::is_not_set" )] - pub searchable_attributes: Option>>, + pub searchable_attributes: Setting>, - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none" - )] - pub filterable_attributes: Option>>, - - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none" - )] - pub ranking_rules: Option>>, - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none" - )] - pub stop_words: Option>>, - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none" - )] - pub synonyms: Option>>>, - #[serde( - default, - deserialize_with = "deserialize_some", - skip_serializing_if = "Option::is_none" - )] - pub distinct_attribute: Option>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub filterable_attributes: Setting>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub sortable_attributes: Setting>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub ranking_rules: Setting>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub stop_words: Setting>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub synonyms: Setting>>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + pub distinct_attribute: Setting, #[serde(skip)] pub _kind: PhantomData, } impl Settings { - pub fn check(mut self) -> Settings { - let displayed_attributes = match self.displayed_attributes.take() { - Some(Some(fields)) => { + pub fn check(self) -> Settings { + let displayed_attributes = match self.displayed_attributes { + Setting::Set(fields) => { if fields.iter().any(|f| f == "*") { - Some(None) + Setting::Reset } else { - Some(Some(fields)) + Setting::Set(fields) } } otherwise => otherwise, }; - let searchable_attributes = match self.searchable_attributes.take() { - Some(Some(fields)) => { + let searchable_attributes = match self.searchable_attributes { + Setting::Set(fields) => { if fields.iter().any(|f| f == "*") { - Some(None) + Setting::Reset } else { - Some(Some(fields)) + Setting::Set(fields) } } otherwise => otherwise, @@ -120,6 +97,7 @@ impl Settings { displayed_attributes, searchable_attributes, filterable_attributes: self.filterable_attributes, + sortable_attributes: self.sortable_attributes, ranking_rules: self.ranking_rules, stop_words: self.stop_words, synonyms: self.synonyms, @@ -129,10 +107,61 @@ impl Settings { } } -static ASC_DESC_REGEX: Lazy = - Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap()); +#[derive(Debug, Clone, PartialEq)] +pub enum Setting { + Set(T), + Reset, + NotSet, +} -#[derive(Debug, Deserialize, Clone, PartialEq, Eq)] +impl Default for Setting { + fn default() -> Self { + Self::NotSet + } +} + +impl Setting { + pub const fn is_not_set(&self) -> bool { + matches!(self, Self::NotSet) + } + + pub fn map(self, f: fn(T) -> A) -> Setting { + match self { + Setting::Set(a) => Setting::Set(f(a)), + 
Setting::Reset => Setting::Reset, + Setting::NotSet => Setting::NotSet, + } + } +} + +#[cfg(test)] +impl serde::Serialize for Setting { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + match self { + Self::Set(value) => Some(value), + // Usually not_set isn't serialized by setting skip_serializing_if field attribute + Self::NotSet | Self::Reset => None, + } + .serialize(serializer) + } +} + +impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting { + fn deserialize(deserializer: D) -> std::result::Result + where + D: Deserializer<'de>, + { + Deserialize::deserialize(deserializer).map(|x| match x { + Some(x) => Self::Set(x), + None => Self::Reset, // Reset is forced by sending null value + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] pub enum Criterion { /// Sorted by decreasing number of matched query terms. /// Query words at the front of an attribute is considered better than if it was at the back. @@ -142,8 +171,11 @@ pub enum Criterion { /// Sorted by increasing distance between matched query terms. Proximity, /// Documents with quey words contained in more important - /// attributes are considred better. + /// attributes are considered better. Attribute, + /// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable + /// attributes can be used in place of this criterion at query time. + Sort, /// Sorted by the similarity of the matched words with the query words. Exactness, /// Sorted by the increasing value of the field specified. @@ -152,40 +184,86 @@ pub enum Criterion { Desc(String), } +impl Criterion { + /// Returns the field name parameter of this criterion. + pub fn field_name(&self) -> Option<&str> { + match self { + Criterion::Asc(name) | Criterion::Desc(name) => Some(name), + _otherwise => None, + } + } +} + impl FromStr for Criterion { + // since we're not going to show the custom error message we can override the + // error type. 
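+    // A quick illustration (hypothetical doctest, assuming the impl below):
+    // both the old v0.21.0 syntax and the new v0.22.0 syntax parse to the
+    // same variants:
+    //
+    //     assert_eq!("sort".parse::<Criterion>(), Ok(Criterion::Sort));
+    //     assert_eq!("asc(release_date)".parse(), Ok(Criterion::Asc("release_date".to_string())));
+    //     assert_eq!("release_date:asc".parse(), Ok(Criterion::Asc("release_date".to_string())));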
type Err = (); - fn from_str(txt: &str) -> Result { - match txt { + fn from_str(text: &str) -> Result { + match text { "words" => Ok(Criterion::Words), "typo" => Ok(Criterion::Typo), "proximity" => Ok(Criterion::Proximity), "attribute" => Ok(Criterion::Attribute), + "sort" => Ok(Criterion::Sort), "exactness" => Ok(Criterion::Exactness), - text => { - let caps = ASC_DESC_REGEX.captures(text).ok_or(())?; - let order = caps.get(1).unwrap().as_str(); - let field_name = caps.get(2).unwrap().as_str(); - match order { - "asc" => Ok(Criterion::Asc(field_name.to_string())), - "desc" => Ok(Criterion::Desc(field_name.to_string())), - _text => Err(()), - } - } + text => match AscDesc::from_str(text) { + Ok(AscDesc::Asc(field)) => Ok(Criterion::Asc(field)), + Ok(AscDesc::Desc(field)) => Ok(Criterion::Desc(field)), + Err(_) => Err(()), + }, } } } -impl Display for Criterion { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Criterion::Words => write!(f, "words"), - Criterion::Typo => write!(f, "typo"), - Criterion::Proximity => write!(f, "proximity"), - Criterion::Attribute => write!(f, "attribute"), - Criterion::Exactness => write!(f, "exactness"), - Criterion::Asc(field_name) => write!(f, "asc({})", field_name), - Criterion::Desc(field_name) => write!(f, "desc({})", field_name), +#[derive(Debug, Deserialize, Clone, PartialEq, Eq)] +pub enum AscDesc { + Asc(String), + Desc(String), +} + +impl FromStr for AscDesc { + type Err = (); + + // since we don't know if this comes from the old or new syntax we need to check + // for both syntax. + // WARN: this code doesn't come from the original meilisearch v0.22.0 but was + // written specifically to be able to import the dump of meilisearch v0.21.0 AND + // meilisearch v0.22.0. + fn from_str(text: &str) -> Result { + if let Some((field_name, asc_desc)) = text.rsplit_once(':') { + match asc_desc { + "asc" => Ok(AscDesc::Asc(field_name.to_string())), + "desc" => Ok(AscDesc::Desc(field_name.to_string())), + _ => Err(()), + } + } else if text.starts_with("asc(") && text.ends_with(")") { + Ok(AscDesc::Asc( + text.strip_prefix("asc(").unwrap().strip_suffix(")").unwrap().to_string(), + )) + } else if text.starts_with("desc(") && text.ends_with(")") { + Ok(AscDesc::Desc( + text.strip_prefix("desc(").unwrap().strip_suffix(")").unwrap().to_string(), + )) + } else { + Err(()) + } + } +} + +impl fmt::Display for Criterion { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use Criterion::*; + + match self { + Words => f.write_str("words"), + Typo => f.write_str("typo"), + Proximity => f.write_str("proximity"), + Attribute => f.write_str("attribute"), + Sort => f.write_str("sort"), + Exactness => f.write_str("exactness"), + Asc(attr) => write!(f, "{}:asc", attr), + Desc(attr) => write!(f, "{}:desc", attr), } } } diff --git a/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-10.snap b/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-10.snap new file mode 100644 index 000000000..fbff0a0e6 --- /dev/null +++ b/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-10.snap @@ -0,0 +1,25 @@ +--- +source: dump/src/reader/v2/mod.rs +expression: spells.settings().unwrap() +--- +{ + "displayedAttributes": [ + "*" + ], + "searchableAttributes": [ + "*" + ], + "filterableAttributes": [], + "sortableAttributes": [], + "rankingRules": [ + "words", + "typo", + "proximity", + 
"attribute", + "sort", + "exactness" + ], + "stopWords": [], + "synonyms": {}, + "distinctAttribute": null +} diff --git a/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-4.snap b/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-4.snap new file mode 100644 index 000000000..809ced4f6 --- /dev/null +++ b/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-4.snap @@ -0,0 +1,39 @@ +--- +source: dump/src/reader/v2/mod.rs +expression: products.settings().unwrap() +--- +{ + "displayedAttributes": [ + "*" + ], + "searchableAttributes": [ + "*" + ], + "filterableAttributes": [], + "sortableAttributes": [], + "rankingRules": [ + "words", + "typo", + "proximity", + "attribute", + "sort", + "exactness" + ], + "stopWords": [], + "synonyms": { + "android": [ + "phone", + "smartphone" + ], + "iphone": [ + "phone", + "smartphone" + ], + "phone": [ + "android", + "iphone", + "smartphone" + ] + }, + "distinctAttribute": null +} diff --git a/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-7.snap b/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-7.snap new file mode 100644 index 000000000..016ada2a3 --- /dev/null +++ b/dump/src/reader/v2/snapshots/dump__reader__v2__test__read_dump_v2_from_meilisearch_v0_22_0_issue_3435-7.snap @@ -0,0 +1,30 @@ +--- +source: dump/src/reader/v2/mod.rs +expression: movies.settings().unwrap() +--- +{ + "displayedAttributes": [ + "*" + ], + "searchableAttributes": [ + "*" + ], + "filterableAttributes": [ + "genres", + "id" + ], + "sortableAttributes": [ + "release_date" + ], + "rankingRules": [ + "words", + "typo", + "proximity", + "attribute", + "exactness", + "release_date:asc" + ], + "stopWords": [], + "synonyms": {}, + "distinctAttribute": null +} diff --git a/dump/tests/assets/v2-v0.22.0.dump b/dump/tests/assets/v2-v0.22.0.dump new file mode 100644 index 0000000000000000000000000000000000000000..8932284eaa591dd569b0ed277594796250afda9c GIT binary patch literal 9809 zcmV-XCa&2ZiwFP!00000|LuM0a@$Cju7A%{;Bq2r=_Vxt1i08Saf6n;O?fLzwySHV zLjeLwGA$5bApy~Jl_TzJ+}E2YneUuTEL73~LU&mtfZWgeos-GE{d+flwcBpH z=Xvo*b$eacru#kDY2Wki-NKct%ug-r-c_$+AjRK*kxRy* z_$bu9o4j^kf4kjvJ9q1U3zu2{eg4}Lop8_(J-^krL$~Gj+g@wn54@Jwb-i{s@STzS z6^rr}>)%C*yY;_?YxnivpNY)h`=FBevTcB!!O!lw>rb+Nb^V=ohoS36LRLK%ac>FY}{`!12jJ0`-@#)!T(_rn*Lc|sqvs9J}|5RC7vhdT;@-58lL}b=NW>Xnk;P{EK{5Z5md8#sH z;ht})dFn5%WNcCUd)5p86z}MFLp&5wWP$(v;MDRbzKm67Wm92l-m_j&6;qitYj^SI zR4B_0U9Qs#{h5!3smr1CGZ8Jh6VBRmU)L<~2)&W$Uo1Z2UY^FLotachH)O^OS*fOI zs@67*RaZMsQful*7?`qVc`&6~=|vgKO!`sTEZU2c%XET7-BeDdB0Y)3nTTjD-cK?6 z%Otk~pJzrYGiSM?)$z?Y6(t!}*uk=JS&KTyb?E__=D!;y=zh^d>RN;eOTWjh0Bfr- z*3r^}Y~axeAEV-`b)EhwITMx>hul?OLeS8lZHp&cZ%c@mT4psv5eog@7Bf?l3c zGiVd#daO_7)fJ*b!&twv# zre~lQKgRz>k}GaM4$yk>+!Uah!(5$Yl1bji>hbet!_2b78VD440^LNfXyPP};8|nE z$)C&%t30*JP573lG|YW}F86h@78O(b{PO3b))PNFp}8&A`qzeoQXp#7(7`8-DPQh5Q7mP}Tar_+G2hl-oD&ekN_6kG9noC8?0{jETxyr$ji_FwkAfkWLT4+;5y3W~RHp6!|6roxZ5n`fRe&g{0a z&1|m_%xzhHFdTf7h!}|2m0q-*c59q~F@k&qVemH2WW=1T zc@dtAYSy}hrOj9hvChX_p*icAxBv~Cic1RzvMC^ciWo3v@kJGrGl{`a7x%1nkzm?w zntL%U79}UKpVj3Tk7bd>-)1~v$yj@0+{HbM<`Ha^6!sK4N_J883QYq69zF{eKMy5r zHDcIdaeH&H>;_tM5>qnPQh0_Lx8#_g*K9Hy547pGF<7>xxDI~-b1T+$*Q!vF5cHfo z6Bk`G-2sFa=7DifRg!C;67Dj);IvpI>FhS<1X3j|#k{eku0zssE9|;q7$V?NjwOs* zB_kq*g-k_V@_}5hDEKhR8Ynb;Y2UWs;~A?NJa3BcTjZ8`_YZIE7{NnCd6JAz#CR-% 
zj3(4L#3Tv`o0XsN&1tU*F%U&diD+DxWVmM?>3el?hOPiwBq=4)23ruI(b)xTA5@N# z;h=F6MahDgCYjSh8OTO=S{bzq?H*B0>t;w%3D-`|5^Gk{jmoG>)-X&%EptNz^pN0V z)k|g+iDNE2g5(=Z-FSU$C`;7;x4->u_)CuV18W%i^Gq8=xT{4I-O>_dl!QyFm|8If zM0_UGBqll3BrGk-Glu0P7LGY@lIqD=h|!4tjzqQ)!iXgBA_!1v8Au}HW>^PP)+NLF9Ki!03|x*1j2tUkTVtVC zZ}8(#z`!O^?DNpAC`nGONS?y+h3OVS&Y>kPITbMUF!qJ{JBDeFUX6|HZ&s3zUq4G^ zfJ&ymg!&QjB%6YU5)eO6y7n`5$Y7F7;GAftbh_wvfEp#iQOj!5X5=bkd{4WXB*0un$M|kSMYgmn&E%8rs@GiV%r%lC9@Lr|mFnSW z((x6jMNR#=pq7?KD}Wy#7=`jJDjUhU2>nW%qS2fXhRX8BYLB2Uz@%9GFwM>T#7C0d zXj;J`86}`p2<3}8tYRfBC~4LVp&3joIQcwDywb11tYt_)t))q%>jx z*x3F;5pf{UMSKDsgul}yCGkK?y*7ohsc69!mFojZBMo_`;17sI0;pli6WAQWBxGb` z1lpX4BnII$N#~^je?k+AoJFE1?anuO9p~U!#G%MsVZTePx6bo3g@`NHk`+ISjb$LI zyKHJKw{&XHdMx45M9b#3ipi!S)mBzJ_0LH1{@sR{iE~x-rvTsM5-3j-k}Xt(319nS za@AfivVCVL<}Lf5Hf#M&rLWC{uXA=-;LP%DP3<4A-9A_Iua^JI;T@G0Us3KcLa0$| zoJz9$r*dA?@%+eoTz^!k`U)e23^o=smKp$KKqNR4TFD#PKNFA-aNXgFOxa*BeVMfv zNscHX*5Ha~1+FP{uw_az6tCmS*l1_e0(jsVTn+#w6Twmj0i^XHiSF^(ml1gD6^ZkO zRH9i`vIRiJ8(}irKu<@4)e3Pd5D4mgAty`dv;an^oAd&n*VP5F7N#GPj%TGFgsSyc zu|sx6Q+}3^W@FnuNy3Yf3dg-FRI$}!$C^MJWMt;l;u0%2&yP&hUOR_xM=K2X@LYD| z_v*ZQMVJE$v<~MTny*_5>o@CVzsAv67ZclgeG5~ci*SOzYm(~O(S-;ny?hCTQ@20E zbFn%^BU4_~o)jYG;gaQq(&j}WUbKBdwgs8Yx-|4lC?ctm2x^1^7nMvo)_yB-^La9* zNi06qc9{Almw-AWf>faUh4sN-8jE7&XWE2AR{}YPwE{CXFm|T)L0G(z%wXkvCKd3& zuxUKz6s6``#ZMFH+KjOg--3V=q{wOL01|b5MUVcwfkC0q=)oeP4`E8P6$z#b@ zx3R(ys*kNn4zm9529=@Wh`qMRK1QJsaKxA_hE(INjGp2?B<>yxl=dk`=$}^8Nwpg! zd4c{fTVqVO#Th;XYLvlpBfnD&9+K)pOR-2QFgGxt68)&=GXOrTJ2`?2WO;2yRe+vg znG2c3E%??rTGERpeerTRo+&`@Xj5I&iz0c)?k6=}7NQgY42rygte_`@wilCN45$j- zD)f|IxDl;%D&~=-F|cwF+ng0WV^zaE*P!z_(!OBT0Fy{jQ;j;f(TPtmzfpNrZDAr5 znfW>pM?NYFqC0v8Mgr3zHsHOY3Xbh5tb)ok(bhB}=U`BMWT%hFghXcnt)*fcqQ4dX zH;V6);D3BxYQt7?S_w(I{ThMX6!d?-7TrL-uC7F#epEcV0+kE)axG*_G#h{FO95W^R%F!=d~@+vGNswkgsOk-(K`RlVD{vOIY(p6~r-95-#=$GJpN|v(BvNlYunVDps#`w8 zco6;7u()@zSa9!@&Os1W5zYLJ+-<6ulHm)>IUzT?2x*w%hw#zaA8yw}h?~sRO@gmGKvJT`1Rq@ zFg(Zty6)pNt@rGlXweO*4gl8-VwG*R6+NwCS?yxfXQ0SX(^IeO;nWUEeK_N22&Ro+ zqLqlud;D0FbAfu4WLpo?l3f%QN=RnangU=XRaR?;jWP5)k60Vfl5yZSJb9z)!# zKqk4F|5CM+mZfy)%8=8{!IMP92RnxwF*FmG;Na#8=@JwRW2=o4iHLQz@G9A-q)>8~ zXCew!B?gN6R*1x4l<3wO)E+Eci9*a!N#QCNK)5wdlNqms=^zAwoD-4mpkptABb~2D zFNq}ee>Ws)XPvlwA9_qVW@EfL3!GRBo8Gv1tNY7v?)Lg2$ ziEjc|)WKaSh$NjW175!nq0DD=|A=&nHO=9-Br@DoPepc%oQ?8rX}2KCJc1oRlR?U~ z70EFuG4-XvC{2jQ^t9lvHJ^mgZFcQW1<)yiO2-upTOY)yWHypw^>I10@2Gc&FCf9x zx=>gX`ri}JK_lvvJY)KA(TTnRW-GO*qtEaI(B+xVkX=?XLnE}IFnK@Kn#u91V zzor+~cg4^Wa881mg>;qks1{nUdRXs5!YM~aTg{53W(qcNOaPJ(3Z-;;=yp*WgVNIw zi|;ohLB+_YD_N1`9CFsU);?NQ@a-8{?vM(4@oTx%ES8F%Q)fiYuQq+Dd}<2ivK%z z5;V&ME+&B(XfP~%o3cV$Aw<3fcUDJ0X%@68LqpGebxI3Ok`z=e3OQdV0uK5W=%QY6 zvRw)(3hgnbfS`shP&@6r0V|9I`_oK&W*I4*bXIg3p#kfd01W64iX#?vCu?NL-nj{r zRc)3K$+{c3P6#}Ib&s{kL37*|)kP{&CZPsIb!tVf#Hv?FE}<%tII3A-%RmDfW4>S7 zo>kyPOUiJa*+}}BY?1f{@5`YgBfoLG7~1NfD0jG){I(_- zFLH=GX=qXb*Va*)<)Rp&*7G%kxRCqls!~`{`}WSRHCcOH7sA8@pVg900O?GdWeQ5H zyGw0CE~xBA_7AM5pmeqw5wQqT!smizrEOo<@11Kt)Jon=?%f|U{!}+lw%IMo=mVv9 zY?>~w$_It@V(w@P1xu5WVEI`YRv`(to)8Om1W?TjL0Mp?hV8lb5MdBQoO6NrvbL|u z!=T+w+SJje2K6A1*(M=2G?g}uYrXLfOP=zjr}?&bI0vG5sq{?AEH|^k+L<4(Wc5*P zD~Y{D0i8oq*+iwZ-J&b!`H7@yc)35#`MMW<7n4e})J2kIs3(lPtx*hvK948@<_TwYL7pwQ%6*H_K_` zAG?o#{@$ma^5#U^G52>MU#5}6C>x8(o(y;BC*SIhXpx>;1RkmL*>#UBK zC#1_ohjC>r8Bruzv;fV3TOJr^Zg|jK#cg;^@BwRL^0tT@4$BQnA(lLa?$y8<5d`Hz zQT$(72UIuoJo6S>Lx(jIvKIBHn;H~jN#1Feua)o^mDZK&BshFGIK_iP?34l){e{gm zQC1aGn$N)yn3EE?Ascke$SCMG(bA8AA#|8bu_*&vCS;{sq+o{;V!YJ?l}yQ)E2BHr ztSSK`m^ZxA>`k@!h4q-HV?Pkui#2*c7pjLg!Tn@VCs9_ETli$q%KF&boM1t{906?( z6)Pa)ji)|{Q?2(`npdG!mI=pKsSzG{CSIZe-s 
zFq%)vW3!^Tp53_kY7{3;Xq9_bqLb$^#jyq_X1rW!9u?+G5nByPB8tZdP*S-feT{8* zo!3^hGV+zqcaa)npItqi*FDdr2}(Cgta~qoS$4kD;?YK%x~t(_6=JNJ`(Fn!dcPaQ z_|@^cKTMX}g|0&35q$o)dw18l*XsEn-I3_^2jhOr8waD7JMOuye$VZ+dZY2UHxPc; z9d+;Re0|ONPd&GLcmC5YT)VIT4V?ef-8laR@AvM`f4PP0X3u|Wz^qO=p8+AN^A3Y* zz(S=|oRbh@yYcl$Lse%&HCL*N&|w28 zHO;cQy1&0K`81Gh7LN9kbh4lA&-WKV-u6G9eRTgZ9k&nPJ?%enpMQG!>}dXC{P^MF z(%JhkpP(x8!p@{v5Q3}FP7nZr9p}-QH?)!05W;8TBqVm8OkOI?eSx>|kUDAQV0Gy? z5q5&x4Jo4j=l}hG@-aC}RG&6QQQ!=wCqOU}!3R0x{7FGIEOf;7B|Arta}@9w{?fRn zAt#Xmcgrb7#V0Uprhx<9S+u0!WXcKe2RuW!*~y$rF(%g+B9Bvi`Jg4!TRwyb+~%H!s11igP`%Ac}%b=$%Y0$Vi<=SQnr7Z4c_S*Xg#~%|iOpnO!>Y#9qT= ze% z9zGeqdMDm(S!GPcl4eGOf9(0|tnu1jGd`k$QhQX6C?0&lgwtRofznvaSQLS`%Hige zIno^4rpW6!gMqU)MTKK_fEL3gPZTIbE$g#07Y_H)Om8p@ zlZ7HFfl#iZTsMSW=Gv#H-0^hcN8|zrge>^8Ws`Y&lE8hJOyB{KUFt7EqDp?O=JhB` zc+(!i(?G9u{#j0RO;8On%AAr5HPWMZEecM$G7ttglr5yvQCb~AHW)tUf+!9l5RwmB znM7SqG|%gp^6WBmup1$JVR>E(?WRjO%dVvpB{}LkMo1-Pum)l0kJK`6Ez)2(RiO_= z7}0**S;A2jILevX)3{Ewj@z~S?KPq;S-4pd@DXhcL_?3IzOzQZb?!c(?Z#lG7k)}; z#13>j%+%!l^e}#U{7bk0yd6DPZ_|VLMeq2-lTErk<}}6xl2iltz*BvSwjBB~=gng( z6sL~sI8$vmHx`)Q5G+<_=$u|$l+t)wNr;FhA zi1DKBym}cOPSbyUI6nIMUH_+7-RbmgCsduyg?ObWD$V195SNtKX%G6nrd2ji=K+Ld zy^M1{wGDImA&*a&6d2S80C1%002~7$7)zgXkSS=KEclEWKLsx}%ha4)aasn2BWdjr z1Y(7iE=^`GHAif0al&)aGuBega*0&czb5Ue!ju8$1ZYpkq!8+zCZ__eYxB(Xfnox* zJ%?zj$+G2_Tn4A$N~|L19TPR^<85FgK!JcKmx6m!g&mSedM~V7)wj8R-e{Qb(GhQ{J0~}*ZVB#WeG^2lH z!V1c<;xG~ESRDgC^v@*z&0~&EX{(zJuVM?iHqfLF#R6+a6ygWWHyi>m7oqqEDVD6s znLi3eMp!D^i$?p{I3LpN3Nu+*iqLzmk1{0uctNLbm`$u~Pb^8SDxOi&sgeBPPp#Lf z#CL6oD3jO;pm_=d8xTD0f1}nCN?b1ZJHQ}0frBhdGQgp+x2|)~8+6xr`_Op3Ae5Em z?Dw2HE5F@oE%T2~_}2dLqcc2s_Tp&z>0xL2deC_r&OZg4Yxy)+BOP@j`(-SwC;mu# z9z#sJPbmcNWu%Dt$b%@Hgs@1mm#N{_X+q|a_=r$XW;ijA=TSb=IETO}P;awJOT(VuQU*=@+q*#nrr;AJ>3WOGcvp30za`T=iIqn{1FbiWW zYj4*Cd&Jh7!VrdjNhbq+iDj5qLT4L;K`U{ytiiMmW?j<)OBK{m#{ zr1PV6*cUTI;{LMU@bNU2v}vhsG(Sz!G)cG1?t6dUYZdY+xpwsFH zLbQBW_ygA)^hWOB-p6mye5@$|9?8?hry!r}-ZzhV&Kuf8Ft)uH$Wq|IqvQ zZsEG)e~{yS+v#-y1(e$jbArR*$hEBD(0T|4)Gp+A{nl%Ed*5q5#wSU__A)tS zjr+YlchGF)^8ioVuHET$`i|GO>8A$**klu!zB&zwtkHQe!_AKEbVv_?7`#UP81h)} z^?piz!#DT?%Ma)sZCz}xRN%P%p02>Ls|wD4ztbj@klt^vzfT@xHR7tEKSDVuCb)%8 zZI?lG3fC2IWvv8>psD2T$Bp|Q&qnPq4j$NS4zrvfFZQxv1{31V|E5U{6flH04MX?= z%|78|53IwtZ-&ob9sFRuCioN1ub_RFu!BV~jgu&uEFW0?)*Oq|pv^IHP7ryXvMFDz zCOt_)N;+%2cxhws5Boot30GwA!`xpi7JDPO(4&002QYS@PNdlnO$9CAZ;gt9wsdp* znsoN&|5t|pVYfZI(|6mh2TuM5l=Tev`1v=%Z2li_Ui@@$KoV4+l3%nbXOq3rXvsSw z18(&pdhjq`p1eKM{)gJvhuLO|n=@^mD4$&o@@+H~gU)QPcC-bs0nAKZC#(paO?u4DHKD&^KRw+DqSEpg;G8XST4HH!T*QMbriQ&?YKqO}dk2(ddVnr@b*v?Lb-EF%M zogE2#a8{#i-Q2z=r&Y@K+8_i&+4}jlQ8ra)+T5gU&_>^_C|jnQrXH2Etw81PBpr=7 z%tM)29D}qT(yupKM-+KTFCzEOip(A$BHAOu>Rv?eFq`cy^zNr)Y0)-&N`842BYd+; zOYL<+dQ4ds6gEWk3Wy8`PncRT03bsIOFYm=kR1M$WWMX#33G7bYx7zSdNY} z@YghEZ50&Oj}^t3MP^NSHb?whm|MtjOJ`~%iFrJ}95tGH zS``zi))_5oi+%m7N+>MsvMcvnwVTjbWoj!QG2SlbMQht7YM2hN6npN z-5YFm)m~Z$B#0jk+B?c&Z;QJ|@%lfH9PU;{j%cHvA0g_xiy$6u@zU%)@7-lxvNl{Y zXGfPTZn4x9gZ37&$z)JaBx<*B%Tiisa|lN0F$`w7DrZVTkl9@Zg0&yb{#-#wQ8M zM}b6$FlBi>7Ww#mp|E z0#X|V9h@0Dcwnj6pSKYDKF-roLe!MdAvuVS)Badl>or~luZVOXVTGARCp|o%gw>RC za^~m}R1FAusRkuH)W8F<*;%4{yqo#iFzyx;w+({-!U>#ItJf_gvfc3>ov|YolVSO}DtAx7@zh zIfsZ@wOa?t99H3RGQT=W6QbF>Bk6*qpC|F873m%R78^Xv%aJUfL!2yClYab@Pc>md zi@$U>Bz+aLB%=?n4~Kf2$4sA`b?MN~?lj)ATi#hKU}`}`eGA1Cr4Kqj=P&TdXMl-raTG7svfi@?3`M-c4O!k^e=zUGDO~ZsDq}|9>e+xm{I=qO7Y7iAC<)mJ#nWMu6E^fN|e4i*-zJ3$p1D#$vgRf z3zu2{ef{@jOuvD2@0zc#SbyM2-rf4&!d0yQP3eBRzM%iX>dyN5J3ZUJ)Bm?{?X0}# zdSea12e@&$(yBP%-0Cu_{#nSlEp0@o{R;VJgZ#=gq2ayJKdMu{gJ=53f$;1< z6~DWZrEM~|Mf^)H@{79G!>rlVHQJ{{zmCO4DJdO5wFqD9!2uw+po%`w^PlyPAl}Ss 
zH2Bv_8<&~ujx)gjR_EH@nCqir)4;VJ{dAcLr`S!$z0!nt?7r(cf0PM#J+EiGs}t^A zG~qXLJl18V-t}DA;a8e^4Ayl!t5bhjyZ`#9-Wzz1*ZY!OsZIUOY%QtXuI(-<>i_5G z%dCwD5ox!tv^J=sYxl2fZRlr`d0{RRBfo>l?(KD5fXm%0Ocht*^_gb{?;6F3%?wGrmC`8zs}i$mJ)C?+0`svGS+$JwLAGUltp5&c*S34mUw=h}UTk0aB}z5N+g rf5?(&oq@Xd+k-vFh0urp{HHop_wKs8?ykG*U*P&*M>VQg0I&c6s+?VG literal 0 HcmV?d00001 diff --git a/meilisearch/tests/dumps/mod.rs b/meilisearch/tests/dumps/mod.rs index 06cb21f20..cbc895f32 100644 --- a/meilisearch/tests/dumps/mod.rs +++ b/meilisearch/tests/dumps/mod.rs @@ -98,14 +98,14 @@ async fn import_dump_v1_movie_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({ "displayedAttributes": ["genres", "id", "overview", "poster", "release_date", "title"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["typo", "words", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) + json!({ "displayedAttributes": ["genres", "id", "overview", "poster", "release_date", "title"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": ["genres"], "rankingRules": ["typo", "words", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } }) ); let (tasks, code) = index.list_tasks().await; assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["genres", "id", "overview", "poster", "release_date", "title"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT7.288826907S", "enqueuedAt": "2021-09-08T09:34:40.882977Z", "startedAt": "2021-09-08T09:34:40.883073093Z", "finishedAt": "2021-09-08T09:34:48.1719Z"}, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31968 }, "error": null, "duration": "PT9.090735774S", "enqueuedAt": "2021-09-08T09:34:16.036101Z", "startedAt": "2021-09-08T09:34:16.261191226Z", "finishedAt": "2021-09-08T09:34:25.351927Z" }], "limit": 20, "from": 1, "next": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["genres", "id", "overview", "poster", "release_date", "title"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT7.288826907S", "enqueuedAt": "2021-09-08T09:34:40.882977Z", "startedAt": "2021-09-08T09:34:40.883073093Z", "finishedAt": "2021-09-08T09:34:48.1719Z"}, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31968 }, 
"error": null, "duration": "PT9.090735774S", "enqueuedAt": "2021-09-08T09:34:16.036101Z", "startedAt": "2021-09-08T09:34:16.261191226Z", "finishedAt": "2021-09-08T09:34:25.351927Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -161,7 +161,7 @@ async fn import_dump_v1_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( settings, - json!({"displayedAttributes": ["description", "id", "name", "summary", "total_downloads", "version"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 }}) + json!({"displayedAttributes": ["description", "id", "name", "summary", "total_downloads", "version"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": ["version"], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 }}) ); let (tasks, code) = index.list_tasks().await; From 6be9a828fa5630b9082245dbed7ec138cf0f3b2a Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 31 Jan 2023 12:49:35 +0100 Subject: [PATCH 097/186] makes clippy happy --- dump/src/reader/compat/v1_to_v2.rs | 4 ++-- dump/src/reader/v2/settings.rs | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/dump/src/reader/compat/v1_to_v2.rs b/dump/src/reader/compat/v1_to_v2.rs index baadd2104..789e8e0b1 100644 --- a/dump/src/reader/compat/v1_to_v2.rs +++ b/dump/src/reader/compat/v1_to_v2.rs @@ -266,7 +266,7 @@ impl From for v2::Settings { ranking_rules .into_iter() // filter out the WordsPosition ranking rule that exists in v1 but not v2 - .filter_map(|ranking_rule| Option::::from(ranking_rule)) + .filter_map(Option::::from) .map(|criterion| criterion.to_string()) .collect() }); @@ -348,7 +348,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"ad6245d98d1a8e30535f3339a9a8d223"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2298010973ee98cf4670787314176a3a"); assert_eq!(update_files.len(), 9); assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dumps v1 diff --git a/dump/src/reader/v2/settings.rs b/dump/src/reader/v2/settings.rs index 9cd363ca5..c7ecb7f9b 100644 --- a/dump/src/reader/v2/settings.rs +++ b/dump/src/reader/v2/settings.rs @@ -237,13 +237,13 @@ impl FromStr for AscDesc { "desc" => Ok(AscDesc::Desc(field_name.to_string())), _ => Err(()), } - } else if text.starts_with("asc(") && text.ends_with(")") { + } else if text.starts_with("asc(") && text.ends_with(')') { Ok(AscDesc::Asc( - text.strip_prefix("asc(").unwrap().strip_suffix(")").unwrap().to_string(), + 
text.strip_prefix("asc(").unwrap().strip_suffix(')').unwrap().to_string(), )) - } else if text.starts_with("desc(") && text.ends_with(")") { + } else if text.starts_with("desc(") && text.ends_with(')') { Ok(AscDesc::Desc( - text.strip_prefix("desc(").unwrap().strip_suffix(")").unwrap().to_string(), + text.strip_prefix("desc(").unwrap().strip_suffix(')').unwrap().to_string(), )) } else { Err(()) From 2a1a7ef00a68d49265d86d663b4e1ec7e4706c60 Mon Sep 17 00:00:00 2001 From: Vibhav Bobade Date: Fri, 14 Oct 2022 17:36:10 +0530 Subject: [PATCH 098/186] Integrate Uffizzi --- .github/uffizzi/Dockerfile | 47 +++++++++ .github/uffizzi/docker-compose.uffizzi.yml | 26 +++++ .github/uffizzi/nginx/nginx.conf | 28 +++++ .github/workflows/uffizzi-build.yml | 100 ++++++++++++++++++ .github/workflows/uffizzi-preview-deploy.yml | 103 +++++++++++++++++++ 5 files changed, 304 insertions(+) create mode 100644 .github/uffizzi/Dockerfile create mode 100644 .github/uffizzi/docker-compose.uffizzi.yml create mode 100644 .github/uffizzi/nginx/nginx.conf create mode 100644 .github/workflows/uffizzi-build.yml create mode 100644 .github/workflows/uffizzi-preview-deploy.yml diff --git a/.github/uffizzi/Dockerfile b/.github/uffizzi/Dockerfile new file mode 100644 index 000000000..ae2b8231e --- /dev/null +++ b/.github/uffizzi/Dockerfile @@ -0,0 +1,47 @@ +# Compile +FROM rust:alpine3.16 AS compiler + +RUN apk add -q --update-cache --no-cache build-base openssl-dev + +WORKDIR /meilisearch + +ARG COMMIT_SHA +ARG COMMIT_DATE +ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE} +ENV RUSTFLAGS="-C target-feature=-crt-static" + +COPY . . +RUN set -eux; \ + apkArch="$(apk --print-arch)"; \ + if [ "$apkArch" = "aarch64" ]; then \ + export JEMALLOC_SYS_WITH_LG_PAGE=16; \ + fi && \ + cargo build --release + +# Run +FROM uffizzi/ttyd:alpine + +ENV MEILI_HTTP_ADDR 0.0.0.0:7700 +ENV MEILI_SERVER_PROVIDER docker +ENV MEILI_NO_ANALYTICS true + +RUN apk update --quiet \ + && apk add -q --no-cache libgcc tini curl + +# add meilisearch to the `/bin` so you can run it from anywhere and it's easy +# to find. +COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch +# To stay compatible with the older version of the container (pre v0.27.0) we're +# going to symlink the meilisearch binary in the path to `/meilisearch` +RUN ln -s /bin/meilisearch /meilisearch + +# This directory should hold all the data related to meilisearch so we're going +# to move our PWD in there. 
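+# A rough usage sketch (image name is hypothetical): built from the repo root,
+# the image serves a ttyd web terminal on 7681 next to Meilisearch on 7700:
+#   docker build -f .github/uffizzi/Dockerfile -t uffizzi-meilisearch .
+#   docker run -p 7700:7700 -p 7681:7681 uffizzi-meilisearch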
+# We don't want to put the meilisearch binary +WORKDIR /meili_data + + +EXPOSE 7700/tcp + +ENTRYPOINT ["tini", "--"] +CMD ["ttyd", "/bin/zsh"] diff --git a/.github/uffizzi/docker-compose.uffizzi.yml b/.github/uffizzi/docker-compose.uffizzi.yml new file mode 100644 index 000000000..17f241238 --- /dev/null +++ b/.github/uffizzi/docker-compose.uffizzi.yml @@ -0,0 +1,26 @@ +version: "3" + +x-uffizzi: + ingress: + service: nginx + port: 8081 + +services: + meilisearch: + image: "${MEILISEARCH_IMAGE}" + restart: unless-stopped + ports: + - "7681:7681" + - "7700:7700" + deploy: + resources: + limits: + memory: 500M + + nginx: + image: nginx:alpine + restart: unless-stopped + ports: + - "8081:8081" + volumes: + - ./.github/uffizzi/nginx:/etc/nginx diff --git a/.github/uffizzi/nginx/nginx.conf b/.github/uffizzi/nginx/nginx.conf new file mode 100644 index 000000000..6eca6b6f0 --- /dev/null +++ b/.github/uffizzi/nginx/nginx.conf @@ -0,0 +1,28 @@ + +events { + worker_connections 4096; ## Default: 1024 +} + +http { + map $http_upgrade $connection_upgrade { + default upgrade; + '' close; + } + + server { + listen 8081; + + location / { + proxy_pass http://localhost:7681; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + } + + location /meilisearch/ { + # rewrite /meilisearch/(.*) /$1 break; + proxy_pass http://localhost:7700/; + } + } +} + diff --git a/.github/workflows/uffizzi-build.yml b/.github/workflows/uffizzi-build.yml new file mode 100644 index 000000000..6051b0cdd --- /dev/null +++ b/.github/workflows/uffizzi-build.yml @@ -0,0 +1,100 @@ +name: Uffizzi - Build PR Image +on: + pull_request: + types: [opened,synchronize,reopened,closed] + +jobs: + build-meilisearch: + name: Build and push `meilisearch` + runs-on: ubuntu-latest + outputs: + tags: ${{ steps.meta.outputs.tags }} + if: ${{ github.event.action != 'closed' }} + steps: + - name: checkout + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Generate UUID image name + id: uuid + run: echo "UUID_TAG=$(uuidgen)" >> $GITHUB_ENV + + - name: Docker metadata + id: meta + uses: docker/metadata-action@v3 + with: + images: registry.uffizzi.com/${{ env.UUID_TAG }} + tags: | + type=raw,value=60d + + - name: Build Image + uses: docker/build-push-action@v3 + with: + context: ./ + file: .github/uffizzi/Dockerfile + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + push: true + cache-from: type=gha + cache-to: type=gha,mode=max + + render-compose-file: + name: Render Docker Compose File + # Pass output of this workflow to another triggered by `workflow_run` event. + runs-on: ubuntu-latest + needs: + - build-meilisearch + outputs: + compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }} + steps: + - name: Checkout git repo + uses: actions/checkout@v3 + - name: Render Compose File + run: | + MEILISEARCH_IMAGE=$(echo ${{ needs.build-meilisearch.outputs.tags }}) + export MEILISEARCH_IMAGE + # Render simple template from environment variables. 
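+          # e.g. (tag shown for illustration) it turns
+          #   image: "${MEILISEARCH_IMAGE}"
+          # into
+          #   image: "registry.uffizzi.com/<uuid>:60d"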
+ envsubst < .github/uffizzi/docker-compose.uffizzi.yml > docker-compose.rendered.yml + cat docker-compose.rendered.yml + - name: Upload Rendered Compose File as Artifact + uses: actions/upload-artifact@v3 + with: + name: preview-spec + path: docker-compose.rendered.yml + retention-days: 2 + - name: Serialize PR Event to File + run: | + cat << EOF > event.json + ${{ toJSON(github.event) }} + + EOF + - name: Upload PR Event as Artifact + uses: actions/upload-artifact@v3 + with: + name: preview-spec + path: event.json + retention-days: 2 + + delete-preview: + name: Call for Preview Deletion + runs-on: ubuntu-latest + if: ${{ github.event.action == 'closed' }} + steps: + # If this PR is closing, we will not render a compose file nor pass it to the next workflow. + - name: Serialize PR Event to File + run: | + cat << EOF > event.json + ${{ toJSON(github.event) }} + + EOF + - name: Upload PR Event as Artifact + uses: actions/upload-artifact@v3 + with: + name: preview-spec + path: event.json + retention-days: 2 diff --git a/.github/workflows/uffizzi-preview-deploy.yml b/.github/workflows/uffizzi-preview-deploy.yml new file mode 100644 index 000000000..8b3fdde96 --- /dev/null +++ b/.github/workflows/uffizzi-preview-deploy.yml @@ -0,0 +1,103 @@ +name: Uffizzi - Deploy Preview + +on: + workflow_run: + workflows: + - "Uffizzi - Build PR Image" + types: + - completed + +jobs: + cache-compose-file: + name: Cache Compose File + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + outputs: + compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }} + pr-number: ${{ env.PR_NUMBER }} + expected-url: ${{ env.EXPECTED_URL }} + steps: + - name: 'Download artifacts' + # Fetch output (zip archive) from the workflow run that triggered this workflow. + uses: actions/github-script@v6 + with: + script: | + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: context.payload.workflow_run.id, + }); + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name == "preview-spec" + })[0]; + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + let fs = require('fs'); + fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/preview-spec.zip`, Buffer.from(download.data)); + + - name: 'Unzip artifact' + run: unzip preview-spec.zip + + - name: Read Event into ENV + run: | + echo 'EVENT_JSON<> $GITHUB_ENV + cat event.json >> $GITHUB_ENV + echo 'EOF' >> $GITHUB_ENV + + - name: Hash Rendered Compose File + id: hash + # If the previous workflow was triggered by a PR close event, we will not have a compose file artifact. 
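+        # The md5 digest doubles as the cache key in the caching step below, so
+        # identical renders map onto the same cache entry.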
+ if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }} + run: echo "COMPOSE_FILE_HASH=$(md5sum docker-compose.rendered.yml | awk '{ print $1 }')" >> $GITHUB_ENV + + - name: Cache Rendered Compose File + if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }} + uses: actions/cache@v3 + with: + path: docker-compose.rendered.yml + key: ${{ env.COMPOSE_FILE_HASH }} + + - name: Read PR Number From Event Object + id: pr + run: echo "PR_NUMBER=${{ fromJSON(env.EVENT_JSON).number }}" >> $GITHUB_ENV + + - name: DEBUG - Print Job Outputs + if: ${{ runner.debug }} + run: | + echo "PR number: ${{ env.PR_NUMBER }}" + echo "Compose file hash: ${{ env.COMPOSE_FILE_HASH }}" + cat event.json + + - name: Add expected URL env var + if: ${{ runner.debug }} + run: | + REPO=$(echo ${{ github.repository }} | sed 's/\./+/g') + echo "EXPECTED_URL=${{ inputs.server }}/github.com/$REPO/pull/${{ env.PR_NUMBER }}" >> $GITHUB_ENV + + deploy-uffizzi-preview: + name: Use Remote Workflow to Preview on Uffizzi + needs: + - cache-compose-file + uses: UffizziCloud/preview-action/.github/workflows/reusable.yaml@desc + with: + # If this workflow was triggered by a PR close event, cache-key will be an empty string + # and this reusable workflow will delete the preview deployment. + compose-file-cache-key: ${{ needs.cache-compose-file.outputs.compose-file-cache-key }} + compose-file-cache-path: docker-compose.rendered.yml + server: https://app.uffizzi.com + pr-number: ${{ needs.cache-compose-file.outputs.pr-number }} + description: | + The meilisearch preview environment contains a web terminal from where you can run the + `meilisearch` command. You should be able to access this instance of meilisearch running in + the preview from the link Meilisearch Endpoint link given below. + + Web Terminal Endpoint : ${{ needs.cache-compose-file.outputs.expected-url }} + Meilisearch Endpoint : ${{ needs.cache-compose-file.outputs.expected-url }}/meilisearch + permissions: + contents: read + pull-requests: write + id-token: write \ No newline at end of file From 231067a1c4cbeb66b7db955bc2f1bab108597745 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Wed, 1 Feb 2023 11:53:39 +0100 Subject: [PATCH 099/186] Bump milli to v0.41.1 --- Cargo.lock | 16 ++++++++-------- meilisearch-types/Cargo.toml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bbd41a5e3..aed197e8b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1300,8 +1300,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" +version = "0.41.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3" dependencies = [ "nom", "nom_locate", @@ -1319,8 +1319,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" +version = "0.41.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3" dependencies = [ "serde_json", ] @@ -1884,8 +1884,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" +version = "0.41.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3" dependencies = [ 
"serde_json", ] @@ -2433,8 +2433,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.40.0" -source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.0#4e4d8dfda72e9301d66a637dabaee619ec0d1a02" +version = "0.41.1" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3" dependencies = [ "bimap", "bincode", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 5c3c13af5..54efe1f56 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -16,7 +16,7 @@ file-store = { path = "../file-store" } flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.41.0", default-features = false } +milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.41.1", default-features = false } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } serde-cs = "0.2.4" From d563ed8a39a81b4aebb0899be3963100bac66aba Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 25 Jan 2023 17:22:32 +0100 Subject: [PATCH 100/186] Making it work with index uid patterns --- Cargo.lock | 1 + meilisearch-auth/Cargo.toml | 1 + meilisearch-auth/src/lib.rs | 57 +++++++------------ meilisearch-types/src/index_uid_pattern.rs | 29 ++++++++-- meilisearch-types/src/keys.rs | 8 +-- .../src/extractors/authentication/mod.rs | 5 +- 6 files changed, 53 insertions(+), 48 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dd7d828da..0fc88e802 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2535,6 +2535,7 @@ dependencies = [ "base64 0.13.1", "enum-iterator", "hmac", + "maplit", "meilisearch-types", "rand", "roaring", diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 383be69cf..a42cbae02 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" base64 = "0.13.1" enum-iterator = "1.1.3" hmac = "0.12.1" +maplit = "1.0.2" meilisearch-types = { path = "../meilisearch-types" } rand = "0.8.5" roaring = { version = "0.10.0", features = ["serde"] } diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index c81f9f20b..287dda0ce 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -8,6 +8,7 @@ use std::path::Path; use std::sync::Arc; use error::{AuthControllerError, Result}; +use maplit::hashset; use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey}; use meilisearch_types::star_or::StarOr; @@ -75,31 +76,12 @@ impl AuthController { search_rules: Option, ) -> Result { let mut filters = AuthFilter::default(); - let key = self - .store - .get_api_key(uid)? - .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?; + let key = self.get_key(uid)?; - if !key.indexes.iter().any(|i| i == &StarOr::Star) { - filters.search_rules = match search_rules { - // Intersect search_rules with parent key authorized indexes. 
- Some(search_rules) => SearchRules::Map( - key.indexes - .into_iter() - .filter_map(|index| { - search_rules.get_index_search_rules(index.deref()).map( - |index_search_rules| { - (String::from(index), Some(index_search_rules)) - }, - ) - }) - .collect(), - ), - None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()), - }; - } else if let Some(search_rules) = search_rules { - filters.search_rules = search_rules; - } + filters.search_rules = match search_rules { + Some(search_rules) => search_rules, + None => SearchRules::Set(key.indexes.into_iter().collect()), + }; filters.allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?; @@ -182,13 +164,13 @@ impl Default for AuthFilter { #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(untagged)] pub enum SearchRules { - Set(HashSet), - Map(HashMap>), + Set(HashSet), + Map(HashMap>), } impl Default for SearchRules { fn default() -> Self { - Self::Set(Some("*".to_string()).into_iter().collect()) + Self::Set(hashset! { IndexUidPattern::all() }) } } @@ -198,16 +180,12 @@ impl SearchRules { Self::Set(set) => { set.contains("*") || set.contains(index) - || set - .iter() // We must store the IndexUidPattern in the Set - .any(|pattern| IndexUidPattern::new_unchecked(pattern).matches_str(index)) + || set.iter().any(|pattern| pattern.matches_str(index)) } Self::Map(map) => { map.contains_key("*") || map.contains_key(index) - || map - .keys() // We must store the IndexUidPattern in the Map - .any(|pattern| IndexUidPattern::new_unchecked(pattern).matches_str(index)) + || map.keys().any(|pattern| pattern.matches_str(index)) } } } @@ -215,21 +193,26 @@ impl SearchRules { pub fn get_index_search_rules(&self, index: &str) -> Option { match self { Self::Set(set) => { - if set.contains("*") || set.contains(index) { + if self.is_index_authorized(index) { Some(IndexSearchRules::default()) } else { None } } Self::Map(map) => { - map.get(index).or_else(|| map.get("*")).map(|isr| isr.clone().unwrap_or_default()) + // We must take the most retrictive rule of this index uid patterns set of rules. + map.iter() + .filter(|(pattern, _)| pattern.matches_str(index)) + .max_by_key(|(pattern, _)| (pattern.is_exact(), pattern.len())) + .map(|(_, rule)| rule.clone()) + .flatten() } } } /// Return the list of indexes such that `self.is_index_authorized(index) == true`, /// or `None` if all indexes satisfy this condition. - pub fn authorized_indexes(&self) -> Option> { + pub fn authorized_indexes(&self) -> Option> { match self { SearchRules::Set(set) => { if set.contains("*") { @@ -250,7 +233,7 @@ impl SearchRules { } impl IntoIterator for SearchRules { - type Item = (String, IndexSearchRules); + type Item = (IndexUidPattern, IndexSearchRules); type IntoIter = Box>; fn into_iter(self) -> Self::IntoIter { diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs index 8cb50fee9..bc12da351 100644 --- a/meilisearch-types/src/index_uid_pattern.rs +++ b/meilisearch-types/src/index_uid_pattern.rs @@ -1,7 +1,10 @@ +use std::borrow::Borrow; use std::error::Error; use std::fmt; +use std::ops::Deref; use std::str::FromStr; +use deserr::DeserializeFromValue; use serde::{Deserialize, Serialize}; use crate::error::{Code, ErrorCode}; @@ -9,17 +12,25 @@ use crate::index_uid::{IndexUid, IndexUidFormatError}; /// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long and optionally ending with a *. 
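// A few illustrative cases, assuming the prefix semantics of `matches_str`:
//   "products_*"  matches "products_2023" (and "products_" itself);
//   "products"    matches only the exact uid "products";
//   "*"           (`IndexUidPattern::all()`) matches every index uid.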
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))] -pub struct IndexUidPattern( - #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}\\*?")))] String, -); +#[derive(Serialize, Deserialize, DeserializeFromValue, Debug, Clone, PartialEq, Eq, Hash)] +#[deserr(from(&String) = FromStr::from_str -> IndexUidPatternFormatError)] +pub struct IndexUidPattern(String); impl IndexUidPattern { pub fn new_unchecked(s: impl AsRef) -> Self { Self(s.as_ref().to_string()) } + /// Matches any index name. + pub fn all() -> Self { + IndexUidPattern::from_str("*").unwrap() + } + + /// Returns `true` if the pattern matches a specific index name. + pub fn is_exact(&self) -> bool { + !self.0.ends_with('*') + } + /// Returns wether this index uid matches this index uid pattern. pub fn matches(&self, uid: &IndexUid) -> bool { self.matches_str(uid.as_str()) @@ -34,7 +45,7 @@ impl IndexUidPattern { } } -impl std::ops::Deref for IndexUidPattern { +impl Deref for IndexUidPattern { type Target = str; fn deref(&self) -> &Self::Target { @@ -42,6 +53,12 @@ impl std::ops::Deref for IndexUidPattern { } } +impl Borrow for IndexUidPattern { + fn borrow(&self) -> &str { + &self.0 + } +} + impl TryFrom for IndexUidPattern { type Error = IndexUidPatternFormatError; diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index 50afa755c..a9e2e0889 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -47,7 +47,7 @@ pub struct CreateApiKey { #[deserr(error = DeserrError)] pub actions: Vec, #[deserr(error = DeserrError)] - pub indexes: Vec>, + pub indexes: Vec, #[deserr(error = DeserrError, default = None, from(&String) = parse_expiration_date -> TakeErrorMessage)] pub expires_at: Option, } @@ -109,7 +109,7 @@ pub struct Key { pub name: Option, pub uid: KeyId, pub actions: Vec, - pub indexes: Vec>, + pub indexes: Vec, #[serde(with = "time::serde::rfc3339::option")] pub expires_at: Option, #[serde(with = "time::serde::rfc3339")] @@ -127,7 +127,7 @@ impl Key { description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()), uid, actions: vec![Action::All], - indexes: vec![StarOr::Star], + indexes: vec![IndexUidPattern::all()], expires_at: None, created_at: now, updated_at: now, @@ -142,7 +142,7 @@ impl Key { description: Some("Use it to search from the frontend".to_string()), uid, actions: vec![Action::Search], - indexes: vec![StarOr::Star], + indexes: vec![IndexUidPattern::all()], expires_at: None, created_at: now, updated_at: now, diff --git a/meilisearch/src/extractors/authentication/mod.rs b/meilisearch/src/extractors/authentication/mod.rs index 8944b60d3..f1efdf9aa 100644 --- a/meilisearch/src/extractors/authentication/mod.rs +++ b/meilisearch/src/extractors/authentication/mod.rs @@ -230,7 +230,10 @@ pub mod policies { } } - return auth.get_key_filters(uid, Some(data.claims.search_rules)).ok(); + match auth.get_key_filters(uid, Some(data.claims.search_rules)) { + Ok(auth) if auth.search_rules.is_index_authorized() => Some(auth), + _ => None, + } } None From 5672165e444887e356bef5092265fbcb1b9fbadc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Feb 2023 17:02:17 +0000 Subject: [PATCH 101/186] Bump docker/build-push-action from 3 to 4 Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 3 to 4. 
- [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v3...v4) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/publish-docker-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index 3dd93b6eb..5d1b50f79 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -84,7 +84,7 @@ jobs: type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }} - name: Build and push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: push: true platforms: linux/amd64,linux/arm64 From a36b1dbd7054de44af48af813c2823648d4b190c Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 1 Feb 2023 18:21:45 +0100 Subject: [PATCH 102/186] Fix the tasks with the new patterns --- dump/src/reader/compat/v5_to_v6.rs | 6 ++-- dump/src/reader/v6/mod.rs | 1 - index-scheduler/src/lib.rs | 30 ++++++++++++++----- meilisearch-auth/src/lib.rs | 4 +-- meilisearch-auth/src/store.rs | 3 +- meilisearch-types/src/index_uid_pattern.rs | 5 ++++ meilisearch-types/src/keys.rs | 1 - .../src/extractors/authentication/mod.rs | 17 ++++++----- 8 files changed, 41 insertions(+), 26 deletions(-) diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 51858450e..3b348492d 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -181,10 +181,8 @@ impl CompatV5ToV6 { .indexes .into_iter() .map(|index| match index { - v5::StarOr::Star => v6::StarOr::Star, - v5::StarOr::Other(uid) => { - v6::StarOr::Other(v6::IndexUidPattern::new_unchecked(uid.as_str())) - } + v5::StarOr::Star => v6::IndexUidPattern::all(), + v5::StarOr::Other(uid) => v6::IndexUidPattern::new_unchecked(uid.as_str()), }) .collect(), expires_at: key.expires_at, diff --git a/dump/src/reader/v6/mod.rs b/dump/src/reader/v6/mod.rs index 77d7a52bc..f0ad81116 100644 --- a/dump/src/reader/v6/mod.rs +++ b/dump/src/reader/v6/mod.rs @@ -34,7 +34,6 @@ pub type PaginationSettings = meilisearch_types::settings::PaginationSettings; // everything related to the api keys pub type Action = meilisearch_types::keys::Action; -pub type StarOr = meilisearch_types::star_or::StarOr; pub type IndexUidPattern = meilisearch_types::index_uid_pattern::IndexUidPattern; // everything related to the errors diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 0b9e856d2..3599ac36a 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -43,6 +43,7 @@ use file_store::FileStore; use meilisearch_types::error::ResponseError; use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str}; use meilisearch_types::heed::{self, Database, Env, RoTxn}; +use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::milli; use meilisearch_types::milli::documents::DocumentsBatchBuilder; use meilisearch_types::milli::update::IndexerConfig; @@ -617,7 +618,7 @@ impl IndexScheduler { &self, rtxn: &RoTxn, query: &Query, - authorized_indexes: &Option>, + authorized_indexes: &Option>, ) -> Result { let mut tasks = self.get_task_ids(rtxn, query)?; @@ -635,7 +636,7 @@ impl IndexScheduler { let 
all_indexes_iter = self.index_tasks.iter(rtxn)?; for result in all_indexes_iter { let (index, index_tasks) = result?; - if !authorized_indexes.contains(&index.to_owned()) { + if !authorized_indexes.iter().any(|p| p.matches_str(index)) { tasks -= index_tasks; } } @@ -655,7 +656,7 @@ impl IndexScheduler { pub fn get_tasks_from_authorized_indexes( &self, query: Query, - authorized_indexes: Option>, + authorized_indexes: Option>, ) -> Result> { let rtxn = self.env.read_txn()?; @@ -2503,7 +2504,11 @@ mod tests { let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() }; let tasks = index_scheduler - .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()])) + .get_task_ids_from_authorized_indexes( + &rtxn, + &query, + &Some(vec![IndexUidPattern::new_unchecked("doggo")]), + ) .unwrap(); // we have asked for only the tasks associated with catto, but are only authorized to retrieve the tasks // associated with doggo -> empty result @@ -2511,7 +2516,11 @@ mod tests { let query = Query::default(); let tasks = index_scheduler - .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()])) + .get_task_ids_from_authorized_indexes( + &rtxn, + &query, + &Some(vec![IndexUidPattern::new_unchecked("doggo")]), + ) .unwrap(); // we asked for all the tasks, but we are only authorized to retrieve the doggo tasks // -> only the index creation of doggo should be returned @@ -2522,7 +2531,10 @@ mod tests { .get_task_ids_from_authorized_indexes( &rtxn, &query, - &Some(vec!["catto".to_owned(), "doggo".to_owned()]), + &Some(vec![ + IndexUidPattern::new_unchecked("catto"), + IndexUidPattern::new_unchecked("doggo"), + ]), ) .unwrap(); // we asked for all the tasks, but we are only authorized to retrieve the doggo and catto tasks @@ -2570,7 +2582,11 @@ mod tests { let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() }; let tasks = index_scheduler - .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_string()])) + .get_task_ids_from_authorized_indexes( + &rtxn, + &query, + &Some(vec![IndexUidPattern::new_unchecked("doggo")]), + ) .unwrap(); // Return only 1 because the user is not authorized to see task 2 snapshot!(snapshot_bitmap(&tasks), @"[1,]"); diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 287dda0ce..6b8b6ef9e 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -3,7 +3,6 @@ pub mod error; mod store; use std::collections::{HashMap, HashSet}; -use std::ops::Deref; use std::path::Path; use std::sync::Arc; @@ -11,7 +10,6 @@ use error::{AuthControllerError, Result}; use maplit::hashset; use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey}; -use meilisearch_types::star_or::StarOr; use serde::{Deserialize, Serialize}; pub use store::open_auth_store_env; use store::{generate_key_as_hexa, HeedAuthStore}; @@ -192,7 +190,7 @@ impl SearchRules { pub fn get_index_search_rules(&self, index: &str) -> Option { match self { - Self::Set(set) => { + Self::Set(_) => { if self.is_index_authorized(index) { Some(IndexSearchRules::default()) } else { diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index d1c2562c1..79a1631a4 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -14,7 +14,6 @@ use meilisearch_types::keys::KeyId; use meilisearch_types::milli; use meilisearch_types::milli::heed::types::{ByteSlice, 
DecodeIgnore, SerdeJson}; use meilisearch_types::milli::heed::{Database, Env, EnvOpenOptions, RwTxn}; -use meilisearch_types::star_or::StarOr; use sha2::Sha256; use time::OffsetDateTime; use uuid::fmt::Hyphenated; @@ -126,7 +125,7 @@ impl HeedAuthStore { } } - let no_index_restriction = key.indexes.contains(&StarOr::Star); + let no_index_restriction = key.indexes.iter().any(|p| p.matches_all()); for action in actions { if no_index_restriction { // If there is no index restriction we put None. diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs index bc12da351..88e0292f2 100644 --- a/meilisearch-types/src/index_uid_pattern.rs +++ b/meilisearch-types/src/index_uid_pattern.rs @@ -26,6 +26,11 @@ impl IndexUidPattern { IndexUidPattern::from_str("*").unwrap() } + /// Returns `true` if it matches any index. + pub fn matches_all(&self) -> bool { + self.0 == "*" + } + /// Returns `true` if the pattern matches a specific index name. pub fn is_exact(&self) -> bool { !self.0.ends_with('*') diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index a9e2e0889..f594640d9 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -13,7 +13,6 @@ use uuid::Uuid; use crate::error::deserr_codes::*; use crate::error::{unwrap_any, Code, DeserrError, ErrorCode, TakeErrorMessage}; use crate::index_uid_pattern::{IndexUidPattern, IndexUidPatternFormatError}; -use crate::star_or::StarOr; pub type KeyId = Uuid; diff --git a/meilisearch/src/extractors/authentication/mod.rs b/meilisearch/src/extractors/authentication/mod.rs index f1efdf9aa..4836679a9 100644 --- a/meilisearch/src/extractors/authentication/mod.rs +++ b/meilisearch/src/extractors/authentication/mod.rs @@ -199,6 +199,9 @@ pub mod policies { token: &str, index: Option<&str>, ) -> Option { + // Tenant token will always define an index. + let index = index?; + // Only search action can be accessed by a tenant token. if A != actions::SEARCH { return None; @@ -206,7 +209,7 @@ pub mod policies { let uid = extract_key_id(token)?; // check if parent key is authorized to do the action. - if auth.is_key_authorized(uid, Action::Search, index).ok()? { + if auth.is_key_authorized(uid, Action::Search, Some(index)).ok()? { // Check if tenant token is valid. let key = auth.generate_key(uid)?; let data = decode::( @@ -217,10 +220,8 @@ pub mod policies { .ok()?; // Check index access if an index restriction is provided. - if let Some(index) = index { - if !data.claims.search_rules.is_index_authorized(index) { - return None; - } + if !data.claims.search_rules.is_index_authorized(index) { + return None; } // Check if token is expired. @@ -230,10 +231,10 @@ pub mod policies { } } - match auth.get_key_filters(uid, Some(data.claims.search_rules)) { - Ok(auth) if auth.search_rules.is_index_authorized() => Some(auth), + return match auth.get_key_filters(uid, Some(data.claims.search_rules)) { + Ok(auth) if auth.search_rules.is_index_authorized(index) => Some(auth), _ => None, - } + }; } None From 5c525168a08ec6a7194cc9897a9720adc55eb026 Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 13:42:21 +0200 Subject: [PATCH 103/186] Add _geoBoundingBox parser --- filter-parser/src/lib.rs | 51 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 04037d061..65c4c56f8 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -18,6 +18,7 @@ //! 
doubleQuoted = "\"" .* all but double quotes "\"" //! word = (alphanumeric | _ | - | .)+ //! geoRadius = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")" +//! geoBoundingBox = "_geoBoundingBox((" WS * float WS* "," WS* float WS* "), (" WS* float WS* "," WS* float WS* ")") //! ``` //! //! Other BNF grammar used to handle some specific errors: @@ -130,6 +131,7 @@ pub enum FilterCondition<'a> { Or(Vec), And(Vec), GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> }, + GeoBoundingBox { top_left_point: [Token<'a>; 2], bottom_right_point: [Token<'a>; 2]}, } impl<'a> FilterCondition<'a> { @@ -325,6 +327,49 @@ fn parse_geo_radius(input: Span) -> IResult { Ok((input, res)) } +/// geoBoundingBox = WS* "_geoBoundingBox((float WS* "," WS* float WS* "), (float WS* "," WS* float WS* ")") +/// If we parse `_geoBoundingBox` we MUST parse the rest of the expression. +fn parse_geo_bounding_box(input: Span) -> IResult { + // we want to allow space BEFORE the _geoBoundingBox but not after + let parsed = preceded( + tuple((multispace0, word_exact("_geoBoundingBox"))), + // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure + cut( + delimited( + char('('), + separated_list1( + tag(","), + ws( + delimited( + char('('), + separated_list1( + tag(","), + ws(recognize_float) + ), + char(')') + ) + ) + ), + char(')') + ) + ), + )(input) + .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::Geo))); + + let (input, args) = parsed?; + + if args.len() != 2 { + return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::Geo))); + } + + //TODO: Check sub array length + let res = FilterCondition::GeoBoundingBox { + top_left_point: [args[0][0].into(), args[0][1].into()], + bottom_right_point: [args[1][0].into(), args[1][1].into()] + }; + Ok((input, res)) +} + /// geoPoint = WS* "_geoPoint(float WS* "," WS* float WS* "," WS* float) fn parse_geo_point(input: Span) -> IResult { // we want to forbid space BEFORE the _geoPoint but not after @@ -367,6 +412,7 @@ fn parse_primary(input: Span, depth: usize) -> IResult { }), ), parse_geo_radius, + parse_geo_bounding_box, parse_in, parse_not_in, parse_condition, @@ -512,7 +558,7 @@ pub mod tests { insta::assert_display_snapshot!(p("channel = "), @r###" Was expecting a value but instead got nothing. 
- 14:14 channel = + 14:14 channel = "###); insta::assert_display_snapshot!(p("channel = 🐻"), @r###" @@ -715,6 +761,9 @@ impl<'a> std::fmt::Display for FilterCondition<'a> { FilterCondition::GeoLowerThan { point, radius } => { write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius) } + FilterCondition::GeoBoundingBox { top_left_point, bottom_right_point } => { + write!(f, "_geoBoundingBox(({}, {}), ({}, {}))", top_left_point[0], top_left_point[1], bottom_right_point[0], bottom_right_point[1]) + } } } } From b078477d806a6e076b273802653daafb7b598b46 Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 15:30:53 +0200 Subject: [PATCH 104/186] Add error handling and earth lap collision with bounding box --- filter-parser/src/error.rs | 16 +++-- filter-parser/src/lib.rs | 55 ++++++++-------- filter-parser/src/value.rs | 18 ++++-- milli/src/asc_desc.rs | 3 + milli/src/criterion.rs | 2 + milli/src/search/facet/filter.rs | 108 +++++++++++++++++++++++++++++++ 6 files changed, 165 insertions(+), 37 deletions(-) diff --git a/filter-parser/src/error.rs b/filter-parser/src/error.rs index ea95caba7..aaf1a2e36 100644 --- a/filter-parser/src/error.rs +++ b/filter-parser/src/error.rs @@ -57,8 +57,10 @@ pub enum ExpectedValueKind { #[derive(Debug)] pub enum ErrorKind<'a> { ReservedGeo(&'a str), - Geo, - MisusedGeo, + GeoRadius, + GeoBoundingBox, + MisusedGeoRadius, + MisusedGeoBoundingBox, InvalidPrimary, ExpectedEof, ExpectedValue(ExpectedValueKind), @@ -150,15 +152,21 @@ impl<'a> Display for Error<'a> { ErrorKind::ExpectedEof => { writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)? } - ErrorKind::Geo => { + ErrorKind::GeoRadius => { writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")? } + ErrorKind::GeoBoundingBox => { + writeln!(f, "The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.")? + } ErrorKind::ReservedGeo(name) => { writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates.", name.escape_debug())? } - ErrorKind::MisusedGeo => { + ErrorKind::MisusedGeoRadius => { writeln!(f, "The `_geoRadius` filter is an operation and can't be used as a value.")? } + ErrorKind::MisusedGeoBoundingBox => { + writeln!(f, "The `_geoBoundingBox` filter is an operation and can't be used as a value.")? + } ErrorKind::ReservedKeyword(word) => { writeln!(f, "`{word}` is a reserved keyword and thus cannot be used as a field name unless it is put inside quotes. Use \"{word}\" or \'{word}\' instead.")? } diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 65c4c56f8..89e80a267 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -45,6 +45,7 @@ mod error; mod value; use std::fmt::Debug; +use std::str::FromStr; pub use condition::{parse_condition, parse_to, Condition}; use condition::{parse_exists, parse_not_exists}; @@ -71,7 +72,7 @@ const MAX_FILTER_DEPTH: usize = 200; #[derive(Debug, Clone, Eq)] pub struct Token<'a> { /// The token in the original input, it should be used when possible. - span: Span<'a>, + pub span: Span<'a>, /// If you need to modify the original input you can use the `value` field /// to store your modified input. 
value: Option, @@ -101,7 +102,7 @@ impl<'a> Token<'a> { } pub fn parse_finite_float(&self) -> Result { - let value: f64 = self.span.parse().map_err(|e| self.as_external_error(e))?; + let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?; if value.is_finite() { Ok(value) } else { @@ -131,7 +132,7 @@ pub enum FilterCondition<'a> { Or(Vec), And(Vec), GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> }, - GeoBoundingBox { top_left_point: [Token<'a>; 2], bottom_right_point: [Token<'a>; 2]}, + GeoBoundingBox { top_left_point: [Token<'a>; 2], bottom_right_point: [Token<'a>; 2] }, } impl<'a> FilterCondition<'a> { @@ -312,12 +313,12 @@ fn parse_geo_radius(input: Span) -> IResult { // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure cut(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))), )(input) - .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::Geo))); + .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoRadius))); let (input, args) = parsed?; if args.len() != 3 { - return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::Geo))); + return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoRadius))); } let res = FilterCondition::GeoLowerThan { @@ -334,38 +335,27 @@ fn parse_geo_bounding_box(input: Span) -> IResult { let parsed = preceded( tuple((multispace0, word_exact("_geoBoundingBox"))), // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure - cut( - delimited( - char('('), - separated_list1( - tag(","), - ws( - delimited( - char('('), - separated_list1( - tag(","), - ws(recognize_float) - ), - char(')') - ) - ) - ), - char(')') - ) - ), + cut(delimited( + char('('), + separated_list1( + tag(","), + ws(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))), + ), + char(')'), + )), )(input) - .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::Geo))); + .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoBoundingBox))); let (input, args) = parsed?; - if args.len() != 2 { - return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::Geo))); + if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 { + return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoBoundingBox))); } //TODO: Check sub array length let res = FilterCondition::GeoBoundingBox { top_left_point: [args[0][0].into(), args[0][1].into()], - bottom_right_point: [args[1][0].into(), args[1][1].into()] + bottom_right_point: [args[1][0].into(), args[1][1].into()], }; Ok((input, res)) } @@ -762,7 +752,14 @@ impl<'a> std::fmt::Display for FilterCondition<'a> { write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius) } FilterCondition::GeoBoundingBox { top_left_point, bottom_right_point } => { - write!(f, "_geoBoundingBox(({}, {}), ({}, {}))", top_left_point[0], top_left_point[1], bottom_right_point[0], bottom_right_point[1]) + write!( + f, + "_geoBoundingBox(({}, {}), ({}, {}))", + top_left_point[0], + top_left_point[1], + bottom_right_point[0], + bottom_right_point[1] + ) } } } diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs index 73ef61480..abdc10439 100644 --- a/filter-parser/src/value.rs +++ b/filter-parser/src/value.rs @@ -6,7 +6,7 @@ use nom::sequence::{delimited, terminated}; use nom::{InputIter, InputLength, InputTake, Slice}; use crate::error::{ExpectedValueKind, NomErrorExt}; -use crate::{parse_geo_point, parse_geo_radius, 
Error, ErrorKind, IResult, Span, Token}; +use crate::{parse_geo_point, parse_geo_radius, parse_geo_bounding_box, Error, ErrorKind, IResult, Span, Token}; /// This function goes through all characters in the [Span] if it finds any escaped character (`\`). /// It generates a new string with all `\` removed from the [Span]. @@ -91,11 +91,21 @@ pub fn parse_value(input: Span) -> IResult { } } match parse_geo_radius(input) { - Ok(_) => return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeo))), + Ok(_) => return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius))), // if we encountered a failure it means the user badly wrote a _geoRadius filter. // But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value. Err(e) if e.is_failure() => { - return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeo))) + return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius))) + } + _ => (), + } + + match parse_geo_bounding_box(input) { + Ok(_) => return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoBoundingBox))), + // if we encountered a failure it means the user badly wrote a _geoBoundingBox filter. + // But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value. + Err(e) if e.is_failure() => { + return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoBoundingBox))) } _ => (), } @@ -155,7 +165,7 @@ fn is_syntax_component(c: char) -> bool { } fn is_keyword(s: &str) -> bool { - matches!(s, "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "_geoRadius") + matches!(s, "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "_geoRadius" | "_geoBoundingBox") } #[cfg(test)] diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index 21065da36..826290c8a 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -55,6 +55,9 @@ impl From for CriterionError { AscDescError::ReservedKeyword { name } if name.starts_with("_geoRadius") => { CriterionError::ReservedNameForFilter { name: "_geoRadius".to_string() } } + AscDescError::ReservedKeyword { name } if name.starts_with("_geoBoundingBox") => { + CriterionError::ReservedNameForFilter { name: "_geoBoundingBox".to_string() } + } AscDescError::ReservedKeyword { name } => CriterionError::ReservedName { name }, } } diff --git a/milli/src/criterion.rs b/milli/src/criterion.rs index c02cd2525..4544a97ac 100644 --- a/milli/src/criterion.rs +++ b/milli/src/criterion.rs @@ -159,6 +159,8 @@ mod tests { ("_geoPoint(42, 75):asc", ReservedNameForSort { name: S("_geoPoint") }), ("_geoRadius:asc", ReservedNameForFilter { name: S("_geoRadius") }), ("_geoRadius(42, 75, 59):asc", ReservedNameForFilter { name: S("_geoRadius") }), + ("_geoBoundingBox:asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), + ("_geoBoundinxBox((42, 75), (75, 59)):asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), ]; for (input, expected) in invalid_criteria { diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 23cbb280c..b44db29e4 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -385,6 +385,114 @@ impl<'a> Filter<'a> { }))? 
} } + FilterCondition::GeoBoundingBox { top_left_point, bottom_right_point } => { + if filterable_fields.contains("_geo") { + let top_left: [f64; 2] = [ + top_left_point[0].parse_finite_float()?, + top_left_point[1].parse_finite_float()?, + ]; + let bottom_right: [f64; 2] = [ + bottom_right_point[0].parse_finite_float()?, + bottom_right_point[1].parse_finite_float()?, + ]; + if !(-90.0..=90.0).contains(&top_left[0]) { + return Err(top_left_point[0] + .as_external_error(FilterError::BadGeoLat(top_left[0])))?; + } + if !(-180.0..=180.0).contains(&top_left[1]) { + return Err(top_left_point[1] + .as_external_error(FilterError::BadGeoLng(top_left[1])))?; + } + if !(-90.0..=90.0).contains(&bottom_right[0]) { + return Err(bottom_right_point[0] + .as_external_error(FilterError::BadGeoLat(bottom_right[0])))?; + } + if !(-180.0..=180.0).contains(&bottom_right[1]) { + return Err(bottom_right_point[1] + .as_external_error(FilterError::BadGeoLng(bottom_right[1])))?; + } + + let geo_lat_token = + Token::new(top_left_point[0].span, Some("_geo.lat".to_string())); + + let condition_lat = FilterCondition::Condition { + fid: geo_lat_token, + op: Condition::Between { + from: bottom_right_point[0].clone(), + to: top_left_point[0].clone(), + }, + }; + + let selected_lat = Filter { condition: condition_lat }.inner_evaluate( + rtxn, + index, + filterable_fields, + )?; + + let geo_lng_token = + Token::new(top_left_point[1].span, Some("_geo.lng".to_string())); + let min_lng_token = + Token::new(top_left_point[1].span, Some("-180.0".to_string())); + let max_lng_token = + Token::new(top_left_point[1].span, Some("180.0".to_string())); + + let selected_lng = if top_left[1] > bottom_right[1] { + dbg!("test"); + + let condition_left = FilterCondition::Condition { + fid: geo_lng_token.clone(), + op: Condition::Between { + from: dbg!(top_left_point[1].clone()), + to: max_lng_token, + }, + }; + let left = Filter { condition: condition_left }.inner_evaluate( + rtxn, + index, + filterable_fields, + )?; + + let condition_right = FilterCondition::Condition { + fid: geo_lng_token, + op: Condition::Between { + from: dbg!(min_lng_token), + to: dbg!(bottom_right_point[1].clone()), + }, + }; + let right = Filter { condition: condition_right }.inner_evaluate( + rtxn, + index, + filterable_fields, + )?; + + dbg!(&left); + dbg!(&right); + dbg!(left | right) + } else { + let condition_lng = FilterCondition::Condition { + fid: geo_lng_token, + op: Condition::Between { + from: top_left_point[1].clone(), + to: bottom_right_point[1].clone(), + }, + }; + Filter { condition: condition_lng }.inner_evaluate( + rtxn, + index, + filterable_fields, + )? + }; + + dbg!(&selected_lng); + + Ok(selected_lat & selected_lng) + } else { + Err(top_left_point[0].as_external_error(FilterError::AttributeNotFilterable { + attribute: "_geo", + filterable_fields: filterable_fields.clone(), + }))? 
+ } + } } } } From 426d63b01bb0e282bcffce518909416314d7e1ef Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 18:11:11 +0200 Subject: [PATCH 105/186] Update insta test suite --- filter-parser/src/error.rs | 6 +++--- filter-parser/src/lib.rs | 34 +++++++++++++++++++++++------- meilisearch/tests/search/errors.rs | 4 ++-- milli/src/asc_desc.rs | 3 +++ 4 files changed, 34 insertions(+), 13 deletions(-) diff --git a/filter-parser/src/error.rs b/filter-parser/src/error.rs index aaf1a2e36..70018c3d9 100644 --- a/filter-parser/src/error.rs +++ b/filter-parser/src/error.rs @@ -144,10 +144,10 @@ impl<'a> Display for Error<'a> { writeln!(f, "Expression `{}` is missing the following closing delimiter: `{}`.", escaped_input, c)? } ErrorKind::InvalidPrimary if input.trim().is_empty() => { - writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` but instead got nothing.")? + writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.")? } ErrorKind::InvalidPrimary => { - writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `{}`.", escaped_input)? + writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `{}`.", escaped_input)? } ErrorKind::ExpectedEof => { writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)? @@ -159,7 +159,7 @@ impl<'a> Display for Error<'a> { writeln!(f, "The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.")? } ErrorKind::ReservedGeo(name) => { - writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates.", name.escape_debug())? + writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates.", name.escape_debug())? } ErrorKind::MisusedGeoRadius => { writeln!(f, "The `_geoRadius` filter is an operation and can't be used as a value.")? 
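To make the new `_geoBoundingBox` syntax concrete before the remaining fixups, here is a minimal sketch that is not part of the patch series. It assumes the filter-parser crate is importable as `filter_parser` and drives the public `FilterCondition::parse` entry point used by the tests above; tokens print between braces, matching the insta snapshots shown in PATCH 103.

// A minimal sketch, assuming `filter_parser::FilterCondition::parse`
// behaves as the snapshot tests in this series suggest.
fn main() {
    let condition = filter_parser::FilterCondition::parse("_geoBoundingBox((12, 13), (14, 15))")
        .unwrap() // the input is syntactically valid
        .unwrap(); // the filter is not empty
    // Round-trips through the `Display` implementation added in PATCH 103.
    assert_eq!(condition.to_string(), "_geoBoundingBox(({12}, {13}), ({14}, {15}))");
}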
diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 89e80a267..6274964bd 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -45,7 +45,6 @@ mod error; mod value; use std::fmt::Debug; -use std::str::FromStr; pub use condition::{parse_condition, parse_to, Condition}; use condition::{parse_exists, parse_not_exists}; @@ -505,6 +504,10 @@ pub mod tests { insta::assert_display_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})"); insta::assert_display_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))"); + // Test geo bounding box + insta::assert_display_snapshot!(p("_geoBoundingBox((12, 13), (14, 15))"), @"_geoBoundingBox(({12}, {13}), ({14}, {15}))"); + insta::assert_display_snapshot!(p("NOT _geoBoundingBox((12, 13), (14, 15))"), @"NOT (_geoBoundingBox(({12}, {13}), ({14}, {15})))"); + // Test OR + AND insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]"); insta::assert_display_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]"); @@ -562,7 +565,7 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("'OR'"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `\'OR\'`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`. 1:5 'OR' "###); @@ -572,12 +575,12 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("channel Ponce"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `channel Ponce`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`. 1:14 channel Ponce "###); insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` but instead got nothing. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing. 19:19 channel = Ponce OR "###); @@ -591,13 +594,28 @@ pub mod tests { 1:16 _geoRadius = 12 "###); + insta::assert_display_snapshot!(p("_geoBoundingBox"), @r###" + The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`. + 1:16 _geoBoundingBox + "###); + + insta::assert_display_snapshot!(p("_geoBoundingBox = 12"), @r###" + The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`. + 1:21 _geoBoundingBox = 12 + "###); + + insta::assert_display_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###" + The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`. + 1:26 _geoBoundingBox(1.0, 1.0) + "###); + insta::assert_display_snapshot!(p("_geoPoint(12, 13, 14)"), @r###" - `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates. 
+ `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates. 1:22 _geoPoint(12, 13, 14) "###); insta::assert_display_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###" - `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates. + `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates. 13:34 position <= _geoPoint(12, 13, 14) "###); @@ -627,12 +645,12 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `colour NOT EXIST`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`. 1:17 colour NOT EXIST "###); insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `subscribers 100 TO1000`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`. 1:23 subscribers 100 TO1000 "###); diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index d582a3672..2c02dc0a3 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -415,7 +415,7 @@ async fn filter_invalid_syntax_object() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-filter" @@ -440,7 +440,7 @@ async fn filter_invalid_syntax_array() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-search-filter" diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index 826290c8a..a460be503 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -193,6 +193,9 @@ impl From for SortError { AscDescError::ReservedKeyword { name } if name.starts_with("_geoRadius") => { SortError::ReservedNameForFilter { name: String::from("_geoRadius") } } + AscDescError::ReservedKeyword { name } if 
name.starts_with("_geoBoundingBox") => { + SortError::ReservedNameForFilter { name: String::from("_geoBoundingBox") } + } AscDescError::ReservedKeyword { name } => SortError::ReservedName { name }, } } From 65a3086cf11461054e9eb0de5d5d31dfd80c3f9e Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 18:22:26 +0200 Subject: [PATCH 106/186] fix test --- milli/src/asc_desc.rs | 2 +- milli/src/criterion.rs | 2 +- milli/src/error.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index a460be503..444819c34 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -92,7 +92,7 @@ impl FromStr for Member { Ok(Member::Geo([lat, lng])) } None => { - if is_reserved_keyword(text) || text.starts_with("_geoRadius(") { + if is_reserved_keyword(text) || text.starts_with("_geoRadius(") || text.starts_with("_geoBoundingBox(") { return Err(AscDescError::ReservedKeyword { name: text.to_string() })?; } Ok(Member::Field(text.to_string())) diff --git a/milli/src/criterion.rs b/milli/src/criterion.rs index 4544a97ac..23e2ac7f1 100644 --- a/milli/src/criterion.rs +++ b/milli/src/criterion.rs @@ -160,7 +160,7 @@ mod tests { ("_geoRadius:asc", ReservedNameForFilter { name: S("_geoRadius") }), ("_geoRadius(42, 75, 59):asc", ReservedNameForFilter { name: S("_geoRadius") }), ("_geoBoundingBox:asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), - ("_geoBoundinxBox((42, 75), (75, 59)):asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), + ("_geoBoundingBox((42, 75), (75, 59)):asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), ]; for (input, expected) in invalid_criteria { diff --git a/milli/src/error.rs b/milli/src/error.rs index 8734cb540..92c238814 100644 --- a/milli/src/error.rs +++ b/milli/src/error.rs @@ -11,7 +11,7 @@ use crate::documents::{self, DocumentsBatchCursorError}; use crate::{CriterionError, DocumentId, FieldId, Object, SortError}; pub fn is_reserved_keyword(keyword: &str) -> bool { - ["_geo", "_geoDistance", "_geoPoint", "_geoRadius"].contains(&keyword) + ["_geo", "_geoDistance", "_geoPoint", "_geoRadius", "_geoBoundingBox"].contains(&keyword) } #[derive(Error, Debug)] From b2054d3f6c211cc19c6260a93c7b3a40260f3890 Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 18:27:43 +0200 Subject: [PATCH 107/186] Add insta test on geo filters whitespacing --- filter-parser/src/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 6274964bd..d30c1b58e 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -503,10 +503,12 @@ pub mod tests { // Test geo radius insta::assert_display_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})"); insta::assert_display_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))"); + insta::assert_display_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})"); // Test geo bounding box insta::assert_display_snapshot!(p("_geoBoundingBox((12, 13), (14, 15))"), @"_geoBoundingBox(({12}, {13}), ({14}, {15}))"); insta::assert_display_snapshot!(p("NOT _geoBoundingBox((12, 13), (14, 15))"), @"NOT (_geoBoundingBox(({12}, {13}), ({14}, {15})))"); + insta::assert_display_snapshot!(p("_geoBoundingBox((12,13),(14,15))"), @"_geoBoundingBox(({12}, {13}), ({14}, {15}))"); // Test OR + AND insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese 
mountain}, ]"); From 0d71c80ba69a3a0e3758d8df63009eca632a7425 Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 19:01:23 +0200 Subject: [PATCH 108/186] add tests --- milli/src/search/facet/filter.rs | 112 ++++++++++++++++++++++++++++--- 1 file changed, 102 insertions(+), 10 deletions(-) diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index b44db29e4..15edb1249 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -437,12 +437,10 @@ impl<'a> Filter<'a> { Token::new(top_left_point[1].span, Some("180.0".to_string())); let selected_lng = if top_left[1] > bottom_right[1] { - dbg!("test"); - let condition_left = FilterCondition::Condition { fid: geo_lng_token.clone(), op: Condition::Between { - from: dbg!(top_left_point[1].clone()), + from: top_left_point[1].clone(), to: max_lng_token, }, }; @@ -455,8 +453,8 @@ impl<'a> Filter<'a> { let condition_right = FilterCondition::Condition { fid: geo_lng_token, op: Condition::Between { - from: dbg!(min_lng_token), - to: dbg!(bottom_right_point[1].clone()), + from: min_lng_token, + to: bottom_right_point[1].clone(), }, }; let right = Filter { condition: condition_right }.inner_evaluate( @@ -465,9 +463,7 @@ impl<'a> Filter<'a> { filterable_fields, )?; - dbg!(&left); - dbg!(&right); - dbg!(left | right) + left | right } else { let condition_lng = FilterCondition::Condition { fid: geo_lng_token, @@ -483,8 +479,6 @@ impl<'a> Filter<'a> { )? }; - dbg!(&selected_lng); - Ok(selected_lat & selected_lng) } else { Err(top_left_point[0].as_external_error(FilterError::AttributeNotFilterable { @@ -610,6 +604,12 @@ mod tests { "Attribute `_geo` is not filterable. This index does not have configured filterable attributes." )); + let filter = Filter::from_str("_geoBoundingBox((42, 150), (30, 10))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().starts_with( + "Attribute `_geo` is not filterable. This index does not have configured filterable attributes." + )); + let filter = Filter::from_str("dog = \"bernese mountain\"").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().starts_with( @@ -632,6 +632,12 @@ mod tests { "Attribute `_geo` is not filterable. Available filterable attributes are: `title`." )); + let filter = Filter::from_str("_geoBoundingBox((42, 150), (30, 10))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().starts_with( + "Attribute `_geo` is not filterable. Available filterable attributes are: `title`." + )); + let filter = Filter::from_str("name = 12").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().starts_with( @@ -783,6 +789,92 @@ mod tests { )); } + #[test] + fn geo_bounding_box_error() { + let index = TempIndex::new(); + + index + .update_settings(|settings| { + settings.set_searchable_fields(vec![S("_geo"), S("price")]); // to keep the fields order + settings.set_filterable_fields(hashset! { S("_geo"), S("price") }); + }) + .unwrap(); + + let rtxn = index.read_txn().unwrap(); + + // geoboundingbox top left coord have a bad latitude + let filter = + Filter::from_str("_geoBoundingBox((-90.0000001, 150), (30, 10))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!( + error.to_string().starts_with( + "Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees." 
+ ), + "{}", + error.to_string() + ); + + // geoboundingbox top left coord have a bad latitude + let filter = + Filter::from_str("_geoBoundingBox((90.0000001, 150), (30, 10))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!( + error.to_string().starts_with( + "Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees." + ), + "{}", + error.to_string() + ); + + // geoboundingbox bottom right coord have a bad latitude + let filter = + Filter::from_str("_geoBoundingBox((30, 10), (-90.0000001, 150))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().contains( + "Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees." + )); + + // geoboundingbox bottom right coord have a bad latitude + let filter = + Filter::from_str("_geoBoundingBox((30, 10), (90.0000001, 150))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().contains( + "Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees." + )); + + // geoboundingbox top left coord have a bad longitude + let filter = + Filter::from_str("_geoBoundingBox((-10, 180.000001), (30, 10))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().contains( + "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees." + )); + + // geoboundingbox top left coord have a bad longitude + let filter = + Filter::from_str("_geoBoundingBox((-10, -180.000001), (30, 10))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().contains( + "Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees." + )); + + // geoboundingbox bottom right coord have a bad longitude + let filter = + Filter::from_str("_geoBoundingBox((30, 10), (-10, -180.000001))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().contains( + "Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees." + )); + + // geoboundingbox bottom right coord have a bad longitude + let filter = + Filter::from_str("_geoBoundingBox((30, 10), (-10, 180.000001))").unwrap().unwrap(); + let error = filter.evaluate(&rtxn, &index).unwrap_err(); + assert!(error.to_string().contains( + "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees." + )); + } + #[test] fn filter_depth() { // generates a big (2 MiB) filter with too much of ORs. 
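Before the formatting pass below, a standalone sketch may help make the longitude handling tested above concrete. This is not code from the patches: it restates, with hypothetical names, the split that the `GeoBoundingBox` evaluation in PATCH 104 performs when a box crosses the 180th meridian (top-left longitude greater than bottom-right longitude).

// Hypothetical helper mirroring the evaluation logic; not part of the patches.
fn lng_ranges(top_left_lng: f64, bottom_right_lng: f64) -> Vec<(f64, f64)> {
    if top_left_lng > bottom_right_lng {
        // The box wraps around the earth: select [lng, 180] OR [-180, lng].
        vec![(top_left_lng, 180.0), (-180.0, bottom_right_lng)]
    } else {
        // Common case: a single longitude range.
        vec![(top_left_lng, bottom_right_lng)]
    }
}

fn main() {
    // Wraps around the antimeridian, like `_geoBoundingBox((0, 170), (0, -170))`.
    assert_eq!(lng_ranges(170.0, -170.0), vec![(170.0, 180.0), (-180.0, -170.0)]);
    // Does not wrap.
    assert_eq!(lng_ranges(-5.0, 5.0), vec![(-5.0, 5.0)]);
}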
From b297b5deb0b0f93d89e4b1bb3a535770e84c5e8d Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Fri, 28 Oct 2022 19:10:58 +0200 Subject: [PATCH 109/186] cargo fmt --- filter-parser/src/value.rs | 21 +++++++++++++++++---- milli/src/asc_desc.rs | 5 ++++- milli/src/criterion.rs | 5 ++++- 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs index abdc10439..d08a12a92 100644 --- a/filter-parser/src/value.rs +++ b/filter-parser/src/value.rs @@ -6,7 +6,10 @@ use nom::sequence::{delimited, terminated}; use nom::{InputIter, InputLength, InputTake, Slice}; use crate::error::{ExpectedValueKind, NomErrorExt}; -use crate::{parse_geo_point, parse_geo_radius, parse_geo_bounding_box, Error, ErrorKind, IResult, Span, Token}; +use crate::{ + parse_geo_bounding_box, parse_geo_point, parse_geo_radius, Error, ErrorKind, IResult, Span, + Token, +}; /// This function goes through all characters in the [Span] if it finds any escaped character (`\`). /// It generates a new string with all `\` removed from the [Span]. @@ -91,7 +94,9 @@ pub fn parse_value(input: Span) -> IResult { } } match parse_geo_radius(input) { - Ok(_) => return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius))), + Ok(_) => { + return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius))) + } // if we encountered a failure it means the user badly wrote a _geoRadius filter. // But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value. Err(e) if e.is_failure() => { @@ -101,11 +106,19 @@ pub fn parse_value(input: Span) -> IResult { } match parse_geo_bounding_box(input) { - Ok(_) => return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoBoundingBox))), + Ok(_) => { + return Err(nom::Err::Failure(Error::new_from_kind( + input, + ErrorKind::MisusedGeoBoundingBox, + ))) + } // if we encountered a failure it means the user badly wrote a _geoBoundingBox filter. // But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value. 
Err(e) if e.is_failure() => { - return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoBoundingBox))) + return Err(nom::Err::Failure(Error::new_from_kind( + input, + ErrorKind::MisusedGeoBoundingBox, + ))) } _ => (), } diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index 444819c34..ebb28c27d 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -92,7 +92,10 @@ impl FromStr for Member { Ok(Member::Geo([lat, lng])) } None => { - if is_reserved_keyword(text) || text.starts_with("_geoRadius(") || text.starts_with("_geoBoundingBox(") { + if is_reserved_keyword(text) + || text.starts_with("_geoRadius(") + || text.starts_with("_geoBoundingBox(") + { return Err(AscDescError::ReservedKeyword { name: text.to_string() })?; } Ok(Member::Field(text.to_string())) diff --git a/milli/src/criterion.rs b/milli/src/criterion.rs index 23e2ac7f1..9a6e2be4a 100644 --- a/milli/src/criterion.rs +++ b/milli/src/criterion.rs @@ -160,7 +160,10 @@ mod tests { ("_geoRadius:asc", ReservedNameForFilter { name: S("_geoRadius") }), ("_geoRadius(42, 75, 59):asc", ReservedNameForFilter { name: S("_geoRadius") }), ("_geoBoundingBox:asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), - ("_geoBoundingBox((42, 75), (75, 59)):asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), + ( + "_geoBoundingBox((42, 75), (75, 59)):asc", + ReservedNameForFilter { name: S("_geoBoundingBox") }, + ), ]; for (input, expected) in invalid_criteria { From 2d66fdc8e91a8fdec18aff8d3dfddfbcb6fbc91a Mon Sep 17 00:00:00 2001 From: Guillaume Mourier Date: Tue, 1 Nov 2022 09:56:38 +0100 Subject: [PATCH 110/186] Apply review comments --- filter-parser/src/error.rs | 2 +- filter-parser/src/lib.rs | 1 - filter-parser/src/value.rs | 4 ++-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/filter-parser/src/error.rs b/filter-parser/src/error.rs index 70018c3d9..0e416091f 100644 --- a/filter-parser/src/error.rs +++ b/filter-parser/src/error.rs @@ -156,7 +156,7 @@ impl<'a> Display for Error<'a> { writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")? } ErrorKind::GeoBoundingBox => { - writeln!(f, "The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.")? + writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.")? } ErrorKind::ReservedGeo(name) => { writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates.", name.escape_debug())? 
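For context on the misuse messages above: once `parse_value` refuses geo filters in value position, a filter comparing a field to `_geoBoundingBox(...)` fails at parse time instead of being read as a plain value. A minimal sketch, again assuming the `filter_parser::FilterCondition::parse` entry point:

// Illustration only, not from the patches.
fn main() {
    let result = filter_parser::FilterCondition::parse("title = _geoBoundingBox((1, 2), (3, 4))");
    // Expected to surface the `MisusedGeoBoundingBox` error shown above.
    assert!(result.is_err());
}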
diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs
index d30c1b58e..3e1c8baea 100644
--- a/filter-parser/src/lib.rs
+++ b/filter-parser/src/lib.rs
@@ -351,7 +351,6 @@ fn parse_geo_bounding_box(input: Span) -> IResult {
         return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
     }
 
-    //TODO: Check sub array length
     let res = FilterCondition::GeoBoundingBox {
         top_left_point: [args[0][0].into(), args[0][1].into()],
         bottom_right_point: [args[1][0].into(), args[1][1].into()],
diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs
index d08a12a92..2296c0769 100644
--- a/filter-parser/src/value.rs
+++ b/filter-parser/src/value.rs
@@ -98,7 +98,7 @@ pub fn parse_value(input: Span) -> IResult {
         return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius)))
     }
     // if we encountered a failure it means the user badly wrote a _geoRadius filter.
-    // But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value.
+    // But instead of showing them how to fix their syntax we are going to tell them they should not use this filter as a value.
     Err(e) if e.is_failure() => {
         return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius)))
     }
@@ -113,7 +113,7 @@ pub fn parse_value(input: Span) -> IResult {
         )))
     }
     // if we encountered a failure it means the user badly wrote a _geoBoundingBox filter.
-    // But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value.
+    // But instead of showing them how to fix their syntax we are going to tell them they should not use this filter as a value.
     Err(e) if e.is_failure() => {
         return Err(nom::Err::Failure(Error::new_from_kind(
             input,
             ErrorKind::MisusedGeoBoundingBox,
         )))
     }
     _ => (),
 }

From d80ce00623d7b1c1c3619b718cb2e4b79dc0e2f8 Mon Sep 17 00:00:00 2001
From: Guillaume Mourier
Date: Tue, 1 Nov 2022 15:27:44 +0100
Subject: [PATCH 111/186] Update insta test

---
 filter-parser/src/lib.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs
index 3e1c8baea..231050401 100644
--- a/filter-parser/src/lib.rs
+++ b/filter-parser/src/lib.rs
@@ -596,17 +596,17 @@ pub mod tests {
     "###);
 
     insta::assert_display_snapshot!(p("_geoBoundingBox"), @r###"
-    The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.
+    The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.
     1:16 _geoBoundingBox
     "###);
 
     insta::assert_display_snapshot!(p("_geoBoundingBox = 12"), @r###"
-    The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.
+    The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.
     1:21 _geoBoundingBox = 12
     "###);
 
     insta::assert_display_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
-    The `_geoBoundingBox` filter expects two pair of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.
+    The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.
1:26 _geoBoundingBox(1.0, 1.0) "###); From ae8660e585a7fc50515e61c9ff5b1e1806d7f9b7 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 2 Feb 2023 15:03:34 +0100 Subject: [PATCH 112/186] Add Token::original_span rather than making Token::span pub --- filter-parser/src/lib.rs | 7 ++++++- milli/src/search/facet/filter.rs | 8 ++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 231050401..3f6b81ea4 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -71,7 +71,7 @@ const MAX_FILTER_DEPTH: usize = 200; #[derive(Debug, Clone, Eq)] pub struct Token<'a> { /// The token in the original input, it should be used when possible. - pub span: Span<'a>, + span: Span<'a>, /// If you need to modify the original input you can use the `value` field /// to store your modified input. value: Option, @@ -100,6 +100,11 @@ impl<'a> Token<'a> { Error::new_from_external(self.span, error) } + /// Returns a copy of the span this token was created with. + pub fn original_span(&self) -> Span<'a> { + self.span + } + pub fn parse_finite_float(&self) -> Result { let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?; if value.is_finite() { diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 15edb1249..903f4fa94 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -413,7 +413,7 @@ impl<'a> Filter<'a> { } let geo_lat_token = - Token::new(top_left_point[0].span, Some("_geo.lat".to_string())); + Token::new(top_left_point[0].original_span(), Some("_geo.lat".to_string())); let condition_lat = FilterCondition::Condition { fid: geo_lat_token, @@ -430,11 +430,11 @@ impl<'a> Filter<'a> { )?; let geo_lng_token = - Token::new(top_left_point[1].span, Some("_geo.lng".to_string())); + Token::new(top_left_point[1].original_span(), Some("_geo.lng".to_string())); let min_lng_token = - Token::new(top_left_point[1].span, Some("-180.0".to_string())); + Token::new(top_left_point[1].original_span(), Some("-180.0".to_string())); let max_lng_token = - Token::new(top_left_point[1].span, Some("180.0".to_string())); + Token::new(top_left_point[1].original_span(), Some("180.0".to_string())); let selected_lng = if top_left[1] > bottom_right[1] { let condition_left = FilterCondition::Condition { From 781691191a49eb989f55b95a4c766365aca101db Mon Sep 17 00:00:00 2001 From: curquiza Date: Thu, 2 Feb 2023 15:22:58 +0100 Subject: [PATCH 113/186] Pin Rust version in Clippy job --- .github/workflows/rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index cec364eef..1752739bc 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -100,7 +100,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.67.0 override: true components: clippy # - name: Cache dependencies From a11d992923a0654e547355b195592d9275db56f2 Mon Sep 17 00:00:00 2001 From: curquiza Date: Thu, 2 Feb 2023 15:33:38 +0100 Subject: [PATCH 114/186] Update issue description for the dependency updates --- .github/workflows/create-issue-dependencies.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/create-issue-dependencies.yml b/.github/workflows/create-issue-dependencies.yml index f6b9c018c..160ddc0a9 100644 --- a/.github/workflows/create-issue-dependencies.yml +++ b/.github/workflows/create-issue-dependencies.yml @@ -15,9 
+15,13 @@ jobs: github_token: ${{ secrets.MEILI_BOT_GH_PAT }} title: Upgrade dependencies body: | - We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the engine-team repositories that Meilisearch depends on (charabia, heed...). + This issue is about updating Meilisearch dependencies: + - [ ] Cargo toml dependencies of Meilisearch; but also the main engine-team repositories that Meilisearch depends on (charabia, heed...) + - [ ] If new Rust versions has been release, update the Rust version in the Clippy job of this [GitHub Action file](./.github/workflows/rust.yml) - ⚠️ This issue should only be done at the beginning of the sprint! + ⚠️ To avoid last minute bugs, this issue should only be done at the beginning of the sprint! + + The GitHub action dependencies are managed by [Dependabot](./.github/dependabot.yml) labels: | dependencies maintenance From 1ca7778e6ac46cdec814e27dd1849e8ae105037f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Thu, 2 Feb 2023 15:54:33 +0100 Subject: [PATCH 115/186] Update .github/workflows/create-issue-dependencies.yml Co-authored-by: Louis Dureuil --- .github/workflows/create-issue-dependencies.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-issue-dependencies.yml b/.github/workflows/create-issue-dependencies.yml index 160ddc0a9..006af6833 100644 --- a/.github/workflows/create-issue-dependencies.yml +++ b/.github/workflows/create-issue-dependencies.yml @@ -17,7 +17,7 @@ jobs: body: | This issue is about updating Meilisearch dependencies: - [ ] Cargo toml dependencies of Meilisearch; but also the main engine-team repositories that Meilisearch depends on (charabia, heed...) - - [ ] If new Rust versions has been release, update the Rust version in the Clippy job of this [GitHub Action file](./.github/workflows/rust.yml) + - [ ] If new Rust versions have been released, update the Rust version in the Clippy job of this [GitHub Action file](./.github/workflows/rust.yml) ⚠️ To avoid last minute bugs, this issue should only be done at the beginning of the sprint! 
From 69fcd3d05ed05b36babe59e5a1a26af519058b2d Mon Sep 17 00:00:00 2001 From: curquiza Date: Thu, 2 Feb 2023 15:58:03 +0100 Subject: [PATCH 116/186] Add comment information about the cron job --- .github/workflows/create-issue-dependencies.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/create-issue-dependencies.yml b/.github/workflows/create-issue-dependencies.yml index 006af6833..3ad1be910 100644 --- a/.github/workflows/create-issue-dependencies.yml +++ b/.github/workflows/create-issue-dependencies.yml @@ -1,6 +1,7 @@ name: Create issue to upgrade dependencies on: schedule: + # Run the first of the month, every 3 month - cron: '0 0 1 */3 *' workflow_dispatch: From fcb09ccc3d6ee35e7b23353a09e40878b6251748 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 2 Feb 2023 18:19:56 +0100 Subject: [PATCH 117/186] add tests on the geoBoundingBox --- milli/src/index.rs | 97 +++++++++++++++++++++++++++++++- milli/src/search/facet/filter.rs | 14 +++-- 2 files changed, 105 insertions(+), 6 deletions(-) diff --git a/milli/src/index.rs b/milli/src/index.rs index 31311d318..b166ab2d9 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -1206,7 +1206,7 @@ pub(crate) mod tests { self, DeleteDocuments, DeletionStrategy, IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig, Settings, }; - use crate::{db_snap, obkv_to_json, Index, Search, SearchResult}; + use crate::{db_snap, obkv_to_json, Filter, Index, Search, SearchResult}; pub(crate) struct TempIndex { pub inner: Index, @@ -1504,6 +1504,101 @@ pub(crate) mod tests { assert_eq!(user_defined, &["doggo", "name"]); } + #[test] + fn test_basic_geo_bounding_box() { + let index = TempIndex::new(); + + index + .update_settings(|settings| { + settings.set_filterable_fields(hashset! 
+            })
+            .unwrap();
+        index
+            .add_documents(documents!([
+                { "id": 0, "_geo": { "lat": 0, "lng": 0 } },
+                { "id": 1, "_geo": { "lat": 0, "lng": -175 } },
+                { "id": 2, "_geo": { "lat": 0, "lng": 175 } },
+                { "id": 3, "_geo": { "lat": 85, "lng": 0 } },
+                { "id": 4, "_geo": { "lat": -85, "lng": 0 } },
+            ]))
+            .unwrap();
+
+        // ensure the geo bounding box filter returns the expected documents
+        let rtxn = index.read_txn().unwrap();
+        let mut search = index.search(&rtxn);
+
+        // exact match a document
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((0, 0), (0, 0))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[0]>");
+
+        // match a document in the middle of the rectangle
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((10, -10), (-10, 10))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[0]>");
+
+        // select everything
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((90, -180), (-90, 180))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[0, 1, 2, 3, 4]>");
+
+        // go on the edge of the longitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((0, 180), (0, -170))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[1]>");
+
+        // go on the other edge of the longitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((0, 170), (0, -180))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[2]>");
+
+        // wrap around the longitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((0, 170), (0, -170))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[1, 2]>");
+
+        // go on the edge of the latitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((90, 0), (80, 0))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[3]>");
+
+        // go on the edge of the latitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((-80, 0), (-90, 0))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[4]>");
+
+        // try to wrap around the latitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((-80, 0), (80, 0))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[]>");
+
+        // the request that doesn't make sense
+        // send a top latitude lower than the bottom latitude
+        let search_result = search
+            .filter(Filter::from_str("_geoBoundingBox((-10, 0), (10, 0))").unwrap().unwrap())
+            .execute()
+            .unwrap();
+        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[]>");
+    }
+
     #[test]
     fn replace_documents_external_ids_and_soft_deletion_check() {
         use big_s::S;
diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs
index 903f4fa94..962b6bab1 100644
--- a/milli/src/search/facet/filter.rs
+++ b/milli/src/search/facet/filter.rs
@@ -431,12 +431,16 @@ impl<'a> Filter<'a> {
             let geo_lng_token =
Token::new(top_left_point[1].original_span(), Some("_geo.lng".to_string())); - let min_lng_token = - Token::new(top_left_point[1].original_span(), Some("-180.0".to_string())); - let max_lng_token = - Token::new(top_left_point[1].original_span(), Some("180.0".to_string())); - let selected_lng = if top_left[1] > bottom_right[1] { + let min_lng_token = Token::new( + top_left_point[1].original_span(), + Some("-180.0".to_string()), + ); + let max_lng_token = Token::new( + top_left_point[1].original_span(), + Some("180.0".to_string()), + ); + let condition_left = FilterCondition::Condition { fid: geo_lng_token.clone(), op: Condition::Between { From d27007005e1a2b12fedf8054eed16d50880f186a Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 6 Feb 2023 11:36:49 +0100 Subject: [PATCH 118/186] comments the geoboundingbox + forbid the usage of the lexeme method which could introduce bugs --- filter-parser/src/lib.rs | 5 +++++ milli/src/search/facet/filter.rs | 13 ++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 3f6b81ea4..385e6f623 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -88,10 +88,15 @@ impl<'a> Token<'a> { Self { span, value } } + /// Returns the string contained in the span of the `Token`. + /// This is only useful in the tests. You should always use + /// the value. + #[cfg(test)] pub fn lexeme(&self) -> &str { &self.span } + /// Return the string contained in the token. pub fn value(&self) -> &str { self.value.as_ref().map_or(&self.span, |value| value) } diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 962b6bab1..74350275a 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -294,7 +294,7 @@ impl<'a> Filter<'a> { Ok(RoaringBitmap::new()) } } else { - match fid.lexeme() { + match fid.value() { attribute @ "_geo" => { Err(fid.as_external_error(FilterError::BadGeo(attribute)))? } @@ -412,6 +412,12 @@ impl<'a> Filter<'a> { .as_external_error(FilterError::BadGeoLng(bottom_right[1])))?; } + // Instead of writing a custom `GeoBoundingBox` filter we're simply going to re-use the range + // filter to create the following filter; + // `_geo.lat {top_left[0]} TO {bottom_right[0]} AND _geo.lng {top_left[1]} TO {bottom_right[1]}` + // As we can see, we need to use a bunch of tokens that doesn't exists in the original filter, + // thus we're going to create tokens that points to a random spans but contains our text. + let geo_lat_token = Token::new(top_left_point[0].original_span(), Some("_geo.lat".to_string())); @@ -432,6 +438,11 @@ impl<'a> Filter<'a> { let geo_lng_token = Token::new(top_left_point[1].original_span(), Some("_geo.lng".to_string())); let selected_lng = if top_left[1] > bottom_right[1] { + // In this case the bounding box is wrapping around the earth (going from 180 to -180). 
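+                // (e.g. a box whose left edge is at lng 170 and whose right edge is at lng -170
+                // must match the documents sitting at lng 175 as well as lng -175, as the tests above exercise)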
+ // We need to update the lng part of the filter from; + // `_geo.lng {top_left[1]} TO {bottom_right[1]}` to + // `_geo.lng {top_left[1]} TO 180 AND _geo.lng -180 TO {bottom_right[1]}` + let min_lng_token = Token::new( top_left_point[1].original_span(), Some("-180.0".to_string()), From 3ebc99473f6aea4f3bf91815b7b6c3cf1765058d Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 6 Feb 2023 13:29:37 +0100 Subject: [PATCH 119/186] Apply suggestions from code review Co-authored-by: Louis Dureuil --- milli/src/search/facet/filter.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 74350275a..d148d707a 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -415,8 +415,8 @@ impl<'a> Filter<'a> { // Instead of writing a custom `GeoBoundingBox` filter we're simply going to re-use the range // filter to create the following filter; // `_geo.lat {top_left[0]} TO {bottom_right[0]} AND _geo.lng {top_left[1]} TO {bottom_right[1]}` - // As we can see, we need to use a bunch of tokens that doesn't exists in the original filter, - // thus we're going to create tokens that points to a random spans but contains our text. + // As we can see, we need to use a bunch of tokens that don't exist in the original filter, + // thus we're going to create tokens that point to a random span but contain our text. let geo_lat_token = Token::new(top_left_point[0].original_span(), Some("_geo.lat".to_string())); From 41cbaad1cb030154833052c54cee7f40629fd42e Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 6 Feb 2023 16:42:16 +0100 Subject: [PATCH 120/186] Revert "Add git config about ownershio in Docker CI" This reverts commit e269027cdd9bde88b012c5248c010a0a323c17e6. 
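For context, the removed `git config --global --add safe.directory` call was the opt-out introduced in Git 2.35.2 for the dubious-ownership check on repositories owned by another user.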
---
 .github/workflows/publish-docker-images.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml
index 5a79aa06a..5d1b50f79 100644
--- a/.github/workflows/publish-docker-images.yml
+++ b/.github/workflows/publish-docker-images.yml
@@ -52,9 +52,6 @@ jobs:
       - name: Set build-args for Docker buildx
         id: build-metadata
         run: |
-          # Define ownership
-          git config --global --add safe.directory /home/meili/actions-runner/_work/meilisearch/meilisearch
-
           # Extract commit date
           commit_date=$(git show -s --format=%cd --date=iso-strict ${{ github.sha }})
 
From a377a492185169c3bc272758df66ea2eddd46653 Mon Sep 17 00:00:00 2001
From: Kerollmops
Date: Mon, 6 Feb 2023 16:44:43 +0100
Subject: [PATCH 121/186] Make meilisearch depend on the local milli

---
 Cargo.lock                   | 81 +++---------------------------------
 meilisearch-types/Cargo.toml |  2 +-
 2 files changed, 6 insertions(+), 77 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 87f243e70..66b1fd10d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -415,7 +415,7 @@ dependencies = [
  "criterion",
  "csv",
  "flate2",
- "milli 1.0.0",
+ "milli",
  "mimalloc",
  "rand",
  "rand_chacha",
@@ -1422,15 +1422,6 @@ dependencies = [
  "windows-sys",
 ]
 
-[[package]]
-name = "filter-parser"
-version = "0.41.1"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
-dependencies = [
- "nom",
- "nom_locate",
-]
-
 [[package]]
 name = "filter-parser"
 version = "1.0.0"
@@ -1451,14 +1442,6 @@ dependencies = [
  "miniz_oxide",
 ]
 
-[[package]]
-name = "flatten-serde-json"
-version = "0.41.1"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
-dependencies = [
- "serde_json",
-]
-
 [[package]]
 name = "flatten-serde-json"
 version = "1.0.0"
@@ -2083,14 +2066,6 @@ dependencies = [
  "wasm-bindgen",
 ]
 
-[[package]]
-name = "json-depth-checker"
-version = "0.41.1"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
-dependencies = [
- "serde_json",
-]
-
 [[package]]
 name = "json-depth-checker"
 version = "1.0.0"
@@ -2603,7 +2578,7 @@ dependencies = [
  "insta",
  "meili-snap",
  "memmap2",
- "milli 0.41.1",
+ "milli",
  "roaring",
  "serde",
  "serde-cs",
@@ -2640,52 +2615,6 @@ dependencies = [
  "autocfg",
 ]
 
-[[package]]
-name = "milli"
-version = "0.41.1"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.41.1#758b4acea7cecd689650bee65949b49cf09ddaa3"
-dependencies = [
- "bimap",
- "bincode",
- "bstr 1.1.0",
- "byteorder",
- "charabia",
- "concat-arrays",
- "crossbeam-channel",
- "csv",
- "deserr 0.3.0",
- "either",
- "filter-parser 0.41.1",
- "flatten-serde-json 0.41.1",
- "fst",
- "fxhash",
- "geoutils",
- "grenad",
- "heed",
- "itertools",
- "json-depth-checker 0.41.1",
- "levenshtein_automata",
- "log",
- "logging_timer",
- "memmap2",
- "obkv",
- "once_cell",
- "ordered-float",
- "rayon",
- "roaring",
- "rstar",
- "serde",
- "serde_json",
- "slice-group-by",
- "smallstr",
- "smallvec",
- "smartstring",
- "tempfile",
- "thiserror",
- "time",
- "uuid 1.2.2",
-]
-
 [[package]]
 name = "milli"
 version = "1.0.0"
@@ -2701,8 +2630,8 @@ dependencies = [
  "csv",
  "deserr 0.1.4",
  "either",
- "filter-parser 1.0.0",
- "flatten-serde-json 1.0.0",
+ "filter-parser",
+ "flatten-serde-json",
  "fst",
  "fuzzcheck",
  "fxhash",
  "geoutils",
  "grenad",
  "heed",
  "insta",
  "itertools",
- "json-depth-checker 1.0.0",
+ "json-depth-checker",
  "levenshtein_automata",
"log", "logging_timer", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 54efe1f56..7c30a34c5 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -16,7 +16,7 @@ file-store = { path = "../file-store" } flate2 = "1.0.24" fst = "0.4.7" memmap2 = "0.5.7" -milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.41.1", default-features = false } +milli = { path = "../milli", default-features = false } roaring = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.145", features = ["derive"] } serde-cs = "0.2.4" From 1b005f697dfd73eeb7f4aa6064897f7ab58b12ba Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 6 Feb 2023 16:50:27 +0100 Subject: [PATCH 122/186] update the syntax of the geoboundingbox filter to uses brackets instead of parens around lat and lng --- filter-parser/src/error.rs | 4 ++-- filter-parser/src/lib.rs | 24 ++++++++++++------------ milli/src/criterion.rs | 2 +- milli/src/index.rs | 20 ++++++++++---------- milli/src/search/facet/filter.rs | 20 ++++++++++---------- 5 files changed, 35 insertions(+), 35 deletions(-) diff --git a/filter-parser/src/error.rs b/filter-parser/src/error.rs index 0e416091f..4d9d89859 100644 --- a/filter-parser/src/error.rs +++ b/filter-parser/src/error.rs @@ -156,10 +156,10 @@ impl<'a> Display for Error<'a> { writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")? } ErrorKind::GeoBoundingBox => { - writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`.")? + writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")? } ErrorKind::ReservedGeo(name) => { - writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates.", name.escape_debug())? + writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox([latitude, longitude], [latitude, longitude]) built-in rules to filter on `_geo` coordinates.", name.escape_debug())? } ErrorKind::MisusedGeoRadius => { writeln!(f, "The `_geoRadius` filter is an operation and can't be used as a value.")? diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 385e6f623..8e21ff6be 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -18,7 +18,7 @@ //! doubleQuoted = "\"" .* all but double quotes "\"" //! word = (alphanumeric | _ | - | .)+ //! geoRadius = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")" -//! geoBoundingBox = "_geoBoundingBox((" WS * float WS* "," WS* float WS* "), (" WS* float WS* "," WS* float WS* ")") +//! geoBoundingBox = "_geoBoundingBox([" WS * float WS* "," WS* float WS* "], [" WS* float WS* "," WS* float WS* "]") //! ``` //! //! Other BNF grammar used to handle some specific errors: @@ -337,7 +337,7 @@ fn parse_geo_radius(input: Span) -> IResult { Ok((input, res)) } -/// geoBoundingBox = WS* "_geoBoundingBox((float WS* "," WS* float WS* "), (float WS* "," WS* float WS* ")") +/// geoBoundingBox = WS* "_geoBoundingBox([float WS* "," WS* float WS* "], [float WS* "," WS* float WS* "]") /// If we parse `_geoBoundingBox` we MUST parse the rest of the expression. 
fn parse_geo_bounding_box(input: Span) -> IResult { // we want to allow space BEFORE the _geoBoundingBox but not after @@ -348,7 +348,7 @@ fn parse_geo_bounding_box(input: Span) -> IResult { char('('), separated_list1( tag(","), - ws(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))), + ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))), ), char(')'), )), @@ -515,9 +515,9 @@ pub mod tests { insta::assert_display_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})"); // Test geo bounding box - insta::assert_display_snapshot!(p("_geoBoundingBox((12, 13), (14, 15))"), @"_geoBoundingBox(({12}, {13}), ({14}, {15}))"); - insta::assert_display_snapshot!(p("NOT _geoBoundingBox((12, 13), (14, 15))"), @"NOT (_geoBoundingBox(({12}, {13}), ({14}, {15})))"); - insta::assert_display_snapshot!(p("_geoBoundingBox((12,13),(14,15))"), @"_geoBoundingBox(({12}, {13}), ({14}, {15}))"); + insta::assert_display_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])"); + insta::assert_display_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))"); + insta::assert_display_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])"); // Test OR + AND insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]"); @@ -606,27 +606,27 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("_geoBoundingBox"), @r###" - The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`. + The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`. 1:16 _geoBoundingBox "###); insta::assert_display_snapshot!(p("_geoBoundingBox = 12"), @r###" - The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`. + The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`. 1:21 _geoBoundingBox = 12 "###); insta::assert_display_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###" - The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox((latitude, longitude), (latitude, longitude))`. + The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`. 1:26 _geoBoundingBox(1.0, 1.0) "###); insta::assert_display_snapshot!(p("_geoPoint(12, 13, 14)"), @r###" - `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates. + `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox([latitude, longitude], [latitude, longitude]) built-in rules to filter on `_geo` coordinates. 1:22 _geoPoint(12, 13, 14) "###); insta::assert_display_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###" - `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox((latitude, longitude), (latitude, longitude)) built-in rules to filter on `_geo` coordinates. 
+ `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox([latitude, longitude], [latitude, longitude]) built-in rules to filter on `_geo` coordinates. 13:34 position <= _geoPoint(12, 13, 14) "###); @@ -783,7 +783,7 @@ impl<'a> std::fmt::Display for FilterCondition<'a> { FilterCondition::GeoBoundingBox { top_left_point, bottom_right_point } => { write!( f, - "_geoBoundingBox(({}, {}), ({}, {}))", + "_geoBoundingBox([{}, {}], [{}, {}])", top_left_point[0], top_left_point[1], bottom_right_point[0], diff --git a/milli/src/criterion.rs b/milli/src/criterion.rs index 9a6e2be4a..45cbfe63d 100644 --- a/milli/src/criterion.rs +++ b/milli/src/criterion.rs @@ -161,7 +161,7 @@ mod tests { ("_geoRadius(42, 75, 59):asc", ReservedNameForFilter { name: S("_geoRadius") }), ("_geoBoundingBox:asc", ReservedNameForFilter { name: S("_geoBoundingBox") }), ( - "_geoBoundingBox((42, 75), (75, 59)):asc", + "_geoBoundingBox([42, 75], [75, 59]):asc", ReservedNameForFilter { name: S("_geoBoundingBox") }, ), ]; diff --git a/milli/src/index.rs b/milli/src/index.rs index b166ab2d9..1c0482bca 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -1529,63 +1529,63 @@ pub(crate) mod tests { // exact match a document let search_result = search - .filter(Filter::from_str("_geoBoundingBox((0, 0), (0, 0))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([0, 0], [0, 0])").unwrap().unwrap()) .execute() .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[0]>"); // match a document in the middle of the rectangle let search_result = search - .filter(Filter::from_str("_geoBoundingBox((10, -10), (-10, 10))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([10, -10], [-10, 10])").unwrap().unwrap()) .execute() .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[0]>"); // select everything let search_result = search - .filter(Filter::from_str("_geoBoundingBox((90, -180), (-90, 180))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([90, -180], [-90, 180])").unwrap().unwrap()) .execute() .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[0, 1, 2, 3, 4]>"); // go on the edge of the longitude let search_result = search - .filter(Filter::from_str("_geoBoundingBox((0, 180), (0, -170))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([0, 180], [0, -170])").unwrap().unwrap()) .execute() .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[1]>"); // go on the other edge of the longitude let search_result = search - .filter(Filter::from_str("_geoBoundingBox((0, 170), (0, -180))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([0, 170], [0, -180])").unwrap().unwrap()) .execute() .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[2]>"); // wrap around the longitude let search_result = search - .filter(Filter::from_str("_geoBoundingBox((0, 170), (0, -170))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([0, 170], [0, -170])").unwrap().unwrap()) .execute() .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[1, 2]>"); // go on the edge of the latitude let search_result = search - .filter(Filter::from_str("_geoBoundingBox((90, 0), (80, 0))").unwrap().unwrap()) + .filter(Filter::from_str("_geoBoundingBox([90, 0], [80, 0])").unwrap().unwrap()) .execute() .unwrap(); 
 insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[3]>");
 
         // go on the edge of the latitude
         let search_result = search
-            .filter(Filter::from_str("_geoBoundingBox((-80, 0), (-90, 0))").unwrap().unwrap())
+            .filter(Filter::from_str("_geoBoundingBox([-80, 0], [-90, 0])").unwrap().unwrap())
             .execute()
             .unwrap();
         insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[4]>");
 
         // try to wrap around the latitude
         let search_result = search
-            .filter(Filter::from_str("_geoBoundingBox((-80, 0), (80, 0))").unwrap().unwrap())
+            .filter(Filter::from_str("_geoBoundingBox([-80, 0], [80, 0])").unwrap().unwrap())
             .execute()
             .unwrap();
         insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[]>");
@@ -1593,7 +1593,7 @@ pub(crate) mod tests {
         // the request that doesn't make sense
         // send a top latitude lower than the bottom latitude
         let search_result = search
-            .filter(Filter::from_str("_geoBoundingBox((-10, 0), (10, 0))").unwrap().unwrap())
+            .filter(Filter::from_str("_geoBoundingBox([-10, 0], [10, 0])").unwrap().unwrap())
             .execute()
             .unwrap();
         insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[]>");
diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs
index d148d707a..82073c66b 100644
--- a/milli/src/search/facet/filter.rs
+++ b/milli/src/search/facet/filter.rs
@@ -619,7 +619,7 @@ mod tests {
             "Attribute `_geo` is not filterable. This index does not have configured filterable attributes."
         ));
 
-        let filter = Filter::from_str("_geoBoundingBox((42, 150), (30, 10))").unwrap().unwrap();
+        let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
         let error = filter.evaluate(&rtxn, &index).unwrap_err();
         assert!(error.to_string().starts_with(
             "Attribute `_geo` is not filterable. This index does not have configured filterable attributes."
@@ -647,7 +647,7 @@ mod tests {
             "Attribute `_geo` is not filterable. Available filterable attributes are: `title`."
         ));
 
-        let filter = Filter::from_str("_geoBoundingBox((42, 150), (30, 10))").unwrap().unwrap();
+        let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
         let error = filter.evaluate(&rtxn, &index).unwrap_err();
         assert!(error.to_string().starts_with(
             "Attribute `_geo` is not filterable. Available filterable attributes are: `title`."
@@ -819,7 +819,7 @@ mod tests {
 
         // geoboundingbox top left coord have a bad latitude
         let filter =
-            Filter::from_str("_geoBoundingBox((-90.0000001, 150), (30, 10))").unwrap().unwrap();
+            Filter::from_str("_geoBoundingBox([-90.0000001, 150], [30, 10])").unwrap().unwrap();
         let error = filter.evaluate(&rtxn, &index).unwrap_err();
         assert!(
             error.to_string().starts_with(
@@ -831,7 +831,7 @@ mod tests {
 
         // geoboundingbox top left coord have a bad latitude
         let filter =
-            Filter::from_str("_geoBoundingBox((90.0000001, 150), (30, 10))").unwrap().unwrap();
+            Filter::from_str("_geoBoundingBox([90.0000001, 150], [30, 10])").unwrap().unwrap();
         let error = filter.evaluate(&rtxn, &index).unwrap_err();
         assert!(
             error.to_string().starts_with(
@@ -843,7 +843,7 @@ mod tests {
 
         // geoboundingbox bottom right coord have a bad latitude
         let filter =
-            Filter::from_str("_geoBoundingBox((30, 10), (-90.0000001, 150))").unwrap().unwrap();
+            Filter::from_str("_geoBoundingBox([30, 10], [-90.0000001, 150])").unwrap().unwrap();
         let error = filter.evaluate(&rtxn, &index).unwrap_err();
         assert!(error.to_string().contains(
             "Bad latitude `-90.0000001`. Latitude must be contained between -90 and 90 degrees."
@@ -851,7 +851,7 @@ mod tests { // geoboundingbox bottom right coord have a bad latitude let filter = - Filter::from_str("_geoBoundingBox((30, 10), (90.0000001, 150))").unwrap().unwrap(); + Filter::from_str("_geoBoundingBox([30, 10], [90.0000001, 150])").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().contains( "Bad latitude `90.0000001`. Latitude must be contained between -90 and 90 degrees." @@ -859,7 +859,7 @@ mod tests { // geoboundingbox top left coord have a bad longitude let filter = - Filter::from_str("_geoBoundingBox((-10, 180.000001), (30, 10))").unwrap().unwrap(); + Filter::from_str("_geoBoundingBox([-10, 180.000001], [30, 10])").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().contains( "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees." @@ -867,7 +867,7 @@ mod tests { // geoboundingbox top left coord have a bad longitude let filter = - Filter::from_str("_geoBoundingBox((-10, -180.000001), (30, 10))").unwrap().unwrap(); + Filter::from_str("_geoBoundingBox([-10, -180.000001], [30, 10])").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().contains( "Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees." @@ -875,7 +875,7 @@ mod tests { // geoboundingbox bottom right coord have a bad longitude let filter = - Filter::from_str("_geoBoundingBox((30, 10), (-10, -180.000001))").unwrap().unwrap(); + Filter::from_str("_geoBoundingBox([30, 10], [-10, -180.000001])").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().contains( "Bad longitude `-180.000001`. Longitude must be contained between -180 and 180 degrees." @@ -883,7 +883,7 @@ mod tests { // geoboundingbox bottom right coord have a bad longitude let filter = - Filter::from_str("_geoBoundingBox((30, 10), (-10, 180.000001))").unwrap().unwrap(); + Filter::from_str("_geoBoundingBox([30, 10], [-10, 180.000001])").unwrap().unwrap(); let error = filter.evaluate(&rtxn, &index).unwrap_err(); assert!(error.to_string().contains( "Bad longitude `180.000001`. Longitude must be contained between -180 and 180 degrees." From 7a38fe624f5c8424230aae5c54a4cde35fa43cc7 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 6 Feb 2023 17:50:47 +0100 Subject: [PATCH 123/186] throw an error if the top left corner is found below the bottom right corner --- meilisearch/tests/search/errors.rs | 4 ++-- milli/src/index.rs | 21 ++++++++++++++------- milli/src/search/facet/filter.rs | 12 +++++++++++- milli/src/search/mod.rs | 2 +- 4 files changed, 28 insertions(+), 11 deletions(-) diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index 2c02dc0a3..3ef342171 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -540,7 +540,7 @@ async fn filter_reserved_geo_attribute_array() { index.wait_task(1).await; let expected_response = json!({ - "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass", + "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. 
Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.\n1:5 _geo = Glass",
         "code": "invalid_search_filter",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
@@ -565,7 +565,7 @@ async fn filter_reserved_geo_attribute_string() {
     index.wait_task(1).await;
 
     let expected_response = json!({
-        "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
+        "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.\n1:5 _geo = Glass",
         "code": "invalid_search_filter",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid-search-filter"
diff --git a/milli/src/index.rs b/milli/src/index.rs
index 1c0482bca..972ed789e 100644
--- a/milli/src/index.rs
+++ b/milli/src/index.rs
@@ -1583,20 +1583,27 @@ pub(crate) mod tests {
         .unwrap();
         insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[4]>");
 
+        // the requests that doesn't make sense
+
         // try to wrap around the latitude
-        let search_result = search
+        let error = search
             .filter(Filter::from_str("_geoBoundingBox([-80, 0], [80, 0])").unwrap().unwrap())
             .execute()
-            .unwrap();
-        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[]>");
+            .unwrap_err();
+        insta::assert_display_snapshot!(error, @r###"
+        The top latitude `-80` is below the bottom latitude `80`.
+        32:33 _geoBoundingBox([-80, 0], [80, 0])
+        "###);
 
-        // the request that doesn't make sense
         // send a top latitude lower than the bottom latitude
-        let search_result = search
+        let error = search
             .filter(Filter::from_str("_geoBoundingBox([-10, 0], [10, 0])").unwrap().unwrap())
             .execute()
-            .unwrap();
-        insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[]>");
+            .unwrap_err();
+        insta::assert_display_snapshot!(error, @r###"
+        The top latitude `-10` is below the bottom latitude `10`.
+        32:33 _geoBoundingBox([-10, 0], [10, 0])
+        "###);
     }
 
     #[test]
diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs
index 82073c66b..3cf11819f 100644
--- a/milli/src/search/facet/filter.rs
+++ b/milli/src/search/facet/filter.rs
@@ -27,6 +27,7 @@ enum FilterError<'a> {
     BadGeo(&'a str),
     BadGeoLat(f64),
     BadGeoLng(f64),
+    BadGeoBoundingBoxTopIsBelowBottom(f64, f64),
     Reserved(&'a str),
     TooDeep,
 }
@@ -62,7 +63,8 @@ impl<'a> Display for FilterError<'a> {
                 "`{}` is a reserved keyword and thus can't be used as a filter expression.",
                 keyword
             ),
-            Self::BadGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression.
Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword), + Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`."), Self::BadGeoLat(lat) => write!(f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", lat), Self::BadGeoLng(lng) => write!(f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng), } @@ -411,6 +413,14 @@ impl<'a> Filter<'a> { return Err(bottom_right_point[1] .as_external_error(FilterError::BadGeoLng(bottom_right[1])))?; } + if top_left[0] < bottom_right[0] { + return Err(bottom_right_point[1].as_external_error( + FilterError::BadGeoBoundingBoxTopIsBelowBottom( + top_left[0], + bottom_right[0], + ), + ))?; + } // Instead of writing a custom `GeoBoundingBox` filter we're simply going to re-use the range // filter to create the following filter; diff --git a/milli/src/search/mod.rs b/milli/src/search/mod.rs index df59634bb..dc48e04a8 100644 --- a/milli/src/search/mod.rs +++ b/milli/src/search/mod.rs @@ -319,7 +319,7 @@ impl fmt::Debug for Search<'_> { } } -#[derive(Default)] +#[derive(Default, Debug)] pub struct SearchResult { pub matching_words: MatchingWords, pub candidates: RoaringBitmap, From 42114325cdee1ea41ac5a15f04b38494fb52ca3b Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 6 Feb 2023 18:07:00 +0100 Subject: [PATCH 124/186] Apply suggestions from code review Co-authored-by: Louis Dureuil --- milli/src/index.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/milli/src/index.rs b/milli/src/index.rs index 972ed789e..9f5b30cd6 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -1583,7 +1583,7 @@ pub(crate) mod tests { .unwrap(); insta::assert_debug_snapshot!(search_result.candidates, @"RoaringBitmap<[4]>"); - // the requests that doesn't make sense + // the requests that don't make sense // try to wrap around the latitude let error = search From 5f56e6dd581fba34eb02e443b76a08d917751ff4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Feb 2023 12:14:05 +0000 Subject: [PATCH 125/186] Bump tokio from 1.24.1 to 1.24.2 Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.24.1 to 1.24.2. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/commits) --- updated-dependencies: - dependency-name: tokio dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- meilisearch-types/Cargo.toml | 2 +- meilisearch/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 147e19ccf..a1ae6f766 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3867,9 +3867,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.24.1" +version = "1.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d9f76183f91ecfb55e1d7d5602bd1d979e38a3a522fe900241cf195624d67ae" +checksum = "597a12a59981d9e3c38d216785b0c37399f6e415e8d0712047620f189371b0bb" dependencies = [ "autocfg", "bytes", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 7c30a34c5..f62202f76 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -25,7 +25,7 @@ tar = "0.4.38" tempfile = "3.3.0" thiserror = "1.0.30" time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } -tokio = "1.0" +tokio = "1.24" uuid = { version = "1.1.2", features = ["serde", "v4"] } [dev-dependencies] diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index 0c95142aa..dba645665 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -65,7 +65,7 @@ tar = "0.4.38" tempfile = "3.3.0" thiserror = "1.0.37" time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] } -tokio = { version = "1.21.2", features = ["full"] } +tokio = { version = "1.24.2", features = ["full"] } tokio-stream = "0.1.10" toml = "0.5.9" uuid = { version = "1.1.2", features = ["serde", "v4"] } From 8f64fba1cebe968036ec562ecc79f77eec9a396c Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 12:53:38 +0100 Subject: [PATCH 126/186] rewrite the current transform to handle a new byte specifying the kind of operation it's merging --- milli/src/update/index_documents/transform.rs | 102 +++++++++++++++--- 1 file changed, 89 insertions(+), 13 deletions(-) diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 9e07e78ad..fe8f06b6c 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -12,7 +12,9 @@ use roaring::RoaringBitmap; use serde_json::Value; use smartstring::SmartString; -use super::helpers::{create_sorter, create_writer, keep_latest_obkv, merge_obkvs, MergeFn}; +use super::helpers::{ + create_sorter, create_writer, keep_latest_obkv, merge_obkvs, merge_two_obkvs, MergeFn, +}; use super::{IndexDocumentsMethod, IndexerConfig}; use crate::documents::{DocumentsBatchIndex, EnrichedDocument, EnrichedDocumentsBatchReader}; use crate::error::{Error, InternalError, UserError}; @@ -59,6 +61,12 @@ pub struct Transform<'a, 'i> { documents_count: usize, } +#[repr(u8)] +enum Operation { + Addition, + Deletion, +} + /// Create a mapping between the field ids found in the document batch and the one that were /// already present in the index. /// @@ -94,7 +102,7 @@ impl<'a, 'i> Transform<'a, 'i> { // with the same user id must be merged or fully replaced in the same batch. let merge_function = match index_documents_method { IndexDocumentsMethod::ReplaceDocuments => keep_latest_obkv, - IndexDocumentsMethod::UpdateDocuments => merge_obkvs, + IndexDocumentsMethod::UpdateDocuments => merge_obkvs_and_operations, }; // We initialize the sorter with the user indexing settings. 
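+        // From now on, every entry fed to these sorters is prefixed with a single `Operation` byte
+        // (addition or deletion) followed by the obkv payload.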
@@ -161,6 +169,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         self.fields_ids_map.insert(&primary_key).ok_or(UserError::AttributeLimitReached)?;
 
         let mut obkv_buffer = Vec::new();
+        let mut document_sorter_buffer = Vec::new();
         let mut documents_count = 0;
         let mut docid_buffer: Vec = Vec::new();
         let mut field_buffer: Vec<(u16, Cow<[u8]>)> = Vec::new();
@@ -248,26 +257,46 @@ impl<'a, 'i> Transform<'a, 'i> {
                     skip_insertion = true;
                 } else {
                     // we associate the base document with the new key, everything will get merged later.
-                    self.original_sorter.insert(docid.to_be_bytes(), base_obkv)?;
+                    document_sorter_buffer.clear();
+                    document_sorter_buffer.push(Operation::Addition as u8);
+                    document_sorter_buffer.extend_from_slice(base_obkv);
+                    self.original_sorter.insert(docid.to_be_bytes(), &document_sorter_buffer)?;
                     match self.flatten_from_fields_ids_map(KvReader::new(base_obkv))? {
-                        Some(buffer) => {
-                            self.flattened_sorter.insert(docid.to_be_bytes(), &buffer)?
+                        Some(flattened_obkv) => {
+                            // we recreate our buffer with the flattened documents
+                            document_sorter_buffer.clear();
+                            document_sorter_buffer.push(Operation::Addition as u8);
+                            document_sorter_buffer.extend_from_slice(&flattened_obkv);
+                            self.flattened_sorter
+                                .insert(docid.to_be_bytes(), &document_sorter_buffer)?
                         }
-                        None => self.flattened_sorter.insert(docid.to_be_bytes(), base_obkv)?,
+                        None => self
+                            .flattened_sorter
+                            .insert(docid.to_be_bytes(), &document_sorter_buffer)?,
                     }
                 }
             }
 
             if !skip_insertion {
                 self.new_documents_ids.insert(docid);
+
+                document_sorter_buffer.clear();
+                document_sorter_buffer.push(Operation::Addition as u8);
+                document_sorter_buffer.extend_from_slice(&obkv_buffer);
                 // We use the extracted/generated user id as the key for this document.
-                self.original_sorter.insert(docid.to_be_bytes(), obkv_buffer.clone())?;
+                self.original_sorter.insert(docid.to_be_bytes(), &document_sorter_buffer)?;
 
                 match self.flatten_from_fields_ids_map(KvReader::new(&obkv_buffer))? {
-                    Some(buffer) => self.flattened_sorter.insert(docid.to_be_bytes(), &buffer)?,
-                    None => {
-                        self.flattened_sorter.insert(docid.to_be_bytes(), obkv_buffer.clone())?
+                    Some(flattened_obkv) => {
+                        document_sorter_buffer.clear();
+                        document_sorter_buffer.push(Operation::Addition as u8);
+                        document_sorter_buffer.extend_from_slice(&flattened_obkv);
+                        self.flattened_sorter
+                            .insert(docid.to_be_bytes(), &document_sorter_buffer)?
                     }
+                    None => self
+                        .flattened_sorter
+                        .insert(docid.to_be_bytes(), &document_sorter_buffer)?,
                 }
             }
             documents_count += 1;
@@ -487,6 +516,11 @@ impl<'a, 'i> Transform<'a, 'i> {
 
         let mut documents_count = 0;
         while let Some((key, val)) = iter.next()? {
+            if val[0] == Operation::Deletion as u8 {
+                continue;
+            }
+            let val = &val[1..];
+
             // send a callback to show at which step we are
             documents_count += 1;
             progress_callback(UpdateIndexingStep::ComputeIdsAndMergeDocuments {
@@ -518,9 +552,18 @@ impl<'a, 'i> Transform<'a, 'i> {
             self.indexer_settings.chunk_compression_level,
             tempfile::tempfile()?,
         );
-        // Once we have written all the documents into the final sorter, we write the documents
-        // into this writer, extract the file and reset the seek to be able to read it again.
-        self.flattened_sorter.write_into_stream_writer(&mut writer)?;
+
+        // Once we have written all the documents into the final sorter, we write the nested documents
+        // into this writer.
+        // We get rid of the `Operation` byte and skip the deleted documents as well.
+        let mut iter = self.flattened_sorter.into_stream_merger_iter()?;
+        while let Some((key, val)) = iter.next()? {
+            if val[0] == Operation::Deletion as u8 {
+                continue;
+            }
+            let val = &val[1..];
+            writer.insert(key, val)?;
+        }
 
         let mut flattened_documents = writer.into_inner()?;
         flattened_documents.rewind()?;
@@ -677,6 +720,39 @@ impl<'a, 'i> Transform<'a, 'i> {
     }
 }
 
+/// Merge all the obkvs in the order we see them.
+fn merge_obkvs_and_operations<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result> {
+    // [add, add, delete, add, add]
+    // we can ignore everything that happened before the last delete.
+    let starting_position = obkvs
+        .iter()
+        .rev()
+        .position(|obkv| obkv[0] == Operation::Deletion as u8)
+        .map_or(0, |pos| obkvs.len() - pos);
+
+    // [add, add, delete]
+    // if the last operation was a deletion then we simply return the deletion
+    if starting_position == obkvs.len() {
+        return Ok(obkvs[obkvs.len() - 1].clone());
+    }
+    let mut buffer = Vec::new();
+
+    // (add, add, delete) [add, add]
+    // in the other case, no deletion will be encountered during the merge
+    Ok(obkvs[starting_position..]
+        .iter()
+        .cloned()
+        .reduce(|acc, current| {
+            let first = obkv::KvReader::new(&acc[1..]);
+            let second = obkv::KvReader::new(&current[1..]);
+            merge_two_obkvs(first, second, &mut buffer);
+            // TODO: do this only once at the end
+            buffer.insert(0, Operation::Addition as u8);
+            Cow::from(buffer.clone())
+        })
+        .unwrap())
+}
+
 /// Drops all the values of type `U` in vec, and reuses the allocation to create a `Vec`.
 ///
 /// The size and alignment of T and U must match.
From 7b4b57ecc82563fbc842e1d1eee25b8a9468896b Mon Sep 17 00:00:00 2001
From: Kerollmops
Date: Wed, 8 Feb 2023 14:54:05 +0100
Subject: [PATCH 127/186] Fix the current tests

---
 dump/src/lib.rs                            | 7 +++----
 meilisearch-types/src/index_uid_pattern.rs | 1 +
 meilisearch/tests/auth/api_keys.rs         | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/dump/src/lib.rs b/dump/src/lib.rs
index 7a7b9a5b7..8f7c28b8a 100644
--- a/dump/src/lib.rs
+++ b/dump/src/lib.rs
@@ -203,12 +203,11 @@ pub(crate) mod test {
 
     use big_s::S;
     use maplit::btreeset;
-    use meilisearch_types::index_uid::IndexUid;
+    use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli::update::Setting;
     use meilisearch_types::milli::{self};
     use meilisearch_types::settings::{Checked, Settings};
-    use meilisearch_types::star_or::StarOr;
     use meilisearch_types::tasks::{Details, Status};
     use serde_json::{json, Map, Value};
     use time::macros::datetime;
@@ -341,7 +340,7 @@ pub(crate) mod test {
             name: Some(S("doggos_key")),
             uid: Uuid::from_str("9f8a34da-b6b2-42f0-939b-dbd4c3448655").unwrap(),
             actions: vec![Action::DocumentsAll],
-            indexes: vec![StarOr::Other(IndexUid::from_str("doggos").unwrap())],
+            indexes: vec![IndexUidPattern::from_str("doggos").unwrap()],
             expires_at: Some(datetime!(4130-03-14 12:21 UTC)),
             created_at: datetime!(1960-11-15 0:00 UTC),
             updated_at: datetime!(2022-11-10 0:00 UTC),
@@ -351,7 +350,7 @@ pub(crate) mod test {
             name: Some(S("master_key")),
             uid: Uuid::from_str("4622f717-1c00-47bb-a494-39d76a49b591").unwrap(),
             actions: vec![Action::All],
-            indexes: vec![StarOr::Star],
+            indexes: vec![IndexUidPattern::all()],
             expires_at: None,
             created_at: datetime!(0000-01-01 00:01 UTC),
             updated_at: datetime!(1964-05-04 17:25 UTC),
diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs
index 88e0292f2..9f49c06ea 100644
--- a/meilisearch-types/src/index_uid_pattern.rs
+++ b/meilisearch-types/src/index_uid_pattern.rs
@@ -69,6 +69,7 @@ impl TryFrom for IndexUidPattern {
fn try_from(uid: String) -> Result { let result = match uid.strip_suffix('*') { + Some("") => Ok(IndexUidPattern(uid)), Some(prefix) => IndexUid::from_str(prefix).map(|_| IndexUidPattern(uid)), None => IndexUid::try_from(uid).map(IndexUid::into_inner).map(IndexUidPattern), }; diff --git a/meilisearch/tests/auth/api_keys.rs b/meilisearch/tests/auth/api_keys.rs index 0a14107a8..abc0aea53 100644 --- a/meilisearch/tests/auth/api_keys.rs +++ b/meilisearch/tests/auth/api_keys.rs @@ -386,7 +386,7 @@ async fn error_add_api_key_invalid_index_uids() { meili_snap::snapshot!(code, @"400 Bad Request"); meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###" { - "message": "`invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_). at `.indexes[0]`.", + "message": "`invalid index # / \\name with spaces` is not a valid index uid pattern. Index uid patterns can be an integer or a string containing only alphanumeric characters, hyphens (-), underscores (_), and optionally end with a star (*). at `.indexes[0]`.", "code": "invalid_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid-api-key-indexes" From 421a9cf05e5d70c41edeb64222ab0cfc62a25692 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 16:06:09 +0100 Subject: [PATCH 128/186] provide a new method on the transform to remove documents --- milli/src/snapshot_tests.rs | 20 +- milli/src/update/index_documents/mod.rs | 225 ++++++++++++++++++ milli/src/update/index_documents/transform.rs | 60 ++++- 3 files changed, 302 insertions(+), 3 deletions(-) diff --git a/milli/src/snapshot_tests.rs b/milli/src/snapshot_tests.rs index 49f9fbe92..f7f1a97e6 100644 --- a/milli/src/snapshot_tests.rs +++ b/milli/src/snapshot_tests.rs @@ -6,7 +6,7 @@ use roaring::RoaringBitmap; use crate::facet::FacetType; use crate::heed_codec::facet::{FacetGroupKey, FacetGroupValue}; -use crate::{make_db_snap_from_iter, ExternalDocumentsIds, Index}; +use crate::{make_db_snap_from_iter, obkv_to_json, ExternalDocumentsIds, Index}; #[track_caller] pub fn default_db_snapshot_settings_for_test(name: Option<&str>) -> (insta::Settings, String) { @@ -427,8 +427,26 @@ pub fn snap_settings(index: &Index) -> String { snap } +pub fn snap_documents(index: &Index) -> String { + let mut snap = String::new(); + let rtxn = index.read_txn().unwrap(); + let fields_ids_map = index.fields_ids_map(&rtxn).unwrap(); + let display = fields_ids_map.ids().collect::>(); + + for document in index.all_documents(&rtxn).unwrap() { + let doc = obkv_to_json(&display, &fields_ids_map, document.unwrap().1).unwrap(); + snap.push_str(&serde_json::to_string(&doc).unwrap()); + snap.push('\n'); + } + + snap +} + #[macro_export] macro_rules! 
full_snap_of_db { + ($index:ident, documents) => {{ + $crate::snapshot_tests::snap_documents(&$index) + }}; ($index:ident, settings) => {{ $crate::snapshot_tests::snap_settings(&$index) }}; diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index 3e9edf3a2..87c96818e 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -79,6 +79,7 @@ pub struct IndexDocuments<'t, 'u, 'i, 'a, FP, FA> { progress: FP, should_abort: FA, added_documents: u64, + deleted_documents: u64, } #[derive(Default, Debug, Clone)] @@ -122,6 +123,7 @@ where wtxn, index, added_documents: 0, + deleted_documents: 0, }) } @@ -166,6 +168,30 @@ where Ok((self, Ok(indexed_documents))) } + /// Remove a batch of documents from the current builder. + /// + /// Returns the number of documents deleted from the builder. + pub fn remove_documents( + mut self, + to_delete: Vec, + ) -> Result<(Self, StdResult)> { + // Early return when there is no document to add + if to_delete.is_empty() { + return Ok((self, Ok(0))); + } + + let deleted_documents = self + .transform + .as_mut() + .expect("Invalid document deletion state") + .remove_documents(to_delete, self.wtxn, &self.should_abort)? + as u64; + + self.deleted_documents += deleted_documents; + + Ok((self, Ok(deleted_documents))) + } + #[logging_timer::time("IndexDocuments::{}")] pub fn execute(mut self) -> Result { if self.added_documents == 0 { @@ -1879,4 +1905,203 @@ mod tests { index.add_documents(doc1).unwrap(); } + + #[test] + fn add_and_delete_documents_in_single_transform() { + let index = TempIndex::new(); + let mut wtxn = index.write_txn().unwrap(); + let builder = IndexDocuments::new( + &mut wtxn, + &index, + &index.indexer_config, + index.index_documents_config.clone(), + |_| (), + || false, + ) + .unwrap(); + + let documents = documents!([ + { "id": 1, "doggo": "kevin" }, + { "id": 2, "doggo": { "name": "bob", "age": 20 } }, + { "id": 3, "name": "jean", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"3"); + + let (builder, removed) = builder.remove_documents(vec![S("2")]).unwrap(); + insta::assert_display_snapshot!(removed.unwrap(), @"1"); + + let addition = builder.execute().unwrap(); + insta::assert_debug_snapshot!(addition, @r###" + DocumentAdditionResult { + indexed_documents: 3, + number_of_documents: 2, + } + "###); + wtxn.commit().unwrap(); + + db_snap!(index, documents, @r###" + {"id":1,"doggo":"kevin"} + {"id":3,"name":"jean","age":25} + "###); + } + + #[test] + fn add_update_and_delete_documents_in_single_transform() { + let index = TempIndex::new(); + let mut wtxn = index.write_txn().unwrap(); + let builder = IndexDocuments::new( + &mut wtxn, + &index, + &index.indexer_config, + index.index_documents_config.clone(), + |_| (), + || false, + ) + .unwrap(); + + let documents = documents!([ + { "id": 1, "doggo": "kevin" }, + { "id": 2, "doggo": { "name": "bob", "age": 20 } }, + { "id": 3, "name": "jean", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"3"); + + let documents = documents!([ + { "id": 2, "doggo": { "name": "jean", "age": 20 } }, + { "id": 3, "name": "bob", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"2"); + + let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap(); + 
insta::assert_display_snapshot!(removed.unwrap(), @"2"); + + let addition = builder.execute().unwrap(); + insta::assert_debug_snapshot!(addition, @r###" + DocumentAdditionResult { + indexed_documents: 5, + number_of_documents: 1, + } + "###); + wtxn.commit().unwrap(); + + db_snap!(index, documents, @r###" + {"id":3,"name":"bob","age":25} + "###); + } + + #[test] + fn add_document_and_in_another_transform_update_and_delete_documents() { + let index = TempIndex::new(); + let mut wtxn = index.write_txn().unwrap(); + let builder = IndexDocuments::new( + &mut wtxn, + &index, + &index.indexer_config, + index.index_documents_config.clone(), + |_| (), + || false, + ) + .unwrap(); + + let documents = documents!([ + { "id": 1, "doggo": "kevin" }, + { "id": 2, "doggo": { "name": "bob", "age": 20 } }, + { "id": 3, "name": "jean", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"3"); + + let addition = builder.execute().unwrap(); + insta::assert_debug_snapshot!(addition, @r###" + DocumentAdditionResult { + indexed_documents: 3, + number_of_documents: 3, + } + "###); + wtxn.commit().unwrap(); + + db_snap!(index, documents, @r###" + {"id":1,"doggo":"kevin"} + {"id":2,"doggo":{"name":"bob","age":20}} + {"id":3,"name":"jean","age":25} + "###); + + // A first batch of documents has been inserted + + let mut wtxn = index.write_txn().unwrap(); + let builder = IndexDocuments::new( + &mut wtxn, + &index, + &index.indexer_config, + index.index_documents_config.clone(), + |_| (), + || false, + ) + .unwrap(); + + let documents = documents!([ + { "id": 2, "doggo": { "name": "jean", "age": 20 } }, + { "id": 3, "name": "bob", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"2"); + + let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap(); + insta::assert_display_snapshot!(removed.unwrap(), @"2"); + + let addition = builder.execute().unwrap(); + insta::assert_debug_snapshot!(addition, @r###" + DocumentAdditionResult { + indexed_documents: 2, + number_of_documents: 1, + } + "###); + wtxn.commit().unwrap(); + + db_snap!(index, documents, @r###" + {"id":3,"name":"bob","age":25} + "###); + } + + #[test] + fn delete_document_and_then_add_documents_in_the_same_transform() { + let index = TempIndex::new(); + let mut wtxn = index.write_txn().unwrap(); + let builder = IndexDocuments::new( + &mut wtxn, + &index, + &index.indexer_config, + index.index_documents_config.clone(), + |_| (), + || false, + ) + .unwrap(); + + let (builder, removed) = builder.remove_documents(vec![S("1"), S("2")]).unwrap(); + insta::assert_display_snapshot!(removed.unwrap(), @"0"); + + let documents = documents!([ + { "id": 2, "doggo": { "name": "jean", "age": 20 } }, + { "id": 3, "name": "bob", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"2"); + + let addition = builder.execute().unwrap(); + insta::assert_debug_snapshot!(addition, @r###" + DocumentAdditionResult { + indexed_documents: 2, + number_of_documents: 2, + } + "###); + wtxn.commit().unwrap(); + + db_snap!(index, documents, @r###" + {"id":2,"doggo":{"name":"jean","age":20}} + {"id":3,"name":"bob","age":25} + "###); + } } diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index fe8f06b6c..50d3d4248 100644 --- 
a/milli/src/update/index_documents/transform.rs
+++ b/milli/src/update/index_documents/transform.rs
@@ -159,9 +159,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         FA: Fn() -> bool + Sync,
     {
         let (mut cursor, fields_index) = reader.into_cursor_and_fields_index();
-
         let external_documents_ids = self.index.external_documents_ids(wtxn)?;
-
         let mapping = create_fields_mapping(&mut self.fields_ids_map, &fields_index)?;
 
         let primary_key = cursor.primary_key().to_string();
@@ -322,6 +320,64 @@ impl<'a, 'i> Transform<'a, 'i> {
         Ok(documents_count)
     }
 
+    /// The counterpart of `read_documents` that removes documents that may have been inserted into the transform previously.
+    pub fn remove_documents(
+        &mut self,
+        mut to_remove: Vec,
+        wtxn: &mut heed::RwTxn,
+        should_abort: FA,
+    ) -> Result
+    where
+        FA: Fn() -> bool + Sync,
+    {
+        // there may be duplicates in the documents to remove.
+        to_remove.sort_unstable();
+        to_remove.dedup();
+
+        let external_documents_ids = self.index.external_documents_ids(wtxn)?;
+
+        let mut documents_deleted = 0;
+        for to_remove in to_remove {
+            if should_abort() {
+                return Err(Error::InternalError(InternalError::AbortedIndexation));
+            }
+
+            match self.new_external_documents_ids_builder.entry((*to_remove).into()) {
+                // if the document was added in a previous iteration of the transform we mark it as deleted in the sorters.
+                Entry::Occupied(entry) => {
+                    let doc_id = *entry.get() as u32;
+                    self.original_sorter
+                        .insert(doc_id.to_be_bytes(), &[Operation::Deletion as u8])?;
+                    self.flattened_sorter
+                        .insert(doc_id.to_be_bytes(), &[Operation::Deletion as u8])?;
+
+                    // we must NOT update the list of replaced_documents_ids
+                    // Either:
+                    // 1. It's already in it and there is nothing to do
+                    // 2. It wasn't in it because the document was created by a previous batch and since
+                    //    we're removing it there is nothing to do.
+                    self.new_documents_ids.remove(doc_id);
+                    entry.remove_entry();
+                }
+                Entry::Vacant(entry) => {
+                    // If the document was already in the db we mark it as a `to_delete` document.
+                    // It'll be deleted later. We don't need to push anything to the sorters.
+                    if let Some(docid) = external_documents_ids.get(entry.key()) {
+                        self.replaced_documents_ids.insert(docid);
+                    } else {
+                        // if the document is nowhere to be found, there is nothing to do and we must NOT
+                        // increment the count of documents_deleted
+                        continue;
+                    }
+                }
+            };
+
+            documents_deleted += 1;
+        }
+
+        Ok(documents_deleted)
+    }
+
     // Flatten a document from the fields ids map contained in self and insert the new
     // created fields. Returns `None` if the document doesn't need to be flattened.
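     // (deletion entries never go through this flattening step: they carry only their `Operation` byte and no obkv payload)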
    // Flatten a document from the fields ids map contained in self and insert the new
    // created fields. Returns `None` if the document doesn't need to be flattened.
    fn flatten_from_fields_ids_map(&mut self, obkv: KvReader<FieldId>) -> Result<Option<Vec<u8>>> {

From 2db6347686c70caf2d851f000e937da6aa326d26 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Wed, 8 Feb 2023 18:07:59 +0100
Subject: [PATCH 129/186] update the autobatcher to batch the addition and deletion together

---
 index-scheduler/src/autobatcher.rs | 302 +++++++++++++++++------
 index-scheduler/src/batch.rs       |  10 +-
 2 files changed, 183 insertions(+), 129 deletions(-)

diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs
index e1e48ab90..bfe6be9ad 100644
--- a/index-scheduler/src/autobatcher.rs
+++ b/index-scheduler/src/autobatcher.rs
@@ -88,11 +88,11 @@ pub enum BatchKind {
     DocumentClear {
         ids: Vec<TaskId>,
     },
-    DocumentImport {
+    DocumentOperation {
         method: IndexDocumentsMethod,
         allow_index_creation: bool,
         primary_key: Option<String>,
-        import_ids: Vec<TaskId>,
+        operation_ids: Vec<TaskId>,
     },
     DocumentDeletion {
         deletion_ids: Vec<TaskId>,
     },
@@ -102,12 +102,12 @@
         allow_index_creation: bool,
         settings_ids: Vec<TaskId>,
     },
-    SettingsAndDocumentImport {
+    SettingsAndDocumentOperation {
         settings_ids: Vec<TaskId>,
         method: IndexDocumentsMethod,
         allow_index_creation: bool,
         primary_key: Option<String>,
-        import_ids: Vec<TaskId>,
+        operation_ids: Vec<TaskId>,
     },
     Settings {
         allow_index_creation: bool,
@@ -131,9 +131,9 @@ impl BatchKind {
     #[rustfmt::skip]
     fn allow_index_creation(&self) -> Option<bool> {
         match self {
-            BatchKind::DocumentImport { allow_index_creation, .. }
+            BatchKind::DocumentOperation { allow_index_creation, .. }
             | BatchKind::ClearAndSettings { allow_index_creation, .. }
-            | BatchKind::SettingsAndDocumentImport { allow_index_creation, .. }
+            | BatchKind::SettingsAndDocumentOperation { allow_index_creation, .. }
             | BatchKind::Settings { allow_index_creation, .. } => Some(*allow_index_creation),
             _ => None,
         }
@@ -141,8 +141,8 @@ impl BatchKind {
     fn primary_key(&self) -> Option<Option<&str>> {
         match self {
-            BatchKind::DocumentImport { primary_key, .. }
-            | BatchKind::SettingsAndDocumentImport { primary_key, .. } => {
+            BatchKind::DocumentOperation { primary_key, .. }
+            | BatchKind::SettingsAndDocumentOperation { primary_key, .. } => {
                 Some(primary_key.as_deref())
             }
             _ => None,
@@ -173,22 +173,22 @@ impl BatchKind {
                 if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
             {
                 (
-                    Continue(BatchKind::DocumentImport {
+                    Continue(BatchKind::DocumentOperation {
                         method,
                         allow_index_creation,
                         primary_key: pk,
-                        import_ids: vec![task_id],
+                        operation_ids: vec![task_id],
                     }),
                     allow_index_creation,
                 )
            }
            // if the primary key set in the task was different from ours we should stop and make this batch fail asap.
            K::DocumentImport { method, allow_index_creation, primary_key } => (
-                Break(BatchKind::DocumentImport {
+                Break(BatchKind::DocumentOperation {
                    method,
                    allow_index_creation,
                    primary_key,
-                    import_ids: vec![task_id],
+                    operation_ids: vec![task_id],
                }),
                allow_index_creation,
            ),
@@ -249,7 +249,7 @@ impl BatchKind {
            (
                BatchKind::DocumentClear { mut ids }
                | BatchKind::DocumentDeletion { deletion_ids: mut ids }
-                | BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids }
+                | BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, operation_ids: mut ids }
                | BatchKind::Settings { allow_index_creation: _, settings_ids: mut ids },
                K::IndexDeletion,
            ) => {
                ids.push(id);
            }
            (
                BatchKind::ClearAndSettings { settings_ids: mut ids, allow_index_creation: _, mut other }
-                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, primary_key: _, settings_ids: mut other },
+                | BatchKind::SettingsAndDocumentOperation { operation_ids: mut ids, method: _, allow_index_creation: _, primary_key: _, settings_ids: mut other },
                K::IndexDeletion,
            ) => {
                ids.push(id);
@@ -278,7 +278,7 @@ impl BatchKind {
                K::DocumentImport { .. } | K::Settings { .. },
            ) => Break(this),
            (
-                BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids },
+                BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, operation_ids: mut ids },
                K::DocumentClear,
            ) => {
                ids.push(id);
@@ -287,54 +287,77 @@ impl BatchKind {
            // we can autobatch the same kind of document additions / updates
            (
-                BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                BatchKind::DocumentOperation { method: ReplaceDocuments, allow_index_creation, primary_key: _, operation_ids: mut import_ids },
                K::DocumentImport { method: ReplaceDocuments, primary_key: pk, .. },
            ) => {
                import_ids.push(id);
-                Continue(BatchKind::DocumentImport {
+                Continue(BatchKind::DocumentOperation {
                    method: ReplaceDocuments,
                    allow_index_creation,
-                    import_ids,
+                    operation_ids: import_ids,
                    primary_key: pk,
                })
            }
            (
-                BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                BatchKind::DocumentOperation { method: UpdateDocuments, allow_index_creation, primary_key: _, operation_ids: mut import_ids },
                K::DocumentImport { method: UpdateDocuments, primary_key: pk, .. },
            ) => {
                import_ids.push(id);
-                Continue(BatchKind::DocumentImport {
+                Continue(BatchKind::DocumentOperation {
                    method: UpdateDocuments,
                    allow_index_creation,
                    primary_key: pk,
-                    import_ids,
+                    operation_ids: import_ids,
                })
            }
+            (
+                BatchKind::DocumentOperation { method, allow_index_creation, primary_key, mut operation_ids },
+                K::DocumentDeletion,
+            ) => {
+                operation_ids.push(id);
+                Continue(BatchKind::DocumentOperation {
+                    method,
+                    allow_index_creation,
+                    primary_key,
+                    operation_ids,
+                })
+            }
            // but we can't autobatch documents if it's not the same kind
            // this match branch MUST be AFTER the previous one
            (
-                this @ BatchKind::DocumentImport { .. },
-                K::DocumentDeletion | K::DocumentImport { .. },
+                this @ BatchKind::DocumentOperation { .. },
+                K::DocumentImport { .. },
            ) => Break(this),
            (
-                BatchKind::DocumentImport { method, allow_index_creation, primary_key, import_ids },
+                BatchKind::DocumentOperation { method, allow_index_creation, primary_key, operation_ids: import_ids },
                K::Settings { ..
},
-            ) => Continue(BatchKind::SettingsAndDocumentImport {
+            ) => Continue(BatchKind::SettingsAndDocumentOperation {
                settings_ids: vec![id],
                method,
                allow_index_creation,
                primary_key,
-                import_ids,
+                operation_ids: import_ids,
            }),
            (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentClear) => {
                deletion_ids.push(id);
                Continue(BatchKind::DocumentClear { ids: deletion_ids })
            }
-            (this @ BatchKind::DocumentDeletion { .. }, K::DocumentImport { .. }) => Break(this),
+            (
+                BatchKind::DocumentDeletion { mut deletion_ids },
+                K::DocumentImport { method, allow_index_creation, primary_key }
+            ) => {
+                deletion_ids.push(id);
+
+                Continue(BatchKind::DocumentOperation {
+                    method,
+                    allow_index_creation,
+                    primary_key,
+                    operation_ids: deletion_ids,
+                })
+            }
            (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentDeletion) => {
                deletion_ids.push(id);
                Continue(BatchKind::DocumentDeletion { deletion_ids })
@@ -403,7 +426,7 @@ impl BatchKind {
                })
            }
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation, primary_key: _ },
+                BatchKind::SettingsAndDocumentOperation { settings_ids, method: _, operation_ids: mut other, allow_index_creation, primary_key: _ },
                K::DocumentClear,
            ) => {
                other.push(id);
@@ -415,48 +438,48 @@
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation, primary_key: _},
+                BatchKind::SettingsAndDocumentOperation { settings_ids, method: ReplaceDocuments, operation_ids: mut import_ids, allow_index_creation, primary_key: _},
                K::DocumentImport { method: ReplaceDocuments, primary_key: pk2, .. },
            ) => {
                import_ids.push(id);
-                Continue(BatchKind::SettingsAndDocumentImport {
+                Continue(BatchKind::SettingsAndDocumentOperation {
                    settings_ids,
                    method: ReplaceDocuments,
                    allow_index_creation,
                    primary_key: pk2,
-                    import_ids,
+                    operation_ids: import_ids,
                })
            }
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
+                BatchKind::SettingsAndDocumentOperation { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, operation_ids: mut import_ids },
                K::DocumentImport { method: UpdateDocuments, primary_key: pk2, .. },
            ) => {
                import_ids.push(id);
-                Continue(BatchKind::SettingsAndDocumentImport {
+                Continue(BatchKind::SettingsAndDocumentOperation {
                    settings_ids,
                    method: UpdateDocuments,
                    allow_index_creation,
                    primary_key: pk2,
-                    import_ids,
+                    operation_ids: import_ids,
                })
            }
            // But we can't batch a settings and a doc op with another doc op
            // this MUST be AFTER the two previous branches
            (
-                this @ BatchKind::SettingsAndDocumentImport { .. },
+                this @ BatchKind::SettingsAndDocumentOperation { .. },
                K::DocumentDeletion | K::DocumentImport { .. },
            ) => Break(this),
            (
-                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation,primary_key, import_ids },
+                BatchKind::SettingsAndDocumentOperation { mut settings_ids, method, allow_index_creation,primary_key, operation_ids: import_ids },
                K::Settings { .. },
            ) => {
                settings_ids.push(id);
-                Continue(BatchKind::SettingsAndDocumentImport {
+                Continue(BatchKind::SettingsAndDocumentOperation {
                    settings_ids,
                    method,
                    allow_index_creation,
                    primary_key,
-                    import_ids,
+                    operation_ids: import_ids,
                })
            }
            (
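// Taken together, the two new `DocumentDeletion` arms above give the following
// behaviour (an illustrative sketch, with made-up task ids):
//
//     // an addition followed by a deletion accumulates into one batch:
//     //   [import(0), deletion(1)]
//     //     => DocumentOperation { method, allow_index_creation, primary_key, operation_ids: [0, 1] }
//
//     // and a deletion followed by an addition upgrades the deletion batch
//     // into the same shape, taking its method and flags from the import task:
//     //   [deletion(0), import(1)]
//     //     => DocumentOperation { method, allow_index_creation, primary_key, operation_ids: [0, 1] }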
@@ -588,29 +611,29 @@ mod tests {
    fn autobatch_simple_operation_together() {
        // we can autobatch one or multiple `ReplaceDocuments` together.
        // if the index exists.
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, false , None), doc_imp(ReplaceDocuments, false , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, false , None), doc_imp(ReplaceDocuments, false , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))");

        // if it doesn't exist.
- debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); // we can autobatch one or multiple `UpdateDocuments` together. // if the index exists. 
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))");

        // if it doesn't exist.
- debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))"); // we can autobatch one or multiple DocumentDeletion together debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); @@ -628,56 +651,87 @@ mod tests { debug_snapshot!(autobatch_from(false,None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))"); debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))"); debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))"); + + // We can autobatch document addition with document deletion + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + 
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + // And the other way around + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); + 
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, 
false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); } #[test] fn simple_document_operation_dont_autobatch_with_other() { // addition, updates and deletion can't batch together - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: 
None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); } #[test] fn document_addition_batch_with_settings() { // simple case - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); // multiple settings and doc addition - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), 
doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); // addition and setting unordered - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 2] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 2] }, true))"); // We ensure this kind of batch doesn't batch with forbidden operations - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: 
UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), 
settings(true), idx_swap()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
    }

    #[test]
@@ -789,67 +843,67 @@ mod tests {
        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
        // The third and final case is when the first task doesn't create an index but is directly followed by a task creating an index. In this case we can't batch with what
        // follows because we first need to process the erroneous batch.
-        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
    }

    #[test]
    fn allowed_and_disallowed_index_creation() {
        // `DocumentImport` tasks that allow index creation can't be mixed with tasks that don't, except when the index already exists.
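        // A sketch of the rule asserted below: the first task in the batch fixes
        // `allow_index_creation` for the whole batch, and mixing is only tolerated
        // when the index already exists:
        //
        //     // index exists: the second task is absorbed despite the differing flag
        //     //   [doc_imp(.., false, ..), doc_imp(.., true, ..)] => operation_ids: [0, 1]
        //     // index missing: the batch stops after the first task
        //     //   [doc_imp(.., false, ..), doc_imp(.., true, ..)] => operation_ids: [0]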
- debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: 
[1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
    }

    #[test]
    fn autobatch_primary_key() {
        // ==> If I have a pk
        // With a single update
-        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
-        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
+        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
+        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);

        // With multiple updates
-        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments,
true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other"))]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), 
doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: 
true, primary_key: Some("other"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); // 
==> If I don't have a pk // With a single update - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###); // With a multiple updates - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###); } } diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index 8a479a12b..50833c0b7 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -206,7 +206,7 @@ impl IndexScheduler { }, must_create_index, })), - BatchKind::DocumentImport { method, import_ids, .. } => { + BatchKind::DocumentOperation { method, operation_ids: import_ids, .. 
} => { let tasks = self.get_existing_tasks(rtxn, import_ids)?; let primary_key = match &tasks[0].kind { KindWithContent::DocumentAdditionOrUpdate { primary_key, .. } => { @@ -322,12 +322,12 @@ impl IndexScheduler { must_create_index, })) } - BatchKind::SettingsAndDocumentImport { + BatchKind::SettingsAndDocumentOperation { settings_ids, method, allow_index_creation, primary_key, - import_ids, + operation_ids: import_ids, } => { let settings = self.create_next_batch_index( rtxn, @@ -339,11 +339,11 @@ impl IndexScheduler { let document_import = self.create_next_batch_index( rtxn, index_uid.clone(), - BatchKind::DocumentImport { + BatchKind::DocumentOperation { method, allow_index_creation, primary_key, - import_ids, + operation_ids: import_ids, }, must_create_index, )?; From 67dda0678f8fc06f3f376f0ebb12523bae6239bb Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 18:10:59 +0100 Subject: [PATCH 130/186] cleanup the autobatcher a little bit --- index-scheduler/src/autobatcher.rs | 44 +++++++++++++++--------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs index bfe6be9ad..9948bfc5b 100644 --- a/index-scheduler/src/autobatcher.rs +++ b/index-scheduler/src/autobatcher.rs @@ -278,36 +278,36 @@ impl BatchKind { K::DocumentImport { .. } | K::Settings { .. }, ) => Break(this), ( - BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, operation_ids: mut ids }, + BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, mut operation_ids }, K::DocumentClear, ) => { - ids.push(id); - Continue(BatchKind::DocumentClear { ids }) + operation_ids.push(id); + Continue(BatchKind::DocumentClear { ids: operation_ids }) } // we can autobatch the same kind of document additions / updates ( - BatchKind::DocumentOperation { method: ReplaceDocuments, allow_index_creation, primary_key: _, operation_ids: mut import_ids }, + BatchKind::DocumentOperation { method: ReplaceDocuments, allow_index_creation, primary_key: _, mut operation_ids }, K::DocumentImport { method: ReplaceDocuments, primary_key: pk, .. }, ) => { - import_ids.push(id); + operation_ids.push(id); Continue(BatchKind::DocumentOperation { method: ReplaceDocuments, allow_index_creation, - operation_ids: import_ids, + operation_ids, primary_key: pk, }) } ( - BatchKind::DocumentOperation { method: UpdateDocuments, allow_index_creation, primary_key: _, operation_ids: mut import_ids }, + BatchKind::DocumentOperation { method: UpdateDocuments, allow_index_creation, primary_key: _, mut operation_ids }, K::DocumentImport { method: UpdateDocuments, primary_key: pk, .. }, ) => { - import_ids.push(id); + operation_ids.push(id); Continue(BatchKind::DocumentOperation { method: UpdateDocuments, allow_index_creation, primary_key: pk, - operation_ids: import_ids, + operation_ids, }) } ( @@ -331,14 +331,14 @@ impl BatchKind { ) => Break(this), ( - BatchKind::DocumentOperation { method, allow_index_creation, primary_key, operation_ids: import_ids }, + BatchKind::DocumentOperation { method, allow_index_creation, primary_key, operation_ids }, K::Settings { .. 
}, ) => Continue(BatchKind::SettingsAndDocumentOperation { settings_ids: vec![id], method, allow_index_creation, primary_key, - operation_ids: import_ids, + operation_ids, }), (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentClear) => { @@ -426,41 +426,41 @@ impl BatchKind { }) } ( - BatchKind::SettingsAndDocumentOperation { settings_ids, method: _, operation_ids: mut other, allow_index_creation, primary_key: _ }, + BatchKind::SettingsAndDocumentOperation { settings_ids, method: _, mut operation_ids, allow_index_creation, primary_key: _ }, K::DocumentClear, ) => { - other.push(id); + operation_ids.push(id); Continue(BatchKind::ClearAndSettings { settings_ids, - other, + other: operation_ids, allow_index_creation, }) } ( - BatchKind::SettingsAndDocumentOperation { settings_ids, method: ReplaceDocuments, operation_ids: mut import_ids, allow_index_creation, primary_key: _}, + BatchKind::SettingsAndDocumentOperation { settings_ids, method: ReplaceDocuments, mut operation_ids, allow_index_creation, primary_key: _}, K::DocumentImport { method: ReplaceDocuments, primary_key: pk2, .. }, ) => { - import_ids.push(id); + operation_ids.push(id); Continue(BatchKind::SettingsAndDocumentOperation { settings_ids, method: ReplaceDocuments, allow_index_creation, primary_key: pk2, - operation_ids: import_ids, + operation_ids, }) } ( - BatchKind::SettingsAndDocumentOperation { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, operation_ids: mut import_ids }, + BatchKind::SettingsAndDocumentOperation { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, mut operation_ids }, K::DocumentImport { method: UpdateDocuments, primary_key: pk2, .. }, ) => { - import_ids.push(id); + operation_ids.push(id); Continue(BatchKind::SettingsAndDocumentOperation { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: pk2, - operation_ids: import_ids, + operation_ids, }) } // But we can't batch a settings and a doc op with another doc op @@ -470,7 +470,7 @@ impl BatchKind { K::DocumentDeletion | K::DocumentImport { .. }, ) => Break(this), ( - BatchKind::SettingsAndDocumentOperation { mut settings_ids, method, allow_index_creation,primary_key, operation_ids: import_ids }, + BatchKind::SettingsAndDocumentOperation { mut settings_ids, method, allow_index_creation,primary_key, operation_ids }, K::Settings { .. 
}, ) => { settings_ids.push(id); @@ -479,7 +479,7 @@ impl BatchKind { method, allow_index_creation, primary_key, - operation_ids: import_ids, + operation_ids, }) } ( From 860c993ef782d12b5111cdbb6815eb3c69b558ce Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 20:53:19 +0100 Subject: [PATCH 131/186] Handle the autobatching of deletion and addition in the scheduler --- index-scheduler/src/batch.rs | 182 +++++++++++------- index-scheduler/src/lib.rs | 94 +++++++++ .../after_processing_the_batch.snap | 42 ++++ .../documents.snap | 9 + .../registered_the_first_task.snap | 37 ++++ .../registered_the_second_task.snap | 40 ++++ .../registered_the_first_task.snap | 36 ++++ .../registered_the_second_task.snap | 40 ++++ 8 files changed, 409 insertions(+), 71 deletions(-) create mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index 50833c0b7..c3c4229a5 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -86,15 +86,27 @@ pub(crate) enum Batch { }, } +#[derive(Debug)] +pub(crate) enum DocumentOperation { + Add(Uuid), + Delete(Vec), +} + +#[derive(Debug)] +pub(crate) enum DocumentOperationResult { + Add(DocumentAdditionResult), + Delete(DocumentDeletionResult), +} + /// A [batch](Batch) that combines multiple tasks operating on an index. #[derive(Debug)] pub(crate) enum IndexOperation { - DocumentImport { + DocumentOperation { index_uid: String, primary_key: Option, method: IndexDocumentsMethod, documents_counts: Vec, - content_files: Vec, + operations: Vec, tasks: Vec, }, DocumentDeletion { @@ -121,13 +133,13 @@ pub(crate) enum IndexOperation { settings: Vec<(bool, Settings)>, settings_tasks: Vec, }, - SettingsAndDocumentImport { + SettingsAndDocumentOperation { index_uid: String, primary_key: Option, method: IndexDocumentsMethod, documents_counts: Vec, - content_files: Vec, + operations: Vec, document_import_tasks: Vec, // The boolean indicates if it's a settings deletion or creation. @@ -149,13 +161,13 @@ impl Batch { tasks.iter().map(|task| task.uid).collect() } Batch::IndexOperation { op, .. } => match op { - IndexOperation::DocumentImport { tasks, .. } + IndexOperation::DocumentOperation { tasks, .. } | IndexOperation::DocumentDeletion { tasks, .. } | IndexOperation::Settings { tasks, .. } | IndexOperation::DocumentClear { tasks, .. } => { tasks.iter().map(|task| task.uid).collect() } - IndexOperation::SettingsAndDocumentImport { + IndexOperation::SettingsAndDocumentOperation { document_import_tasks: tasks, settings_tasks: other, .. @@ -174,12 +186,12 @@ impl Batch { impl IndexOperation { pub fn index_uid(&self) -> &str { match self { - IndexOperation::DocumentImport { index_uid, .. } + IndexOperation::DocumentOperation { index_uid, .. } | IndexOperation::DocumentDeletion { index_uid, .. 
} | IndexOperation::DocumentClear { index_uid, .. } | IndexOperation::Settings { index_uid, .. } | IndexOperation::DocumentClearAndSetting { index_uid, .. } - | IndexOperation::SettingsAndDocumentImport { index_uid, .. } => index_uid, + | IndexOperation::SettingsAndDocumentOperation { index_uid, .. } => index_uid, } } } @@ -206,17 +218,22 @@ impl IndexScheduler { }, must_create_index, })), - BatchKind::DocumentOperation { method, operation_ids: import_ids, .. } => { - let tasks = self.get_existing_tasks(rtxn, import_ids)?; - let primary_key = match &tasks[0].kind { - KindWithContent::DocumentAdditionOrUpdate { primary_key, .. } => { - primary_key.clone() - } - _ => unreachable!(), - }; + BatchKind::DocumentOperation { method, operation_ids, .. } => { + let tasks = self.get_existing_tasks(rtxn, operation_ids)?; + let primary_key = tasks + .iter() + .find_map(|task| match task.kind { + KindWithContent::DocumentAdditionOrUpdate { ref primary_key, .. } => { + // we want to stop on the first document addition + Some(primary_key.clone()) + } + KindWithContent::DocumentDeletion { .. } => None, + _ => unreachable!(), + }) + .flatten(); let mut documents_counts = Vec::new(); - let mut content_files = Vec::new(); + let mut operations = Vec::new(); for task in tasks.iter() { match task.kind { @@ -226,19 +243,23 @@ impl IndexScheduler { .. } => { documents_counts.push(documents_count); - content_files.push(content_file); + operations.push(DocumentOperation::Add(content_file)); + } + KindWithContent::DocumentDeletion { ref documents_ids, .. } => { + documents_counts.push(documents_ids.len() as u64); + operations.push(DocumentOperation::Delete(documents_ids.clone())); } _ => unreachable!(), } } Ok(Some(Batch::IndexOperation { - op: IndexOperation::DocumentImport { + op: IndexOperation::DocumentOperation { index_uid, primary_key, method, documents_counts, - content_files, + operations, tasks, }, must_create_index, @@ -327,7 +348,7 @@ impl IndexScheduler { method, allow_index_creation, primary_key, - operation_ids: import_ids, + operation_ids, } => { let settings = self.create_next_batch_index( rtxn, @@ -343,7 +364,7 @@ impl IndexScheduler { method, allow_index_creation, primary_key, - operation_ids: import_ids, + operation_ids, }, must_create_index, )?; @@ -352,10 +373,10 @@ impl IndexScheduler { ( Some(Batch::IndexOperation { op: - IndexOperation::DocumentImport { + IndexOperation::DocumentOperation { primary_key, documents_counts, - content_files, + operations, tasks: document_import_tasks, .. }, @@ -366,12 +387,12 @@ impl IndexScheduler { .. 
}), ) => Ok(Some(Batch::IndexOperation { - op: IndexOperation::SettingsAndDocumentImport { + op: IndexOperation::SettingsAndDocumentOperation { index_uid, primary_key, method, documents_counts, - content_files, + operations, document_import_tasks, settings, settings_tasks, @@ -987,12 +1008,12 @@ impl IndexScheduler { Ok(tasks) } - IndexOperation::DocumentImport { + IndexOperation::DocumentOperation { index_uid: _, primary_key, method, documents_counts, - content_files, + operations, mut tasks, } => { let mut primary_key_has_been_set = false; @@ -1037,26 +1058,68 @@ impl IndexScheduler { || must_stop_processing.get(), )?; - let mut results = Vec::new(); - for content_uuid in content_files.into_iter() { - let content_file = self.file_store.get_update(content_uuid)?; - let reader = DocumentsBatchReader::from_reader(content_file) - .map_err(milli::Error::from)?; - let (new_builder, user_result) = builder.add_documents(reader)?; - builder = new_builder; + for (operation, task) in operations.into_iter().zip(tasks.iter_mut()) { + match operation { + DocumentOperation::Add(content_uuid) => { + let content_file = self.file_store.get_update(content_uuid)?; + let reader = DocumentsBatchReader::from_reader(content_file) + .map_err(milli::Error::from)?; + let (new_builder, user_result) = builder.add_documents(reader)?; + builder = new_builder; - let user_result = match user_result { - Ok(count) => Ok(DocumentAdditionResult { - indexed_documents: count, - number_of_documents: count, // TODO: this is wrong, we should use the value stored in the Details. - }), - Err(e) => Err(milli::Error::from(e)), - }; + let Some(Details::DocumentAdditionOrUpdate { received_documents, .. }) = task.details + // In the case of a `documentAdditionOrUpdate` the details MUST be set + else { unreachable!(); }; - results.push(user_result); + match user_result { + Ok(count) => { + task.status = Status::Succeeded; + task.details = Some(Details::DocumentAdditionOrUpdate { + received_documents, + indexed_documents: Some(count), + }) + } + Err(e) => { + task.status = Status::Failed; + task.details = Some(Details::DocumentAdditionOrUpdate { + received_documents, + indexed_documents: Some(0), + }); + task.error = Some(milli::Error::from(e).into()); + } + } + } + DocumentOperation::Delete(document_ids) => { + let (new_builder, user_result) = + builder.remove_documents(document_ids)?; + builder = new_builder; + + let Some(Details::DocumentDeletion { provided_ids, .. 
}) = task.details + // In the case of a `documentDeletion` the details MUST be set + else { unreachable!(); }; + + match user_result { + Ok(count) => { + task.status = Status::Succeeded; + task.details = Some(Details::DocumentDeletion { + provided_ids, + deleted_documents: Some(count), + }); + } + Err(e) => { + task.status = Status::Failed; + task.details = Some(Details::DocumentDeletion { + provided_ids, + deleted_documents: Some(0), + }); + task.error = Some(milli::Error::from(e).into()); + } + } + } + } } - if results.iter().any(|res| res.is_ok()) { + if !tasks.iter().all(|res| res.error.is_some()) { let addition = builder.execute()?; info!("document addition done: {:?}", addition); } else if primary_key_has_been_set { @@ -1071,29 +1134,6 @@ impl IndexScheduler { )?; } - for (task, (ret, count)) in - tasks.iter_mut().zip(results.into_iter().zip(documents_counts)) - { - match ret { - Ok(DocumentAdditionResult { indexed_documents, number_of_documents }) => { - task.status = Status::Succeeded; - task.details = Some(Details::DocumentAdditionOrUpdate { - received_documents: number_of_documents, - indexed_documents: Some(indexed_documents), - }); - } - Err(error) => { - task.status = Status::Failed; - task.details = Some(Details::DocumentAdditionOrUpdate { - received_documents: count, - // if there was an error we indexed 0 documents. - indexed_documents: Some(0), - }); - task.error = Some(error.into()) - } - } - } - Ok(tasks) } IndexOperation::DocumentDeletion { index_uid: _, documents, mut tasks } => { @@ -1136,12 +1176,12 @@ impl IndexScheduler { Ok(tasks) } - IndexOperation::SettingsAndDocumentImport { + IndexOperation::SettingsAndDocumentOperation { index_uid, primary_key, method, documents_counts, - content_files, + operations, document_import_tasks, settings, settings_tasks, @@ -1159,12 +1199,12 @@ impl IndexScheduler { let mut import_tasks = self.apply_index_operation( index_wtxn, index, - IndexOperation::DocumentImport { + IndexOperation::DocumentOperation { index_uid, primary_key, method, documents_counts, - content_files, + operations, tasks: document_import_tasks, }, )?; diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 387dac2d0..f1b177cb2 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -1679,6 +1679,100 @@ mod tests { snapshot!(snapshot_index_scheduler(&index_scheduler), name: "both_task_succeeded"); } + #[test] + fn document_addition_and_document_deletion() { + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); + + let content = r#"[ + { "id": 1, "doggo": "jean bob" }, + { "id": 2, "catto": "jorts" }, + { "id": 3, "doggo": "bork" } + ]"#; + + let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap(); + let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap(); + file.persist().unwrap(); + index_scheduler + .register(KindWithContent::DocumentAdditionOrUpdate { + index_uid: S("doggos"), + primary_key: Some(S("id")), + method: ReplaceDocuments, + content_file: uuid, + documents_count, + allow_index_creation: true, + }) + .unwrap(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); + index_scheduler + .register(KindWithContent::DocumentDeletion { + index_uid: S("doggos"), + documents_ids: vec![S("1"), S("2")], + }) + .unwrap(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); + + handle.advance_one_successful_batch(); // The addition AND deletion should've been batched together + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_batch"); + + let index = index_scheduler.index("doggos").unwrap(); + let rtxn = index.read_txn().unwrap(); + let field_ids_map = index.fields_ids_map(&rtxn).unwrap(); + let field_ids = field_ids_map.ids().collect::<Vec<_>>(); + let documents = index + .all_documents(&rtxn) + .unwrap() + .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap()) + .collect::<Vec<_>>(); + snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents"); + } + + #[test] + fn document_deletion_and_document_addition() { + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); + index_scheduler + .register(KindWithContent::DocumentDeletion { + index_uid: S("doggos"), + documents_ids: vec![S("1"), S("2")], + }) + .unwrap(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); + + let content = r#"[ + { "id": 1, "doggo": "jean bob" }, + { "id": 2, "catto": "jorts" }, + { "id": 3, "doggo": "bork" } + ]"#; + + let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap(); + let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap(); + file.persist().unwrap(); + index_scheduler + .register(KindWithContent::DocumentAdditionOrUpdate { + index_uid: S("doggos"), + primary_key: Some(S("id")), + method: ReplaceDocuments, + content_file: uuid, + documents_count, + allow_index_creation: true, + }) + .unwrap(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); + + handle.advance_one_successful_batch(); // The deletion AND addition should've been batched together + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_batch"); + + let index = index_scheduler.index("doggos").unwrap(); + let rtxn = index.read_txn().unwrap(); + let field_ids_map = index.fields_ids_map(&rtxn).unwrap(); + let field_ids = field_ids_map.ids().collect::<Vec<_>>(); + let documents = index + .all_documents(&rtxn) + .unwrap() + .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap()) + .collect::<Vec<_>>(); + snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents"); + } + #[test] fn do_not_batch_task_of_different_indexes() { let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
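The loop above zips `operations` with `tasks` so each task's `details` and `error` are settled the moment its operation is applied, and a single `builder.execute()` call then commits the whole mixed batch. As a minimal sketch of that dispatch shape (hedged: `Op` and `MockBuilder` below are illustrative stand-ins, not milli's real `DocumentOperation` or `IndexDocuments` types):

    // Hypothetical stand-ins for the patch's DocumentOperation and builder.
    enum Op {
        Add(Vec<String>),    // document payloads to index
        Delete(Vec<String>), // external document ids to remove
    }

    #[derive(Default)]
    struct MockBuilder {
        log: Vec<String>,
    }

    impl MockBuilder {
        fn add_documents(&mut self, docs: Vec<String>) -> usize {
            self.log.push(format!("add {}", docs.len()));
            docs.len()
        }
        fn remove_documents(&mut self, ids: Vec<String>) -> usize {
            self.log.push(format!("remove {}", ids.len()));
            ids.len()
        }
        // One commit point for the whole batch, like builder.execute() above.
        fn execute(self) -> Vec<String> {
            self.log
        }
    }

    fn apply(ops: Vec<Op>) -> Vec<String> {
        let mut builder = MockBuilder::default();
        for op in ops {
            // The real code records per-task success or failure right here,
            // instead of reconciling a results vector after the fact.
            match op {
                Op::Add(docs) => {
                    builder.add_documents(docs);
                }
                Op::Delete(ids) => {
                    builder.remove_documents(ids);
                }
            }
        }
        builder.execute()
    }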
diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap new file mode 100644 index 000000000..f70496b81 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap @@ -0,0 +1,42 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} +1 {uid: 1, status: succeeded, details: { received_document_ids: 2, deleted_documents: Some(2) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} +---------------------------------------------------------------------- +###
Status: +enqueued [] +succeeded [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +"documentDeletion" [1,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,1,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap new file mode 100644 index 000000000..2b56b71d1 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap @@ -0,0 +1,9 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "id": 3, + "doggo": "bork" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap new file mode 100644 index 000000000..35dc0b41a --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap 
b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap new file mode 100644 index 000000000..bd65a6d99 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap @@ -0,0 +1,40 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +"documentDeletion" [1,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap new file mode 100644 index 000000000..9356e6dba --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: 
+---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap new file mode 100644 index 000000000..89e341184 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap @@ -0,0 +1,40 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} +1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,] +"documentDeletion" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + From 93f130a400b0ff9d4c22c0ee2d8801f85fbd14aa Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 20:57:35 +0100 Subject: [PATCH 132/186] fix all warnings --- index-scheduler/src/batch.rs | 11 ++--------- .../index_documents/helpers/merge_functions.rs | 15 --------------- milli/src/update/index_documents/helpers/mod.rs | 2 +- milli/src/update/index_documents/transform.rs | 4 +--- 4 files changed, 4 insertions(+), 28 deletions(-) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index c3c4229a5..7ca3bfb20 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -28,8 +28,7 @@ use meilisearch_types::heed::{RoTxn, RwTxn}; use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader}; use meilisearch_types::milli::heed::CompactionOption; use meilisearch_types::milli::update::{ - DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod, - Settings as MilliSettings, + DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod, Settings as MilliSettings, }; use meilisearch_types::milli::{self, BEU32}; use 
meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked}; @@ -92,12 +91,6 @@ pub(crate) enum DocumentOperation { Delete(Vec<String>), } -#[derive(Debug)] -pub(crate) enum DocumentOperationResult { - Add(DocumentAdditionResult), - Delete(DocumentDeletionResult), -} - /// A [batch](Batch) that combines multiple tasks operating on an index. #[derive(Debug)] pub(crate) enum IndexOperation { @@ -1012,7 +1005,7 @@ impl IndexScheduler { index_uid: _, primary_key, method, - documents_counts, + documents_counts: _, operations, mut tasks, } => { diff --git a/milli/src/update/index_documents/helpers/merge_functions.rs b/milli/src/update/index_documents/helpers/merge_functions.rs index 37af7ab6a..6e3aa7ec7 100644 --- a/milli/src/update/index_documents/helpers/merge_functions.rs +++ b/milli/src/update/index_documents/helpers/merge_functions.rs @@ -57,21 +57,6 @@ pub fn keep_latest_obkv<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result<Cow<'a, [u8]>> { -pub fn merge_obkvs<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result<Cow<'a, [u8]>> { - Ok(obkvs - .iter() - .cloned() - .reduce(|acc, current| { - let first = obkv::KvReader::new(&acc); - let second = obkv::KvReader::new(&current); - let mut buffer = Vec::new(); - merge_two_obkvs(first, second, &mut buffer); - Cow::from(buffer) - }) - .unwrap()) -} - pub fn merge_two_obkvs(base: obkv::KvReaderU16, update: obkv::KvReaderU16, buffer: &mut Vec<u8>) { use itertools::merge_join_by; use itertools::EitherOrBoth::{Both, Left, Right}; diff --git a/milli/src/update/index_documents/helpers/mod.rs b/milli/src/update/index_documents/helpers/mod.rs index a496ccd6e..3a25851e4 100644 --- a/milli/src/update/index_documents/helpers/mod.rs +++ b/milli/src/update/index_documents/helpers/mod.rs @@ -13,7 +13,7 @@ pub use grenad_helpers::{ GrenadParameters, MergeableReader, }; pub use merge_functions::{ - concat_u32s_array, keep_first, keep_latest_obkv, merge_cbo_roaring_bitmaps, merge_obkvs, + concat_u32s_array, keep_first, keep_latest_obkv, merge_cbo_roaring_bitmaps, merge_roaring_bitmaps, merge_two_obkvs, roaring_bitmap_from_u32s_array, serialize_roaring_bitmap, MergeFn, }; diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 50d3d4248..ebe1608fa 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -12,9 +12,7 @@ use roaring::RoaringBitmap; use serde_json::Value; use smartstring::SmartString; -use super::helpers::{ - create_sorter, create_writer, keep_latest_obkv, merge_obkvs, merge_two_obkvs, MergeFn, -}; +use super::helpers::{create_sorter, create_writer, keep_latest_obkv, merge_two_obkvs, MergeFn}; use super::{IndexDocumentsMethod, IndexerConfig}; use crate::documents::{DocumentsBatchIndex, EnrichedDocument, EnrichedDocumentsBatchReader}; use crate::error::{Error, InternalError, UserError};
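The next patch adds a regression test for an edge case of this transform: requesting the deletion of the same external id several times must count each document at most once. A rough model of the invariant it checks (the `count_effective_deletions` helper is hypothetical, not milli code, and assumes every requested id currently exists in the index):

    use std::collections::HashSet;

    /// How many distinct documents a deletion request can actually remove.
    fn count_effective_deletions(requested: &[&str]) -> usize {
        requested.iter().copied().collect::<HashSet<_>>().len()
    }

    fn main() {
        // Mirrors the test below: ["1", "2", "1", "2"] removes at most 2 documents.
        assert_eq!(count_effective_deletions(&["1", "2", "1", "2"]), 2);
    }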
From 93db755d57a47f7e0ab36b846e503a3fb0d57fac Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 21:03:34 +0100 Subject: [PATCH 133/186] add a test to ensure we correctly handle deleting the same document multiple times --- milli/src/update/index_documents/mod.rs | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index 87c96818e..11e1e5811 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -2104,4 +2104,48 @@ mod tests { {"id":3,"name":"bob","age":25} "###); } + + #[test] + fn delete_the_same_document_multiple_time() { + let index = TempIndex::new(); + let mut wtxn = index.write_txn().unwrap(); + let builder = IndexDocuments::new( + &mut wtxn, + &index, + &index.indexer_config, + index.index_documents_config.clone(), + |_| (), + || false, + ) + .unwrap(); + + let (builder, removed) = + builder.remove_documents(vec![S("1"), S("2"), S("1"), S("2")]).unwrap(); + insta::assert_display_snapshot!(removed.unwrap(), @"0"); + + let documents = documents!([ + { "id": 1, "doggo": "kevin" }, + { "id": 2, "doggo": { "name": "jean", "age": 20 } }, + { "id": 3, "name": "bob", "age": 25 }, + ]); + let (builder, added) = builder.add_documents(documents).unwrap(); + insta::assert_display_snapshot!(added.unwrap(), @"3"); + + let (builder, removed) = + builder.remove_documents(vec![S("1"), S("2"), S("1"), S("2")]).unwrap(); + insta::assert_display_snapshot!(removed.unwrap(), @"2"); + + let addition = builder.execute().unwrap(); + insta::assert_debug_snapshot!(addition, @r###" + DocumentAdditionResult { + indexed_documents: 3, + number_of_documents: 1, + } + "###); + wtxn.commit().unwrap(); + + db_snap!(index, documents, @r###" + {"id":3,"name":"bob","age":25} + "###); + } } From ea9ac46f28f6c09e0e1989d7933687cf33a93408 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 8 Feb 2023 21:24:27 +0100 Subject: [PATCH 134/186] stop autobatching a deletion with an addition that is allowed to create the missing index --- index-scheduler/src/autobatcher.rs | 34 +++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs index 9948bfc5b..31634237f 100644 --- a/index-scheduler/src/autobatcher.rs +++ b/index-scheduler/src/autobatcher.rs @@ -345,10 +345,11 @@ impl BatchKind { deletion_ids.push(id); Continue(BatchKind::DocumentClear { ids: deletion_ids }) } + // we can autobatch the deletion and import if the index already exists ( BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentImport { method, allow_index_creation, primary_key } - ) => { + ) if index_already_exists => { deletion_ids.push(id); Continue(BatchKind::DocumentOperation { method, allow_index_creation, primary_key, operation_ids: deletion_ids, }) } + // we can autobatch the deletion and import if both can't create an index + ( + BatchKind::DocumentDeletion { mut deletion_ids }, + K::DocumentImport { method, allow_index_creation, primary_key } + ) if !allow_index_creation => { + deletion_ids.push(id); + + Continue(BatchKind::DocumentOperation { + method, + allow_index_creation, + primary_key, + operation_ids: deletion_ids, + }) + } + // we can't autobatch a deletion and an import if the index does not exist but would be created by an addition + ( + this @ BatchKind::DocumentDeletion { .. }, + K::DocumentImport { ..
} + ) => { + Break(this) + } (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentDeletion) => { deletion_ids.push(id); Continue(BatchKind::DocumentDeletion { deletion_ids }) @@ -678,12 +700,8 @@ mod tests { debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); } @@ -863,6 +881,12 @@ mod tests { debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); 
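// The "batch deletion and addition" snapshots a few lines below pin down the rule
// this patch introduces: when the index does not exist yet (first argument `false`)
// and the import is allowed to create it, the deletion is no longer merged with the
// import and is batched alone as `DocumentDeletion { deletion_ids: [0] }`.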
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + + // batch deletion and addition + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); } #[test] From c690c4fec4f7eda2f32037aaa1ac2c038e4d8fb4 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Wed, 8 Feb 2023 18:19:51 +0100 Subject: [PATCH 135/186] Added and modified the current API Key and Tenant Token tests --- meilisearch/tests/auth/tenant_token.rs | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/meilisearch/tests/auth/tenant_token.rs b/meilisearch/tests/auth/tenant_token.rs index af3e7c2a5..81e161b5a 100644 --- a/meilisearch/tests/auth/tenant_token.rs +++ b/meilisearch/tests/auth/tenant_token.rs @@ -82,6 +82,11 @@ static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| { "actions": ["search"], "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() }), + json!({ + "indexes": ["sal*", "prod*"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), ] }); @@ -104,6 +109,11 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| { "actions": ["*"], "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() }), + json!({ + "indexes": ["prod*", "p*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), json!({ "indexes": ["products"], "actions": ["search"], @@ -245,6 +255,10 @@ async fn search_authorized_simple_token() { "searchRules" => json!(["sales"]), "exp" => Value::Null }, + hashmap! { + "searchRules" => json!(["sa*"]), + "exp" => Value::Null + }, ]; compute_authorized_search!(tenant_tokens, {}, 5); @@ -351,11 +365,19 @@ async fn filter_search_authorized_filter_token() { }), "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sal*": {"filter": ["color = blue"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, ]; compute_authorized_search!(tenant_tokens, "color = yellow", 1); } +/// Tests that those Tenant Tokens are incompatible with the REFUSED_KEYS defined above. #[actix_rt::test] async fn error_search_token_forbidden_parent_key() { let tenant_tokens = vec![ @@ -383,6 +405,10 @@ async fn error_search_token_forbidden_parent_key() { "searchRules" => json!(["sales"]), "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }, + hashmap! { "searchRules" => json!(["sali*", "s*", "sales*"]), "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) }, ]; compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);
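The index patterns exercised above ("sal*", "prod*", "sali*") follow a simple trailing-wildcard rule, and a later patch in this series shows the auth side filtering keys with `pattern.matches_str(index)` and preferring the most specific match. As a rough sketch of the matching rule only (a standalone helper for illustration, not Meilisearch's actual `IndexUidPattern` API):

    /// A pattern either names one index exactly or, with a trailing `*`,
    /// matches every index sharing that prefix.
    fn pattern_matches(pattern: &str, index: &str) -> bool {
        match pattern.strip_suffix('*') {
            Some(prefix) => index.starts_with(prefix),
            None => pattern == index,
        }
    }

    fn main() {
        assert!(pattern_matches("sal*", "sales"));   // an accepted key covers "sales"
        assert!(pattern_matches("*", "products"));   // a bare star matches everything
        assert!(!pattern_matches("sali*", "sales")); // refused: prefix does not match
    }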
{ + "searchRules" => json!(["sali*", "s*", "sales*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, ]; compute_forbidden_search!(tenant_tokens, REFUSED_KEYS); From eaad84bd1d058a8b84ec39986148b748f1e563cd Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 9 Feb 2023 11:29:13 +0100 Subject: [PATCH 136/186] fix the test to handle the document deletion correctly --- index-scheduler/src/lib.rs | 9 +++- .../after_failing_the_deletion.snap | 43 ++++++++++++++++++ .../after_last_successful_addition.snap | 45 +++++++++++++++++++ .../documents.snap | 17 +++++++ index-scheduler/src/utils.rs | 27 ++++++----- 5 files changed, 127 insertions(+), 14 deletions(-) create mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index f1b177cb2..03cf961fd 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -1758,8 +1758,13 @@ mod tests { .unwrap(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); - handle.advance_one_successful_batch(); // The deletion AND addition should've been batched together - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_batch"); + // The deletion should have failed because it can't create an index + handle.advance_one_failed_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_failing_the_deletion"); + + // The addition should works + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_last_successful_addition"); let index = index_scheduler.index("doggos").unwrap(); let rtxn = index.read_txn().unwrap(); diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap new file mode 100644 index 000000000..2850af744 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap @@ -0,0 +1,43 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} +1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [1,] +failed [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,] 
+"documentDeletion" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap new file mode 100644 index 000000000..59e18bdb0 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap @@ -0,0 +1,45 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} +1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [1,] +failed [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,] +"documentDeletion" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap new file mode 100644 index 000000000..8204d059b --- /dev/null +++ 
b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap @@ -0,0 +1,17 @@ +--- +source: index-scheduler/src/lib.rs +--- +[ + { + "id": 1, + "doggo": "jean bob" + }, + { + "id": 2, + "catto": "jorts" + }, + { + "id": 3, + "doggo": "bork" + } +] diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs index c9b71b523..f68371767 100644 --- a/index-scheduler/src/utils.rs +++ b/index-scheduler/src/utils.rs @@ -439,20 +439,23 @@ impl IndexScheduler { provided_ids: received_document_ids, deleted_documents, } => { - if let Some(deleted_documents) = deleted_documents { - assert_eq!(status, Status::Succeeded); - assert!(deleted_documents <= received_document_ids as u64); - assert_eq!(kind.as_kind(), Kind::DocumentDeletion); + assert_eq!(kind.as_kind(), Kind::DocumentDeletion); + let KindWithContent::DocumentDeletion { + ref index_uid, + ref documents_ids, + } = kind else { unreachable!() }; + assert_eq!(&task_index_uid.unwrap(), index_uid); - match &kind { - KindWithContent::DocumentDeletion { index_uid, documents_ids } => { - assert_eq!(&task_index_uid.unwrap(), index_uid); - assert!(documents_ids.len() >= received_document_ids); - } - _ => panic!(), + match status { + Status::Enqueued | Status::Processing => (), + Status::Succeeded => { + assert!(deleted_documents.unwrap() <= received_document_ids as u64); + assert!(documents_ids.len() == received_document_ids); + } + Status::Failed | Status::Canceled => { + assert!(deleted_documents == Some(0)); + assert!(documents_ids.len() == received_document_ids); } - } else { - assert_ne!(status, Status::Succeeded); } } Details::ClearAll { deleted_documents } => { From 746b31c1ce0dd1f51ef9204228673c3400f86e85 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 9 Feb 2023 12:23:01 +0100 Subject: [PATCH 137/186] makes clippy happy --- milli/src/update/index_documents/transform.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index ebe1608fa..73c734aaa 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -345,9 +345,9 @@ impl<'a, 'i> Transform<'a, 'i> { Entry::Occupied(entry) => { let doc_id = *entry.get() as u32; self.original_sorter - .insert(doc_id.to_be_bytes(), &[Operation::Deletion as u8])?; + .insert(doc_id.to_be_bytes(), [Operation::Deletion as u8])?; self.flattened_sorter - .insert(doc_id.to_be_bytes(), &[Operation::Deletion as u8])?; + .insert(doc_id.to_be_bytes(), [Operation::Deletion as u8])?; // we must NOT update the list of replaced_documents_ids // Either: From 764df24b7d6ff98cbf9ca77fa103acb9cc69ff25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Feb 2023 13:21:20 +0100 Subject: [PATCH 138/186] Make clippy happy (again) --- meilisearch-auth/src/lib.rs | 3 +-- meilisearch/tests/auth/authorization.rs | 8 ++++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index c27b71e74..9f9b15f38 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -214,8 +214,7 @@ impl SearchRules { map.iter() .filter(|(pattern, _)| pattern.matches_str(index)) .max_by_key(|(pattern, _)| (pattern.is_exact(), pattern.len())) - .map(|(_, rule)| rule.clone()) - .flatten() + .and_then(|(_, rule)| rule.clone()) } } } diff --git a/meilisearch/tests/auth/authorization.rs b/meilisearch/tests/auth/authorization.rs index 
15d4d96fb..69a74b022 100644 --- a/meilisearch/tests/auth/authorization.rs +++ b/meilisearch/tests/auth/authorization.rs @@ -445,7 +445,7 @@ async fn access_authorized_index_patterns() { // use created key. let key = response["key"].as_str().unwrap(); - server.use_api_key(&key); + server.use_api_key(key); // refer to products_1 and products with modified api key. let index_1 = server.index("products_1"); @@ -515,7 +515,7 @@ async fn raise_error_non_authorized_index_patterns() { // use created key. let key = response["key"].as_str().unwrap(); - server.use_api_key(&key); + server.use_api_key(key); // refer to products_1 and products_2 with modified api key. let product_1_index = server.index("products_1"); @@ -582,7 +582,7 @@ async fn pattern_indexes() { let (response, code) = server.add_api_key(content).await; assert_eq!(201, code, "{:?}", &response); let key = response["key"].as_str().expect("Key is not string"); - server.use_api_key(&key); + server.use_api_key(key); // Create Index products_1 using generated api key let products_1 = server.index("products_1"); @@ -859,7 +859,7 @@ async fn lazy_create_index_from_pattern() { // use created key. let key = response["key"].as_str().unwrap(); - server.use_api_key(&key); + server.use_api_key(key); // try to create a index via add documents route let index = server.index("products_1"); From 827c1c84478b1b2fb063368f447ff40f3c3d050b Mon Sep 17 00:00:00 2001 From: Ayman Date: Fri, 10 Feb 2023 11:42:19 +0400 Subject: [PATCH 139/186] edit gitignore to ignore .idea and .vscode folders --- .gitignore | 2 ++ .idea/.gitignore | 8 ++++++++ .idea/codeStyles/Project.xml | 13 +++++++++++++ .idea/codeStyles/codeStyleConfig.xml | 5 +++++ .idea/meilisearch.iml | 9 +++++++++ .idea/modules.xml | 8 ++++++++ .idea/vcs.xml | 6 ++++++ 7 files changed, 51 insertions(+) create mode 100644 .idea/.gitignore create mode 100644 .idea/codeStyles/Project.xml create mode 100644 .idea/codeStyles/codeStyleConfig.xml create mode 100644 .idea/meilisearch.iml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml diff --git a/.gitignore b/.gitignore index ecf90ef8f..5f660c735 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +.idea/ +.vscode/ /target **/*.csv **/*.json_lines diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 000000000..13566b81b --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml diff --git a/.idea/codeStyles/Project.xml b/.idea/codeStyles/Project.xml new file mode 100644 index 000000000..f5cb8715f --- /dev/null +++ b/.idea/codeStyles/Project.xml @@ -0,0 +1,13 @@ + + + + + + + + + + \ No newline at end of file diff --git a/.idea/codeStyles/codeStyleConfig.xml b/.idea/codeStyles/codeStyleConfig.xml new file mode 100644 index 000000000..a55e7a179 --- /dev/null +++ b/.idea/codeStyles/codeStyleConfig.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/.idea/meilisearch.iml b/.idea/meilisearch.iml new file mode 100644 index 000000000..d6ebd4805 --- /dev/null +++ b/.idea/meilisearch.iml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 000000000..dda3a76aa --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 
000000000..35eb1ddfb --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file From 8770088df319689b4adc3bddd5542352cb62c6d6 Mon Sep 17 00:00:00 2001 From: Ayman Date: Fri, 10 Feb 2023 11:45:02 +0400 Subject: [PATCH 140/186] remove idea folder --- .idea/.gitignore | 8 -------- .idea/codeStyles/Project.xml | 13 ------------- .idea/codeStyles/codeStyleConfig.xml | 5 ----- .idea/meilisearch.iml | 9 --------- .idea/modules.xml | 8 -------- .idea/vcs.xml | 6 ------ 6 files changed, 49 deletions(-) delete mode 100644 .idea/.gitignore delete mode 100644 .idea/codeStyles/Project.xml delete mode 100644 .idea/codeStyles/codeStyleConfig.xml delete mode 100644 .idea/meilisearch.iml delete mode 100644 .idea/modules.xml delete mode 100644 .idea/vcs.xml diff --git a/.idea/.gitignore b/.idea/.gitignore deleted file mode 100644 index 13566b81b..000000000 --- a/.idea/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# Default ignored files -/shelf/ -/workspace.xml -# Editor-based HTTP Client requests -/httpRequests/ -# Datasource local storage ignored files -/dataSources/ -/dataSources.local.xml diff --git a/.idea/codeStyles/Project.xml b/.idea/codeStyles/Project.xml deleted file mode 100644 index f5cb8715f..000000000 --- a/.idea/codeStyles/Project.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/.idea/codeStyles/codeStyleConfig.xml b/.idea/codeStyles/codeStyleConfig.xml deleted file mode 100644 index a55e7a179..000000000 --- a/.idea/codeStyles/codeStyleConfig.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/meilisearch.iml b/.idea/meilisearch.iml deleted file mode 100644 index d6ebd4805..000000000 --- a/.idea/meilisearch.iml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index dda3a76aa..000000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 35eb1ddfb..000000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file From 47748395dcfcbc78acb2838ebcf3cb5afa4b871b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Mon, 13 Feb 2023 17:20:08 +0100 Subject: [PATCH 141/186] Update an authentication comment Co-authored-by: Many the fish --- meilisearch/src/extractors/authentication/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/src/extractors/authentication/mod.rs b/meilisearch/src/extractors/authentication/mod.rs index 4836679a9..84b598689 100644 --- a/meilisearch/src/extractors/authentication/mod.rs +++ b/meilisearch/src/extractors/authentication/mod.rs @@ -199,7 +199,7 @@ pub mod policies { token: &str, index: Option<&str>, ) -> Option { - // Tenant token will always define an index. + // A tenant token only has access to the search route which always defines an index. let index = index?; // Only search action can be accessed by a tenant token. 
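A quick aside on the `and_then` change in PATCH 138 above: clippy's `map_flatten` lint fires there because, on an `Option`, calling `.map(f)` and then `.flatten()` is equivalent to the single call `.and_then(f)`. A self-contained model of that lookup, using simplified types and illustrative names rather than the real `SearchRules` API:

    use std::collections::BTreeMap;

    fn main() {
        // Each index pattern maps to an optional rule, as in `SearchRules::get`.
        let mut rules: BTreeMap<&str, Option<u32>> = BTreeMap::new();
        rules.insert("products_*", Some(42));
        rules.insert("logs", None);

        // Before the patch: `map` yields an Option<Option<u32>> that must be flattened.
        let before = rules.iter().find(|(p, _)| **p == "products_*").map(|(_, r)| *r).flatten();
        // After the patch: `and_then` maps and flattens in one step.
        let after = rules.iter().find(|(p, _)| **p == "products_*").and_then(|(_, r)| *r);

        assert_eq!(before, after);
        assert_eq!(after, Some(42));
    }
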
From 4b1cd10653f0e9d7d9db5945caf4abd8b7e21758 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 13 Feb 2023 17:26:34 +0100 Subject: [PATCH 142/186] Return an internal error when index pattern should be valid --- meilisearch-auth/src/store.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs index 459234065..cc5dcdfb5 100644 --- a/meilisearch-auth/src/store.rs +++ b/meilisearch-auth/src/store.rs @@ -5,6 +5,7 @@ use std::convert::{TryFrom, TryInto}; use std::fs::create_dir_all; use std::path::Path; use std::str; +use std::str::FromStr; use std::sync::Arc; use hmac::{Hmac, Mac}; @@ -18,7 +19,7 @@ use time::OffsetDateTime; use uuid::fmt::Hyphenated; use uuid::Uuid; -use super::error::Result; +use super::error::{AuthControllerError, Result}; use super::{Action, Key}; const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB @@ -225,9 +226,9 @@ impl HeedAuthStore { for result in self.action_keyid_index_expiration.prefix_iter(&rtxn, &tuple)? { let ((_, _, index_uid_pattern), expiration) = result?; if let Some((pattern, index)) = index_uid_pattern.zip(index) { - let index_uid_pattern = str::from_utf8(pattern)?.to_string(); - // TODO I shouldn't unwrap here but rather return an internal error - let pattern = IndexUidPattern::try_from(index_uid_pattern).unwrap(); + let index_uid_pattern = str::from_utf8(pattern)?; + let pattern = IndexUidPattern::from_str(index_uid_pattern) + .map_err(|e| AuthControllerError::Internal(Box::new(e)))?; if pattern.matches_str(index) { return Ok(Some(expiration)); } From 6fa877efb03e0532bcddd04c95821b835cf3996c Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Mon, 13 Feb 2023 17:49:52 +0100 Subject: [PATCH 143/186] Fix attributes set candidates --- milli/src/search/criteria/attribute.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/milli/src/search/criteria/attribute.rs b/milli/src/search/criteria/attribute.rs index d7ec0d382..5b33fdf54 100644 --- a/milli/src/search/criteria/attribute.rs +++ b/milli/src/search/criteria/attribute.rs @@ -123,7 +123,7 @@ impl<'t> Criterion for Attribute<'t> { None => { return Ok(Some(CriterionResult { query_tree: Some(query_tree), - candidates: Some(RoaringBitmap::new()), + candidates: Some(allowed_candidates), filtered_candidates: None, initial_candidates: Some(self.initial_candidates.take()), })); From e405702733f43df31c3d48dae53b194a87ce8d06 Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Tue, 7 Feb 2023 18:57:27 +0100 Subject: [PATCH 144/186] chore: introduce new error ParseGeoError type --- milli/src/search/facet/filter.rs | 93 ++++++++++++++++++++------------ 1 file changed, 60 insertions(+), 33 deletions(-) diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 3cf11819f..548ed699e 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -22,12 +22,30 @@ pub struct Filter<'a> { } #[derive(Debug)] -enum FilterError<'a> { - AttributeNotFilterable { attribute: &'a str, filterable_fields: HashSet }, - BadGeo(&'a str), +enum ParseGeoError { + BadGeo(String), BadGeoLat(f64), BadGeoLng(f64), BadGeoBoundingBoxTopIsBelowBottom(f64, f64), +} + +impl std::error::Error for ParseGeoError {} + +impl Display for ParseGeoError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::BadGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. 
Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword), + Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`."), + Self::BadGeoLat(lat) => write!(f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", lat), + Self::BadGeoLng(lng) => write!(f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng), + } + } +} + +#[derive(Debug)] +enum FilterError<'a> { + AttributeNotFilterable { attribute: &'a str, filterable_fields: HashSet }, + ParseGeoError(ParseGeoError), Reserved(&'a str), TooDeep, } @@ -44,7 +62,11 @@ impl<'a> Display for FilterError<'a> { attribute, ) } else { - let filterables_list = filterable_fields.iter().map(AsRef::as_ref).collect::>().join(" "); + let filterables_list = filterable_fields + .iter() + .map(AsRef::as_ref) + .collect::>() + .join(" "); write!( f, @@ -53,8 +75,9 @@ impl<'a> Display for FilterError<'a> { filterables_list, ) } - }, - Self::TooDeep => write!(f, + } + Self::TooDeep => write!( + f, "Too many filter conditions, can't process more than {} filters.", MAX_FILTER_DEPTH ), @@ -63,10 +86,7 @@ impl<'a> Display for FilterError<'a> { "`{}` is a reserved keyword and thus can't be used as a filter expression.", keyword ), - Self::BadGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword), - Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`."), - Self::BadGeoLat(lat) => write!(f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", lat), - Self::BadGeoLng(lng) => write!(f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng), + Self::ParseGeoError(error) => write!(f, "{}", error), } } } @@ -297,12 +317,13 @@ impl<'a> Filter<'a> { } } else { match fid.value() { - attribute @ "_geo" => { - Err(fid.as_external_error(FilterError::BadGeo(attribute)))? - } - attribute if attribute.starts_with("_geoPoint(") => { - Err(fid.as_external_error(FilterError::BadGeo("_geoPoint")))? - } + attribute @ "_geo" => Err(fid.as_external_error( + FilterError::ParseGeoError(ParseGeoError::BadGeo(attribute.to_owned())), + ))?, + attribute if attribute.starts_with("_geoPoint(") => Err(fid + .as_external_error(FilterError::ParseGeoError( + ParseGeoError::BadGeo("_geoPoint".to_owned()), + )))?, attribute @ "_geoDistance" => { Err(fid.as_external_error(FilterError::Reserved(attribute)))? 
} @@ -353,14 +374,14 @@ impl<'a> Filter<'a> { let base_point: [f64; 2] = [point[0].parse_finite_float()?, point[1].parse_finite_float()?]; if !(-90.0..=90.0).contains(&base_point[0]) { - return Err( - point[0].as_external_error(FilterError::BadGeoLat(base_point[0])) - )?; + return Err(point[0].as_external_error(FilterError::ParseGeoError( + ParseGeoError::BadGeoLat(base_point[0]), + )))?; } if !(-180.0..=180.0).contains(&base_point[1]) { - return Err( - point[1].as_external_error(FilterError::BadGeoLng(base_point[1])) - )?; + return Err(point[1].as_external_error(FilterError::ParseGeoError( + ParseGeoError::BadGeoLng(base_point[1]), + )))?; } let radius = radius.parse_finite_float()?; let rtree = match index.geo_rtree(rtxn)? { @@ -398,26 +419,32 @@ impl<'a> Filter<'a> { bottom_right_point[1].parse_finite_float()?, ]; if !(-90.0..=90.0).contains(&top_left[0]) { - return Err(top_left_point[0] - .as_external_error(FilterError::BadGeoLat(top_left[0])))?; + return Err(top_left_point[0].as_external_error( + FilterError::ParseGeoError(ParseGeoError::BadGeoLat(top_left[0])), + ))?; } if !(-180.0..=180.0).contains(&top_left[1]) { - return Err(top_left_point[1] - .as_external_error(FilterError::BadGeoLng(top_left[1])))?; + return Err(top_left_point[1].as_external_error( + FilterError::ParseGeoError(ParseGeoError::BadGeoLng(top_left[1])), + ))?; } if !(-90.0..=90.0).contains(&bottom_right[0]) { - return Err(bottom_right_point[0] - .as_external_error(FilterError::BadGeoLat(bottom_right[0])))?; + return Err(bottom_right_point[0].as_external_error( + FilterError::ParseGeoError(ParseGeoError::BadGeoLat(bottom_right[0])), + ))?; } if !(-180.0..=180.0).contains(&bottom_right[1]) { - return Err(bottom_right_point[1] - .as_external_error(FilterError::BadGeoLng(bottom_right[1])))?; + return Err(bottom_right_point[1].as_external_error( + FilterError::ParseGeoError(ParseGeoError::BadGeoLng(bottom_right[1])), + ))?; } if top_left[0] < bottom_right[0] { return Err(bottom_right_point[1].as_external_error( - FilterError::BadGeoBoundingBoxTopIsBelowBottom( - top_left[0], - bottom_right[0], + FilterError::ParseGeoError( + ParseGeoError::BadGeoBoundingBoxTopIsBelowBottom( + top_left[0], + bottom_right[0], + ), ), ))?; } From 825923f6fc7b2c725dc4b932dedd23181836e65c Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Tue, 7 Feb 2023 18:58:09 +0100 Subject: [PATCH 145/186] export ParseGeoError --- milli/src/search/facet/filter.rs | 2 +- milli/src/search/facet/mod.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 548ed699e..35352c764 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -22,7 +22,7 @@ pub struct Filter<'a> { } #[derive(Debug)] -enum ParseGeoError { +pub enum ParseGeoError { BadGeo(String), BadGeoLat(f64), BadGeoLng(f64), diff --git a/milli/src/search/facet/mod.rs b/milli/src/search/facet/mod.rs index 73054b84a..0d0f96851 100644 --- a/milli/src/search/facet/mod.rs +++ b/milli/src/search/facet/mod.rs @@ -4,7 +4,7 @@ use heed::types::{ByteSlice, DecodeIgnore}; use heed::{BytesDecode, RoTxn}; pub use self::facet_distribution::{FacetDistribution, DEFAULT_VALUES_PER_FACET}; -pub use self::filter::Filter; +pub use self::filter::{Filter, ParseGeoError}; use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec}; use crate::heed_codec::ByteSliceRefCodec; mod facet_distribution; From 4c910376020a336f0bfda26587f7c9911671e47b Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: 
Tue, 7 Feb 2023 19:15:06 +0100 Subject: [PATCH 146/186] use ParseGeoError in sort parser --- milli/src/asc_desc.rs | 41 +++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index ebb28c27d..e821a847e 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -7,14 +7,15 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use crate::error::is_reserved_keyword; +use crate::search::facet::ParseGeoError; use crate::{CriterionError, Error, UserError}; /// This error type is never supposed to be shown to the end user. /// You must always cast it to a sort error or a criterion error. #[derive(Debug)] pub enum AscDescError { - InvalidLatitude, - InvalidLongitude, + InvalidLatitude(ParseGeoError), + InvalidLongitude(ParseGeoError), InvalidSyntax { name: String }, ReservedKeyword { name: String }, } @@ -22,11 +23,11 @@ pub enum AscDescError { impl fmt::Display for AscDescError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Self::InvalidLatitude => { - write!(f, "Latitude must be contained between -90 and 90 degrees.",) + Self::InvalidLatitude(error) => { + write!(f, "{error}",) } - Self::InvalidLongitude => { - write!(f, "Longitude must be contained between -180 and 180 degrees.",) + Self::InvalidLongitude(error) => { + write!(f, "{error}",) } Self::InvalidSyntax { name } => { write!(f, "Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{}`.", name) @@ -45,7 +46,7 @@ impl fmt::Display for AscDescError { impl From for CriterionError { fn from(error: AscDescError) -> Self { match error { - AscDescError::InvalidLatitude | AscDescError::InvalidLongitude => { + AscDescError::InvalidLatitude(_) | AscDescError::InvalidLongitude(_) => { CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() } } AscDescError::InvalidSyntax { name } => CriterionError::InvalidName { name }, @@ -85,9 +86,9 @@ impl FromStr for Member { .map_err(|_| AscDescError::ReservedKeyword { name: text.to_string() }) })?; if !(-90.0..=90.0).contains(&lat) { - return Err(AscDescError::InvalidLatitude)?; + return Err(AscDescError::InvalidLatitude(ParseGeoError::BadGeoLat(lat)))?; } else if !(-180.0..=180.0).contains(&lng) { - return Err(AscDescError::InvalidLongitude)?; + return Err(AscDescError::InvalidLongitude(ParseGeoError::BadGeoLng(lng)))?; } Ok(Member::Geo([lat, lng])) } @@ -162,10 +163,10 @@ impl FromStr for AscDesc { #[derive(Error, Debug)] pub enum SortError { - #[error("{}", AscDescError::InvalidLatitude)] - InvalidLatitude, - #[error("{}", AscDescError::InvalidLongitude)] - InvalidLongitude, + #[error("{}", error)] + InvalidLatitude { error: ParseGeoError }, + #[error("{}", error)] + InvalidLongitude { error: ParseGeoError }, #[error("Invalid syntax for the geo parameter: expected expression formated like \ `_geoPoint(latitude, longitude)` and ending by `:asc` or `:desc`, found `{name}`.")] BadGeoPointUsage { name: String }, @@ -184,8 +185,8 @@ pub enum SortError { impl From for SortError { fn from(error: AscDescError) -> Self { match error { - AscDescError::InvalidLatitude => SortError::InvalidLatitude, - AscDescError::InvalidLongitude => SortError::InvalidLongitude, + AscDescError::InvalidLatitude(error) => SortError::InvalidLatitude { error }, + AscDescError::InvalidLongitude(error) => SortError::InvalidLongitude { error }, AscDescError::InvalidSyntax { name } => SortError::InvalidName { name }, AscDescError::ReservedKeyword { 
name } if name.starts_with("_geoPoint") => { SortError::BadGeoPointUsage { name } @@ -277,11 +278,11 @@ mod tests { ), ("_geoPoint(35, 85, 75):asc", ReservedKeyword { name: S("_geoPoint(35, 85, 75)") }), ("_geoPoint(18):asc", ReservedKeyword { name: S("_geoPoint(18)") }), - ("_geoPoint(200, 200):asc", InvalidLatitude), - ("_geoPoint(90.000001, 0):asc", InvalidLatitude), - ("_geoPoint(0, -180.000001):desc", InvalidLongitude), - ("_geoPoint(159.256, 130):asc", InvalidLatitude), - ("_geoPoint(12, -2021):desc", InvalidLongitude), + ("_geoPoint(200, 200):asc", InvalidLatitude(ParseGeoError::BadGeoLat(200.))), + ("_geoPoint(90.000001, 0):asc", InvalidLatitude(ParseGeoError::BadGeoLat(90.000001))), + ("_geoPoint(0, -180.000001):desc", InvalidLongitude(ParseGeoError::BadGeoLng(-180.000001))), + ("_geoPoint(159.256, 130):asc", InvalidLatitude(ParseGeoError::BadGeoLat(159.256))), + ("_geoPoint(12, -2021):desc", InvalidLongitude(ParseGeoError::BadGeoLng(-2021.))), ]; for (req, expected_error) in invalid_req { From 83c765ce6cdf37a120fe0cccbca8553aa4c3e50b Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Thu, 9 Feb 2023 16:35:15 +0100 Subject: [PATCH 147/186] implement From for FilterError --- milli/src/search/facet/filter.rs | 60 ++++++++++++++++---------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 35352c764..2740669b7 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -51,6 +51,12 @@ enum FilterError<'a> { } impl<'a> std::error::Error for FilterError<'a> {} +impl<'a> From for FilterError<'a> { + fn from(geo_error: ParseGeoError) -> Self { + FilterError::ParseGeoError(geo_error) + } +} + impl<'a> Display for FilterError<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -317,13 +323,13 @@ impl<'a> Filter<'a> { } } else { match fid.value() { - attribute @ "_geo" => Err(fid.as_external_error( - FilterError::ParseGeoError(ParseGeoError::BadGeo(attribute.to_owned())), - ))?, - attribute if attribute.starts_with("_geoPoint(") => Err(fid - .as_external_error(FilterError::ParseGeoError( - ParseGeoError::BadGeo("_geoPoint".to_owned()), - )))?, + attribute @ "_geo" => { + Err(fid.as_external_error(ParseGeoError::BadGeo(attribute.to_owned())))? + } + attribute if attribute.starts_with("_geoPoint(") => { + Err(fid + .as_external_error(ParseGeoError::BadGeo("_geoPoint".to_owned())))? + } attribute @ "_geoDistance" => { Err(fid.as_external_error(FilterError::Reserved(attribute)))? } @@ -374,14 +380,14 @@ impl<'a> Filter<'a> { let base_point: [f64; 2] = [point[0].parse_finite_float()?, point[1].parse_finite_float()?]; if !(-90.0..=90.0).contains(&base_point[0]) { - return Err(point[0].as_external_error(FilterError::ParseGeoError( - ParseGeoError::BadGeoLat(base_point[0]), - )))?; + return Err( + point[0].as_external_error(ParseGeoError::BadGeoLat(base_point[0])) + )?; } if !(-180.0..=180.0).contains(&base_point[1]) { - return Err(point[1].as_external_error(FilterError::ParseGeoError( - ParseGeoError::BadGeoLng(base_point[1]), - )))?; + return Err( + point[1].as_external_error(ParseGeoError::BadGeoLng(base_point[1])) + )?; } let radius = radius.parse_finite_float()?; let rtree = match index.geo_rtree(rtxn)? 
{ @@ -419,32 +425,26 @@ impl<'a> Filter<'a> { bottom_right_point[1].parse_finite_float()?, ]; if !(-90.0..=90.0).contains(&top_left[0]) { - return Err(top_left_point[0].as_external_error( - FilterError::ParseGeoError(ParseGeoError::BadGeoLat(top_left[0])), - ))?; + return Err(top_left_point[0] + .as_external_error(ParseGeoError::BadGeoLat(top_left[0])))?; } if !(-180.0..=180.0).contains(&top_left[1]) { - return Err(top_left_point[1].as_external_error( - FilterError::ParseGeoError(ParseGeoError::BadGeoLng(top_left[1])), - ))?; + return Err(top_left_point[1] + .as_external_error(ParseGeoError::BadGeoLng(top_left[1])))?; } if !(-90.0..=90.0).contains(&bottom_right[0]) { - return Err(bottom_right_point[0].as_external_error( - FilterError::ParseGeoError(ParseGeoError::BadGeoLat(bottom_right[0])), - ))?; + return Err(bottom_right_point[0] + .as_external_error(ParseGeoError::BadGeoLat(bottom_right[0])))?; } if !(-180.0..=180.0).contains(&bottom_right[1]) { - return Err(bottom_right_point[1].as_external_error( - FilterError::ParseGeoError(ParseGeoError::BadGeoLng(bottom_right[1])), - ))?; + return Err(bottom_right_point[1] + .as_external_error(ParseGeoError::BadGeoLng(bottom_right[1])))?; } if top_left[0] < bottom_right[0] { return Err(bottom_right_point[1].as_external_error( - FilterError::ParseGeoError( - ParseGeoError::BadGeoBoundingBoxTopIsBelowBottom( - top_left[0], - bottom_right[0], - ), + ParseGeoError::BadGeoBoundingBoxTopIsBelowBottom( + top_left[0], + bottom_right[0], ), ))?; } From 7481559e8b1ee969323802d422d95f1c01d0db30 Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Thu, 9 Feb 2023 17:37:18 +0100 Subject: [PATCH 148/186] move BadGeo to FilterError --- milli/src/search/facet/filter.rs | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 2740669b7..3e95909a0 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -23,7 +23,6 @@ pub struct Filter<'a> { #[derive(Debug)] pub enum ParseGeoError { - BadGeo(String), BadGeoLat(f64), BadGeoLng(f64), BadGeoBoundingBoxTopIsBelowBottom(f64, f64), @@ -34,10 +33,19 @@ impl std::error::Error for ParseGeoError {} impl Display for ParseGeoError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::BadGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword), - Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`."), - Self::BadGeoLat(lat) => write!(f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", lat), - Self::BadGeoLng(lng) => write!(f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng), + Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => { + write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`.") + } + Self::BadGeoLat(lat) => write!( + f, + "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", + lat + ), + Self::BadGeoLng(lng) => write!( + f, + "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. 
", + lng + ), } } } @@ -46,6 +54,7 @@ impl Display for ParseGeoError { enum FilterError<'a> { AttributeNotFilterable { attribute: &'a str, filterable_fields: HashSet }, ParseGeoError(ParseGeoError), + ReservedGeo(&'a str), Reserved(&'a str), TooDeep, } @@ -87,6 +96,7 @@ impl<'a> Display for FilterError<'a> { "Too many filter conditions, can't process more than {} filters.", MAX_FILTER_DEPTH ), + Self::ReservedGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword), Self::Reserved(keyword) => write!( f, "`{}` is a reserved keyword and thus can't be used as a filter expression.", @@ -324,11 +334,10 @@ impl<'a> Filter<'a> { } else { match fid.value() { attribute @ "_geo" => { - Err(fid.as_external_error(ParseGeoError::BadGeo(attribute.to_owned())))? + Err(fid.as_external_error(FilterError::ReservedGeo(attribute)))? } attribute if attribute.starts_with("_geoPoint(") => { - Err(fid - .as_external_error(ParseGeoError::BadGeo("_geoPoint".to_owned())))? + Err(fid.as_external_error(FilterError::ReservedGeo("_geoPoint")))? } attribute @ "_geoDistance" => { Err(fid.as_external_error(FilterError::Reserved(attribute)))? From c0b77773bad9547a76a53e8b1e659c600158b223 Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Thu, 9 Feb 2023 17:41:36 +0100 Subject: [PATCH 149/186] fmt asc_desc --- milli/src/asc_desc.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index e821a847e..fc29367ec 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -280,7 +280,10 @@ mod tests { ("_geoPoint(18):asc", ReservedKeyword { name: S("_geoPoint(18)") }), ("_geoPoint(200, 200):asc", InvalidLatitude(ParseGeoError::BadGeoLat(200.))), ("_geoPoint(90.000001, 0):asc", InvalidLatitude(ParseGeoError::BadGeoLat(90.000001))), - ("_geoPoint(0, -180.000001):desc", InvalidLongitude(ParseGeoError::BadGeoLng(-180.000001))), + ( + "_geoPoint(0, -180.000001):desc", + InvalidLongitude(ParseGeoError::BadGeoLng(-180.000001)), + ), ("_geoPoint(159.256, 130):asc", InvalidLatitude(ParseGeoError::BadGeoLat(159.256))), ("_geoPoint(12, -2021):desc", InvalidLongitude(ParseGeoError::BadGeoLng(-2021.))), ]; From c810af3ebfa1be46efd6eb0e866cbe9088e8b064 Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Thu, 9 Feb 2023 18:33:41 +0100 Subject: [PATCH 150/186] implement From for AscDescError --- milli/src/asc_desc.rs | 40 ++++++++++++++++++---------------------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index fc29367ec..ecac4194a 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -14,19 +14,21 @@ use crate::{CriterionError, Error, UserError}; /// You must always cast it to a sort error or a criterion error. 
#[derive(Debug)] pub enum AscDescError { - InvalidLatitude(ParseGeoError), - InvalidLongitude(ParseGeoError), + GeoError(ParseGeoError), InvalidSyntax { name: String }, ReservedKeyword { name: String }, } +impl From for AscDescError { + fn from(geo_error: ParseGeoError) -> Self { + AscDescError::GeoError(geo_error) + } +} + impl fmt::Display for AscDescError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Self::InvalidLatitude(error) => { - write!(f, "{error}",) - } - Self::InvalidLongitude(error) => { + Self::GeoError(error) => { write!(f, "{error}",) } Self::InvalidSyntax { name } => { @@ -46,7 +48,7 @@ impl fmt::Display for AscDescError { impl From for CriterionError { fn from(error: AscDescError) -> Self { match error { - AscDescError::InvalidLatitude(_) | AscDescError::InvalidLongitude(_) => { + AscDescError::GeoError(_) => { CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() } } AscDescError::InvalidSyntax { name } => CriterionError::InvalidName { name }, @@ -86,9 +88,9 @@ impl FromStr for Member { .map_err(|_| AscDescError::ReservedKeyword { name: text.to_string() }) })?; if !(-90.0..=90.0).contains(&lat) { - return Err(AscDescError::InvalidLatitude(ParseGeoError::BadGeoLat(lat)))?; + return Err(ParseGeoError::BadGeoLat(lat))?; } else if !(-180.0..=180.0).contains(&lng) { - return Err(AscDescError::InvalidLongitude(ParseGeoError::BadGeoLng(lng)))?; + return Err(ParseGeoError::BadGeoLng(lng))?; } Ok(Member::Geo([lat, lng])) } @@ -164,9 +166,7 @@ impl FromStr for AscDesc { #[derive(Error, Debug)] pub enum SortError { #[error("{}", error)] - InvalidLatitude { error: ParseGeoError }, - #[error("{}", error)] - InvalidLongitude { error: ParseGeoError }, + ParseGeoError { error: ParseGeoError }, #[error("Invalid syntax for the geo parameter: expected expression formated like \ `_geoPoint(latitude, longitude)` and ending by `:asc` or `:desc`, found `{name}`.")] BadGeoPointUsage { name: String }, @@ -185,8 +185,7 @@ pub enum SortError { impl From for SortError { fn from(error: AscDescError) -> Self { match error { - AscDescError::InvalidLatitude(error) => SortError::InvalidLatitude { error }, - AscDescError::InvalidLongitude(error) => SortError::InvalidLongitude { error }, + AscDescError::GeoError(error) => SortError::ParseGeoError { error }, AscDescError::InvalidSyntax { name } => SortError::InvalidName { name }, AscDescError::ReservedKeyword { name } if name.starts_with("_geoPoint") => { SortError::BadGeoPointUsage { name } @@ -278,14 +277,11 @@ mod tests { ), ("_geoPoint(35, 85, 75):asc", ReservedKeyword { name: S("_geoPoint(35, 85, 75)") }), ("_geoPoint(18):asc", ReservedKeyword { name: S("_geoPoint(18)") }), - ("_geoPoint(200, 200):asc", InvalidLatitude(ParseGeoError::BadGeoLat(200.))), - ("_geoPoint(90.000001, 0):asc", InvalidLatitude(ParseGeoError::BadGeoLat(90.000001))), - ( - "_geoPoint(0, -180.000001):desc", - InvalidLongitude(ParseGeoError::BadGeoLng(-180.000001)), - ), - ("_geoPoint(159.256, 130):asc", InvalidLatitude(ParseGeoError::BadGeoLat(159.256))), - ("_geoPoint(12, -2021):desc", InvalidLongitude(ParseGeoError::BadGeoLng(-2021.))), + ("_geoPoint(200, 200):asc", GeoError(ParseGeoError::BadGeoLat(200.))), + ("_geoPoint(90.000001, 0):asc", GeoError(ParseGeoError::BadGeoLat(90.000001))), + ("_geoPoint(0, -180.000001):desc", GeoError(ParseGeoError::BadGeoLng(-180.000001))), + ("_geoPoint(159.256, 130):asc", GeoError(ParseGeoError::BadGeoLat(159.256))), + ("_geoPoint(12, -2021):desc", GeoError(ParseGeoError::BadGeoLng(-2021.))), ]; for 
(req, expected_error) in invalid_req { From 7f25007d31ab3a87afcae0984c5e605d79f54022 Mon Sep 17 00:00:00 2001 From: filip Date: Mon, 13 Feb 2023 18:02:33 +0100 Subject: [PATCH 151/186] Update milli/src/asc_desc.rs Co-authored-by: Tamo --- milli/src/asc_desc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index ecac4194a..3d6ec2d24 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -165,7 +165,7 @@ impl FromStr for AscDesc { #[derive(Error, Debug)] pub enum SortError { - #[error("{}", error)] + #[error(transparent)] ParseGeoError { error: ParseGeoError }, #[error("Invalid syntax for the geo parameter: expected expression formated like \ `_geoPoint(latitude, longitude)` and ending by `:asc` or `:desc`, found `{name}`.")] From 849de089d2efca18aa2efb8f0dcdc440bc0f6f36 Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Tue, 14 Feb 2023 00:08:42 +0100 Subject: [PATCH 152/186] add thiserror for AscDescError --- milli/src/asc_desc.rs | 24 ++++-------------------- 1 file changed, 4 insertions(+), 20 deletions(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index 3d6ec2d24..f8595a0ee 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -12,10 +12,13 @@ use crate::{CriterionError, Error, UserError}; /// This error type is never supposed to be shown to the end user. /// You must always cast it to a sort error or a criterion error. -#[derive(Debug)] +#[derive(Error, Debug)] pub enum AscDescError { + #[error(transparent)] GeoError(ParseGeoError), + #[error("Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{name}`.")] InvalidSyntax { name: String }, + #[error("`{name}` is a reserved keyword and thus can't be used as a asc/desc rule.")] ReservedKeyword { name: String }, } @@ -25,25 +28,6 @@ impl From for AscDescError { } } -impl fmt::Display for AscDescError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::GeoError(error) => { - write!(f, "{error}",) - } - Self::InvalidSyntax { name } => { - write!(f, "Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{}`.", name) - } - Self::ReservedKeyword { name } => { - write!( - f, - "`{}` is a reserved keyword and thus can't be used as a asc/desc rule.", - name - ) - } - } - } -} impl From for CriterionError { fn from(error: AscDescError) -> Self { From d7ad39ad778e8299c9255277d4c13d83cc6078cc Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Tue, 14 Feb 2023 00:14:17 +0100 Subject: [PATCH 153/186] fix: clippy error --- milli/src/asc_desc.rs | 24 ++++++++++---------- milli/src/search/facet/filter.rs | 38 ++++++++++++++++---------------- milli/src/search/facet/mod.rs | 2 +- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index f8595a0ee..2e3892707 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use crate::error::is_reserved_keyword; -use crate::search::facet::ParseGeoError; +use crate::search::facet::BadGeoError; use crate::{CriterionError, Error, UserError}; /// This error type is never supposed to be shown to the end user. 
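The thiserror migration in the last two patches leans on two mechanisms worth spelling out: `#[error(transparent)]`, which forwards `Display` (and `source()`) to the wrapped error so the geo messages are written only once, and a `From` impl that lets `?` convert the inner error into the wrapper. A minimal self-contained sketch with shortened, hypothetical type names (the real code writes its `From` impl by hand rather than deriving it with `#[from]`):

    use thiserror::Error;

    #[derive(Error, Debug)]
    #[error("Bad latitude `{0}`. Latitude must be contained between -90 and 90 degrees.")]
    struct BadLat(f64);

    #[derive(Error, Debug)]
    enum AscDescErr {
        // `transparent` forwards Display and source() to the wrapped error.
        #[error(transparent)]
        Geo(#[from] BadLat),
    }

    fn check_latitude(lat: f64) -> Result<(), AscDescErr> {
        if !(-90.0..=90.0).contains(&lat) {
            // `?` converts BadLat into AscDescErr through the From impl.
            return Err(BadLat(lat))?;
        }
        Ok(())
    }

    fn main() {
        let err = check_latitude(200.0).unwrap_err();
        assert_eq!(
            err.to_string(),
            "Bad latitude `200`. Latitude must be contained between -90 and 90 degrees."
        );
    }
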
@@ -15,15 +15,15 @@ use crate::{CriterionError, Error, UserError}; #[derive(Error, Debug)] pub enum AscDescError { #[error(transparent)] - GeoError(ParseGeoError), + GeoError(BadGeoError), #[error("Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{name}`.")] InvalidSyntax { name: String }, #[error("`{name}` is a reserved keyword and thus can't be used as a asc/desc rule.")] ReservedKeyword { name: String }, } -impl From for AscDescError { - fn from(geo_error: ParseGeoError) -> Self { +impl From for AscDescError { + fn from(geo_error: BadGeoError) -> Self { AscDescError::GeoError(geo_error) } } @@ -72,9 +72,9 @@ impl FromStr for Member { .map_err(|_| AscDescError::ReservedKeyword { name: text.to_string() }) })?; if !(-90.0..=90.0).contains(&lat) { - return Err(ParseGeoError::BadGeoLat(lat))?; + return Err(BadGeoError::Lat(lat))?; } else if !(-180.0..=180.0).contains(&lng) { - return Err(ParseGeoError::BadGeoLng(lng))?; + return Err(BadGeoError::Lng(lng))?; } Ok(Member::Geo([lat, lng])) } @@ -150,7 +150,7 @@ impl FromStr for AscDesc { #[derive(Error, Debug)] pub enum SortError { #[error(transparent)] - ParseGeoError { error: ParseGeoError }, + ParseGeoError { error: BadGeoError }, #[error("Invalid syntax for the geo parameter: expected expression formated like \ `_geoPoint(latitude, longitude)` and ending by `:asc` or `:desc`, found `{name}`.")] BadGeoPointUsage { name: String }, @@ -261,11 +261,11 @@ mod tests { ), ("_geoPoint(35, 85, 75):asc", ReservedKeyword { name: S("_geoPoint(35, 85, 75)") }), ("_geoPoint(18):asc", ReservedKeyword { name: S("_geoPoint(18)") }), - ("_geoPoint(200, 200):asc", GeoError(ParseGeoError::BadGeoLat(200.))), - ("_geoPoint(90.000001, 0):asc", GeoError(ParseGeoError::BadGeoLat(90.000001))), - ("_geoPoint(0, -180.000001):desc", GeoError(ParseGeoError::BadGeoLng(-180.000001))), - ("_geoPoint(159.256, 130):asc", GeoError(ParseGeoError::BadGeoLat(159.256))), - ("_geoPoint(12, -2021):desc", GeoError(ParseGeoError::BadGeoLng(-2021.))), + ("_geoPoint(200, 200):asc", GeoError(BadGeoError::Lat(200.))), + ("_geoPoint(90.000001, 0):asc", GeoError(BadGeoError::Lat(90.000001))), + ("_geoPoint(0, -180.000001):desc", GeoError(BadGeoError::Lng(-180.000001))), + ("_geoPoint(159.256, 130):asc", GeoError(BadGeoError::Lat(159.256))), + ("_geoPoint(12, -2021):desc", GeoError(BadGeoError::Lng(-2021.))), ]; for (req, expected_error) in invalid_req { diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 3e95909a0..585d59a29 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -22,26 +22,26 @@ pub struct Filter<'a> { } #[derive(Debug)] -pub enum ParseGeoError { - BadGeoLat(f64), - BadGeoLng(f64), - BadGeoBoundingBoxTopIsBelowBottom(f64, f64), +pub enum BadGeoError { + Lat(f64), + Lng(f64), + BoundingBoxTopIsBelowBottom(f64, f64), } -impl std::error::Error for ParseGeoError {} +impl std::error::Error for BadGeoError {} -impl Display for ParseGeoError { +impl Display for BadGeoError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => { + Self::BoundingBoxTopIsBelowBottom(top, bottom) => { write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`.") } - Self::BadGeoLat(lat) => write!( + Self::Lat(lat) => write!( f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. 
", lat ), - Self::BadGeoLng(lng) => write!( + Self::Lng(lng) => write!( f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng @@ -53,15 +53,15 @@ impl Display for ParseGeoError { #[derive(Debug)] enum FilterError<'a> { AttributeNotFilterable { attribute: &'a str, filterable_fields: HashSet }, - ParseGeoError(ParseGeoError), + ParseGeoError(BadGeoError), ReservedGeo(&'a str), Reserved(&'a str), TooDeep, } impl<'a> std::error::Error for FilterError<'a> {} -impl<'a> From for FilterError<'a> { - fn from(geo_error: ParseGeoError) -> Self { +impl<'a> From for FilterError<'a> { + fn from(geo_error: BadGeoError) -> Self { FilterError::ParseGeoError(geo_error) } } @@ -390,12 +390,12 @@ impl<'a> Filter<'a> { [point[0].parse_finite_float()?, point[1].parse_finite_float()?]; if !(-90.0..=90.0).contains(&base_point[0]) { return Err( - point[0].as_external_error(ParseGeoError::BadGeoLat(base_point[0])) + point[0].as_external_error(BadGeoError::Lat(base_point[0])) )?; } if !(-180.0..=180.0).contains(&base_point[1]) { return Err( - point[1].as_external_error(ParseGeoError::BadGeoLng(base_point[1])) + point[1].as_external_error(BadGeoError::Lng(base_point[1])) )?; } let radius = radius.parse_finite_float()?; @@ -435,23 +435,23 @@ impl<'a> Filter<'a> { ]; if !(-90.0..=90.0).contains(&top_left[0]) { return Err(top_left_point[0] - .as_external_error(ParseGeoError::BadGeoLat(top_left[0])))?; + .as_external_error(BadGeoError::Lat(top_left[0])))?; } if !(-180.0..=180.0).contains(&top_left[1]) { return Err(top_left_point[1] - .as_external_error(ParseGeoError::BadGeoLng(top_left[1])))?; + .as_external_error(BadGeoError::Lng(top_left[1])))?; } if !(-90.0..=90.0).contains(&bottom_right[0]) { return Err(bottom_right_point[0] - .as_external_error(ParseGeoError::BadGeoLat(bottom_right[0])))?; + .as_external_error(BadGeoError::Lat(bottom_right[0])))?; } if !(-180.0..=180.0).contains(&bottom_right[1]) { return Err(bottom_right_point[1] - .as_external_error(ParseGeoError::BadGeoLng(bottom_right[1])))?; + .as_external_error(BadGeoError::Lng(bottom_right[1])))?; } if top_left[0] < bottom_right[0] { return Err(bottom_right_point[1].as_external_error( - ParseGeoError::BadGeoBoundingBoxTopIsBelowBottom( + BadGeoError::BoundingBoxTopIsBelowBottom( top_left[0], bottom_right[0], ), diff --git a/milli/src/search/facet/mod.rs b/milli/src/search/facet/mod.rs index 0d0f96851..fb93ae00b 100644 --- a/milli/src/search/facet/mod.rs +++ b/milli/src/search/facet/mod.rs @@ -4,7 +4,7 @@ use heed::types::{ByteSlice, DecodeIgnore}; use heed::{BytesDecode, RoTxn}; pub use self::facet_distribution::{FacetDistribution, DEFAULT_VALUES_PER_FACET}; -pub use self::filter::{Filter, ParseGeoError}; +pub use self::filter::{Filter, BadGeoError}; use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec}; use crate::heed_codec::ByteSliceRefCodec; mod facet_distribution; From b095325bf8cbde54139210b51009de33a6471c87 Mon Sep 17 00:00:00 2001 From: Kebron Date: Wed, 15 Feb 2023 00:23:02 +0900 Subject: [PATCH 154/186] Add tests with rust nightly in CI --- .github/workflows/rust.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 1752739bc..5f783ca9e 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -2,6 +2,9 @@ name: Rust on: workflow_dispatch: + schedule: + # Everyday at 5:00am + - cron: '0 5 * * *' pull_request: push: # trying and staging branches are for Bors config @@ -27,10 +30,18 @@ 
jobs: run: | apt-get update && apt-get install -y curl apt-get install build-essential -y - - uses: actions-rs/toolchain@v1 + - name: Run test with Rust stable + if: github.event_name != 'schedule' + uses: actions-rs/toolchain@v1 with: toolchain: stable override: true + - name: Run test with Rust nightly + if: github.event_name == 'schedule' + uses: actions-rs/toolchain@v1 + with: + toolchain: nightly + override: true # Disable cache due to disk space issues with Windows workers in CI # - name: Cache dependencies # uses: Swatinem/rust-cache@v2.2.0 From a53536836b79fd3b6e8ba9b1301bb46177a27cd4 Mon Sep 17 00:00:00 2001 From: Filip Bachul Date: Tue, 14 Feb 2023 17:03:44 +0100 Subject: [PATCH 155/186] fmt --- milli/src/asc_desc.rs | 1 - milli/src/search/facet/filter.rs | 23 +++++++++-------------- milli/src/search/facet/mod.rs | 2 +- 3 files changed, 10 insertions(+), 16 deletions(-) diff --git a/milli/src/asc_desc.rs b/milli/src/asc_desc.rs index 2e3892707..bbc49ea7d 100644 --- a/milli/src/asc_desc.rs +++ b/milli/src/asc_desc.rs @@ -28,7 +28,6 @@ impl From for AscDescError { } } - impl From for CriterionError { fn from(error: AscDescError) -> Self { match error { diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index 585d59a29..a4ac53950 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -389,14 +389,10 @@ impl<'a> Filter<'a> { let base_point: [f64; 2] = [point[0].parse_finite_float()?, point[1].parse_finite_float()?]; if !(-90.0..=90.0).contains(&base_point[0]) { - return Err( - point[0].as_external_error(BadGeoError::Lat(base_point[0])) - )?; + return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?; } if !(-180.0..=180.0).contains(&base_point[1]) { - return Err( - point[1].as_external_error(BadGeoError::Lng(base_point[1])) - )?; + return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?; } let radius = radius.parse_finite_float()?; let rtree = match index.geo_rtree(rtxn)? 
{ @@ -434,12 +430,14 @@ impl<'a> Filter<'a> { bottom_right_point[1].parse_finite_float()?, ]; if !(-90.0..=90.0).contains(&top_left[0]) { - return Err(top_left_point[0] - .as_external_error(BadGeoError::Lat(top_left[0])))?; + return Err( + top_left_point[0].as_external_error(BadGeoError::Lat(top_left[0])) + )?; } if !(-180.0..=180.0).contains(&top_left[1]) { - return Err(top_left_point[1] - .as_external_error(BadGeoError::Lng(top_left[1])))?; + return Err( + top_left_point[1].as_external_error(BadGeoError::Lng(top_left[1])) + )?; } if !(-90.0..=90.0).contains(&bottom_right[0]) { return Err(bottom_right_point[0] @@ -451,10 +449,7 @@ impl<'a> Filter<'a> { } if top_left[0] < bottom_right[0] { return Err(bottom_right_point[1].as_external_error( - BadGeoError::BoundingBoxTopIsBelowBottom( - top_left[0], - bottom_right[0], - ), + BadGeoError::BoundingBoxTopIsBelowBottom(top_left[0], bottom_right[0]), ))?; } diff --git a/milli/src/search/facet/mod.rs b/milli/src/search/facet/mod.rs index fb93ae00b..c88d4e9e7 100644 --- a/milli/src/search/facet/mod.rs +++ b/milli/src/search/facet/mod.rs @@ -4,7 +4,7 @@ use heed::types::{ByteSlice, DecodeIgnore}; use heed::{BytesDecode, RoTxn}; pub use self::facet_distribution::{FacetDistribution, DEFAULT_VALUES_PER_FACET}; -pub use self::filter::{Filter, BadGeoError}; +pub use self::filter::{BadGeoError, Filter}; use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec}; use crate::heed_codec::ByteSliceRefCodec; mod facet_distribution; From 7f3ae40204aa861440349ec3d95f48a207ab2289 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 14 Feb 2023 17:09:20 +0100 Subject: [PATCH 156/186] Remove a useless comment regarding the index pattern error code --- meilisearch-types/src/index_uid_pattern.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs index 9f49c06ea..99537b22f 100644 --- a/meilisearch-types/src/index_uid_pattern.rs +++ b/meilisearch-types/src/index_uid_pattern.rs @@ -119,7 +119,6 @@ impl Error for IndexUidPatternFormatError {} impl ErrorCode for IndexUidPatternFormatError { fn error_code(&self) -> Code { - // TODO should I return a new error code? Code::InvalidIndexUid } } From 29d14bed90ef6d744c0d7b3407dee6763e75f769 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 14 Feb 2023 17:45:46 +0100 Subject: [PATCH 157/186] get rid of the let/else syntax --- index-scheduler/src/batch.rs | 26 ++++++++++++++++++------ index-scheduler/src/utils.rs | 14 ++++++++++---- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs index 7ca3bfb20..23fb5a46b 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -1060,9 +1060,17 @@ impl IndexScheduler { let (new_builder, user_result) = builder.add_documents(reader)?; builder = new_builder; - let Some(Details::DocumentAdditionOrUpdate { received_documents, .. }) = task.details - // In the case of a `documentAdditionOrUpdate` the details MUST be set - else { unreachable!(); }; + let received_documents = + if let Some(Details::DocumentAdditionOrUpdate { + received_documents, + ..
+ }) = task.details + { + received_documents + } else { + // In the case of a `documentAdditionOrUpdate` the details MUST be set + unreachable!(); + }; match user_result { Ok(count) => { @@ -1087,9 +1095,15 @@ impl IndexScheduler { builder.remove_documents(document_ids)?; builder = new_builder; - let Some(Details::DocumentDeletion { provided_ids, .. }) = task.details - // In the case of a `documentAdditionOrUpdate` the details MUST be set - else { unreachable!(); }; + let provided_ids = + if let Some(Details::DocumentDeletion { provided_ids, .. }) = + task.details + { + provided_ids + } else { + // In the case of a `documentAdditionOrUpdate` the details MUST be set + unreachable!(); + }; match user_result { Ok(count) => { diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs index f68371767..acb520513 100644 --- a/index-scheduler/src/utils.rs +++ b/index-scheduler/src/utils.rs @@ -440,10 +440,16 @@ impl IndexScheduler { deleted_documents, } => { assert_eq!(kind.as_kind(), Kind::DocumentDeletion); - let KindWithContent::DocumentDeletion { - ref index_uid, - ref documents_ids, - } = kind else { unreachable!() }; + let (index_uid, documents_ids) = + if let KindWithContent::DocumentDeletion { + ref index_uid, + ref documents_ids, + } = kind + { + (index_uid, documents_ids) + } else { + unreachable!() + }; assert_eq!(&task_index_uid.unwrap(), index_uid); match status { From 43a19d07094a9b5c81afab0cb2b3026dadc1a66d Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 14 Feb 2023 17:55:26 +0100 Subject: [PATCH 158/186] document the operation enum + the grenads --- milli/src/update/index_documents/transform.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 73c734aaa..05e7089b7 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -50,8 +50,12 @@ pub struct Transform<'a, 'i> { pub index_documents_method: IndexDocumentsMethod, available_documents_ids: AvailableDocumentsIds, + // Both grenads follow the same format: + // key | value + // u32 | 1 byte for the Operation byte, the rest is the obkv of the document stored original_sorter: grenad::Sorter, flattened_sorter: grenad::Sorter, + replaced_documents_ids: RoaringBitmap, new_documents_ids: RoaringBitmap, // To increase the cache locality and decrease the heap usage we use compact smartstring. documents_count: usize, } +/// This enum is specific to the grenad sorter stored in the transform. +/// It's used as the first byte of the grenads and tells you if the document id was an addition or a deletion.
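To make the key/value layout described in the new comment concrete: each sorter entry keys the internal document id as big-endian bytes, and its value is one `Operation` tag byte followed by the obkv bytes of the document. The helper below is hypothetical, written only to illustrate that layout (the codebase inserts entries directly on the sorters, as in PATCH 137):

    // Hypothetical illustration of one grenad sorter entry.
    fn encode_entry(docid: u32, operation: u8, obkv: &[u8]) -> (Vec<u8>, Vec<u8>) {
        let key = docid.to_be_bytes().to_vec();
        let mut value = Vec::with_capacity(1 + obkv.len());
        value.push(operation); // Operation::Addition or Operation::Deletion as u8
        value.extend_from_slice(obkv); // the rest of the value is the obkv itself
        (key, value)
    }
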
#[repr(u8)] enum Operation { Addition, From 8de3c9f737a092b8aaf7b4dc6587659b11ac3747 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 14 Feb 2023 17:57:14 +0100 Subject: [PATCH 159/186] Update milli/src/update/index_documents/transform.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Clément Renault --- milli/src/update/index_documents/transform.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 05e7089b7..9d07dc871 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -786,9 +786,8 @@ fn merge_obkvs_and_operations<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Resul // we can ignore everything that happened before the last delete. let starting_position = obkvs .iter() - .rev() - .position(|obkv| obkv[0] == Operation::Deletion as u8) - .map_or(0, |pos| obkvs.len() - pos); + .rposition(|obkv| obkv[0] == Operation::Deletion as u8) + .unwrap_or(0); // [add, add, delete] // if the last operation was a deletion then we simply return the deletion From fb5e4957a6f0359cc045e02c826ad2ba88b6f2ac Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 14 Feb 2023 18:23:57 +0100 Subject: [PATCH 160/186] fix and test the early exit in case a grenad ends with a deletion --- milli/src/update/index_documents/transform.rs | 51 +++++++++++++++++-- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs index 9d07dc871..f5da9473e 100644 --- a/milli/src/update/index_documents/transform.rs +++ b/milli/src/update/index_documents/transform.rs @@ -784,14 +784,13 @@ impl<'a, 'i> Transform<'a, 'i> { fn merge_obkvs_and_operations<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result> { // [add, add, delete, add, add] // we can ignore everything that happened before the last delete. 
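As context for the fix below, here is a standalone model of the merge rule, with an illustrative `Op` type standing in for the operation byte and its obkv payload. Everything before the last deletion is discarded; if the deletion is the final entry the document is a deletion, otherwise the surviving additions are merged (the real code merges obkvs field by field, while this model simply keeps the newest payload):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Op {
        Deletion,
        Addition(u8), // the payload stands in for the obkv bytes
    }

    fn merge(ops: &[Op]) -> Op {
        // First entry that still matters: the one right after the last deletion.
        let start = ops.iter().rposition(|op| *op == Op::Deletion).map_or(0, |i| i + 1);
        if start == ops.len() {
            return Op::Deletion; // the last operation was a deletion
        }
        *ops[start..].last().unwrap()
    }

    fn main() {
        assert_eq!(merge(&[Op::Addition(1), Op::Deletion, Op::Addition(2)]), Op::Addition(2));
        assert_eq!(merge(&[Op::Addition(1), Op::Deletion]), Op::Deletion);
    }
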
-    let starting_position = obkvs
-        .iter()
-        .rposition(|obkv| obkv[0] == Operation::Deletion as u8)
-        .unwrap_or(0);
+    let starting_position =
+        obkvs.iter().rposition(|obkv| obkv[0] == Operation::Deletion as u8).unwrap_or(0);

     // [add, add, delete]
     // if the last operation was a deletion then we simply return the deletion
-    if starting_position == obkvs.len() {
+    if starting_position == obkvs.len() - 1 && obkvs.last().unwrap()[0] == Operation::Deletion as u8
+    {
         return Ok(obkvs[obkvs.len() - 1].clone());
     }
     let mut buffer = Vec::new();
@@ -836,3 +835,45 @@ impl TransformOutput {
             .collect())
     }
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn merge_obkvs() {
+        let mut doc_0 = Vec::new();
+        let mut kv_writer = KvWriter::new(&mut doc_0);
+        kv_writer.insert(0_u8, [0]).unwrap();
+        kv_writer.finish().unwrap();
+        doc_0.insert(0, Operation::Addition as u8);
+
+        let ret = merge_obkvs_and_operations(&[], &[Cow::from(doc_0.as_slice())]).unwrap();
+        assert_eq!(*ret, doc_0);
+
+        let ret = merge_obkvs_and_operations(
+            &[],
+            &[Cow::from([Operation::Deletion as u8].as_slice()), Cow::from(doc_0.as_slice())],
+        )
+        .unwrap();
+        assert_eq!(*ret, doc_0);
+
+        let ret = merge_obkvs_and_operations(
+            &[],
+            &[Cow::from(doc_0.as_slice()), Cow::from([Operation::Deletion as u8].as_slice())],
+        )
+        .unwrap();
+        assert_eq!(*ret, [Operation::Deletion as u8]);
+
+        let ret = merge_obkvs_and_operations(
+            &[],
+            &[
+                Cow::from([Operation::Addition as u8, 1].as_slice()),
+                Cow::from([Operation::Deletion as u8].as_slice()),
+                Cow::from(doc_0.as_slice()),
+            ],
+        )
+        .unwrap();
+        assert_eq!(*ret, doc_0);
+    }
+}

From 1b1703a6090008ce7ebff5c5d9ed5c53fc0293e6 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Tue, 14 Feb 2023 18:32:41 +0100
Subject: [PATCH 161/186] make a small optimization to merge obkvs a little bit faster

---
 milli/src/update/index_documents/transform.rs | 21 ++++++++++---------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs
index f5da9473e..81088eb69 100644
--- a/milli/src/update/index_documents/transform.rs
+++ b/milli/src/update/index_documents/transform.rs
@@ -797,18 +797,19 @@ fn merge_obkvs_and_operations<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Resul
     // (add, add, delete) [add, add]
     // in the other case, no deletion will be encountered during the merge
-    Ok(obkvs[starting_position..]
-        .iter()
-        .cloned()
-        .reduce(|acc, current| {
-            let first = obkv::KvReader::new(&acc[1..]);
+    let mut ret =
+        obkvs[starting_position..].iter().cloned().fold(Vec::new(), |mut acc, current| {
+            let first = obkv::KvReader::new(&acc);
             let second = obkv::KvReader::new(&current[1..]);
             merge_two_obkvs(first, second, &mut buffer);
-            // TODO: do this only once at the end
-            buffer.insert(0, Operation::Addition as u8);
-            Cow::from(buffer.clone())
-        })
-        .unwrap())
+
+            // move the result of the merge into our accumulator
+            std::mem::swap(&mut acc, &mut buffer);
+            acc
+        });
+
+    ret.insert(0, Operation::Addition as u8);
+    Ok(Cow::from(ret))
 }

 /// Drops all the value of type `U` in vec, and reuses the allocation to create a `Vec`.
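The three patches above all revolve around the same invariant: every value pushed into the transform's sorters is one `Operation` byte followed by the obkv payload, and a merge keeps nothing that precedes the last deletion. Below is a minimal standalone sketch of those semantics; the discriminant values are hypothetical (the real enum is private to transform.rs) and a last-write-wins placeholder stands in for the real `merge_two_obkvs` field merge:

use std::borrow::Cow;

// Hypothetical discriminants standing in for the private `Operation` enum.
const ADDITION: u8 = 0;
const DELETION: u8 = 1;

// Resolve the stack of prefixed values accumulated for one document id.
// Assumes every value is non-empty: a deletion is exactly one byte, an
// addition is the operation byte plus the obkv payload.
fn resolve<'a>(ops: &[Cow<'a, [u8]>]) -> Cow<'a, [u8]> {
    // Everything before the most recent deletion can be ignored.
    let start = ops.iter().rposition(|v| v[0] == DELETION).unwrap_or(0);
    // A trailing deletion wins outright. This is the early exit fixed in
    // PATCH 160: after the rposition refactor, `start` is an index (or 0),
    // so the old `start == ops.len()` comparison could never be true.
    if start == ops.len() - 1 && ops[start][0] == DELETION {
        return ops[start].clone();
    }
    // The real code folds the remaining additions with `merge_two_obkvs`;
    // here the last addition simply wins.
    ops[ops.len() - 1].clone()
}

fn main() {
    let doc = Cow::from(vec![ADDITION, 42]); // operation byte + fake obkv payload
    let del = Cow::from(vec![DELETION]); // a deletion carries no payload
    assert_eq!(resolve(&[doc.clone(), del.clone()]), del);
    assert_eq!(resolve(&[del, doc.clone()]), doc);
}

Note that this non-empty-value assumption matches the layout the tests above construct with `KvWriter` before prepending the operation byte.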
From 74dcfe967636deb2ccb6cfc02c9c62c294e826ee Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 14 Feb 2023 19:09:40 +0100 Subject: [PATCH 162/186] Fix a bug when you update a document that was already present in the db, deleted and then inserted again in the same transform --- milli/src/update/index_documents/mod.rs | 103 ++++++++++++++++-- milli/src/update/index_documents/transform.rs | 9 +- 2 files changed, 98 insertions(+), 14 deletions(-) diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index 11e1e5811..5e547a049 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -1908,7 +1908,9 @@ mod tests { #[test] fn add_and_delete_documents_in_single_transform() { - let index = TempIndex::new(); + let mut index = TempIndex::new(); + index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments; + let mut wtxn = index.write_txn().unwrap(); let builder = IndexDocuments::new( &mut wtxn, @@ -1948,7 +1950,9 @@ mod tests { #[test] fn add_update_and_delete_documents_in_single_transform() { - let index = TempIndex::new(); + let mut index = TempIndex::new(); + index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments; + let mut wtxn = index.write_txn().unwrap(); let builder = IndexDocuments::new( &mut wtxn, @@ -1969,8 +1973,8 @@ mod tests { insta::assert_display_snapshot!(added.unwrap(), @"3"); let documents = documents!([ - { "id": 2, "doggo": { "name": "jean", "age": 20 } }, - { "id": 3, "name": "bob", "age": 25 }, + { "id": 2, "catto": "jorts" }, + { "id": 3, "legs": 4 }, ]); let (builder, added) = builder.add_documents(documents).unwrap(); insta::assert_display_snapshot!(added.unwrap(), @"2"); @@ -1988,13 +1992,15 @@ mod tests { wtxn.commit().unwrap(); db_snap!(index, documents, @r###" - {"id":3,"name":"bob","age":25} + {"id":3,"name":"jean","age":25,"legs":4} "###); } #[test] fn add_document_and_in_another_transform_update_and_delete_documents() { - let index = TempIndex::new(); + let mut index = TempIndex::new(); + index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments; + let mut wtxn = index.write_txn().unwrap(); let builder = IndexDocuments::new( &mut wtxn, @@ -2043,8 +2049,8 @@ mod tests { .unwrap(); let documents = documents!([ - { "id": 2, "doggo": { "name": "jean", "age": 20 } }, - { "id": 3, "name": "bob", "age": 25 }, + { "id": 2, "catto": "jorts" }, + { "id": 3, "legs": 4 }, ]); let (builder, added) = builder.add_documents(documents).unwrap(); insta::assert_display_snapshot!(added.unwrap(), @"2"); @@ -2062,13 +2068,15 @@ mod tests { wtxn.commit().unwrap(); db_snap!(index, documents, @r###" - {"id":3,"name":"bob","age":25} + {"id":3,"name":"jean","age":25,"legs":4} "###); } #[test] fn delete_document_and_then_add_documents_in_the_same_transform() { - let index = TempIndex::new(); + let mut index = TempIndex::new(); + index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments; + let mut wtxn = index.write_txn().unwrap(); let builder = IndexDocuments::new( &mut wtxn, @@ -2107,7 +2115,9 @@ mod tests { #[test] fn delete_the_same_document_multiple_time() { - let index = TempIndex::new(); + let mut index = TempIndex::new(); + index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments; + let mut wtxn = index.write_txn().unwrap(); let builder = IndexDocuments::new( &mut wtxn, @@ -2148,4 +2158,75 @@ mod tests { {"id":3,"name":"bob","age":25} "###); } + + #[test] + fn 
add_document_and_in_another_transform_delete_the_document_then_add_it_again() {
+        let mut index = TempIndex::new();
+        index.index_documents_config.update_method = IndexDocumentsMethod::UpdateDocuments;
+
+        let mut wtxn = index.write_txn().unwrap();
+        let builder = IndexDocuments::new(
+            &mut wtxn,
+            &index,
+            &index.indexer_config,
+            index.index_documents_config.clone(),
+            |_| (),
+            || false,
+        )
+        .unwrap();
+
+        let documents = documents!([
+            { "id": 1, "doggo": "kevin" },
+        ]);
+        let (builder, added) = builder.add_documents(documents).unwrap();
+        insta::assert_display_snapshot!(added.unwrap(), @"1");
+
+        let addition = builder.execute().unwrap();
+        insta::assert_debug_snapshot!(addition, @r###"
+        DocumentAdditionResult {
+            indexed_documents: 1,
+            number_of_documents: 1,
+        }
+        "###);
+        wtxn.commit().unwrap();
+
+        db_snap!(index, documents, @r###"
+        {"id":1,"doggo":"kevin"}
+        "###);
+
+        // A first batch of documents has been inserted
+
+        let mut wtxn = index.write_txn().unwrap();
+        let builder = IndexDocuments::new(
+            &mut wtxn,
+            &index,
+            &index.indexer_config,
+            index.index_documents_config.clone(),
+            |_| (),
+            || false,
+        )
+        .unwrap();
+
+        let (builder, removed) = builder.remove_documents(vec![S("1")]).unwrap();
+        insta::assert_display_snapshot!(removed.unwrap(), @"1");
+
+        let documents = documents!([
+            { "id": 1, "catto": "jorts" },
+        ]);
+        let (builder, added) = builder.add_documents(documents).unwrap();
+        insta::assert_display_snapshot!(added.unwrap(), @"1");
+
+        let addition = builder.execute().unwrap();
+        insta::assert_debug_snapshot!(addition, @r###"
+        DocumentAdditionResult {
+            indexed_documents: 1,
+            number_of_documents: 1,
+        }
+        "###);
+        wtxn.commit().unwrap();
+
+        db_snap!(index, documents, @r###"
+        {"id":1,"catto":"jorts"}
+        "###);
+    }
 }
diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs
index 81088eb69..0624db468 100644
--- a/milli/src/update/index_documents/transform.rs
+++ b/milli/src/update/index_documents/transform.rs
@@ -223,10 +223,13 @@ impl<'a, 'i> Transform<'a, 'i> {
             Entry::Occupied(entry) => *entry.get() as u32,
             Entry::Vacant(entry) => {
                 // If the document was already in the db we mark it as a replaced document.
-                // It'll be deleted later. We keep its original docid to insert it in the grenad.
+                // It'll be deleted later.
                 if let Some(docid) = external_documents_ids.get(entry.key()) {
-                    self.replaced_documents_ids.insert(docid);
-                    original_docid = Some(docid);
+                    // If it was already in the list of replaced documents it means it was deleted
+                    // by the remove_document method. We should start as if it never existed.
+ if self.replaced_documents_ids.insert(docid) { + original_docid = Some(docid); + } } let docid = self .available_documents_ids From 8fb7b1d10f2ae21a439da5ae21aecc7612a6c2f9 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 13 Feb 2023 18:45:13 +0100 Subject: [PATCH 163/186] bump deserr --- Cargo.lock | 53 ++++++++++++++----- meilisearch-types/Cargo.toml | 2 +- .../src/deserr/error_messages.rs | 10 ++-- meilisearch-types/src/deserr/mod.rs | 9 ++-- meilisearch-types/src/deserr/query_params.rs | 4 +- meilisearch-types/src/error.rs | 8 +-- meilisearch-types/src/index_uid.rs | 6 +-- meilisearch-types/src/index_uid_pattern.rs | 6 +-- meilisearch-types/src/keys.rs | 16 +++--- meilisearch-types/src/settings.rs | 21 ++++---- meilisearch-types/src/star_or.rs | 14 +++-- meilisearch/Cargo.toml | 2 +- meilisearch/src/extractors/json.rs | 6 +-- .../src/extractors/query_parameters.rs | 6 +-- meilisearch/src/routes/api_key.rs | 4 +- meilisearch/src/routes/indexes/documents.rs | 8 +-- meilisearch/src/routes/indexes/mod.rs | 8 +-- meilisearch/src/routes/indexes/search.rs | 2 +- meilisearch/src/routes/swap_indexes.rs | 4 +- meilisearch/src/routes/tasks.rs | 36 +++++++------ meilisearch/src/search.rs | 6 +-- milli/Cargo.toml | 2 +- milli/src/update/settings.rs | 6 +-- 23 files changed, 137 insertions(+), 102 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c44891518..5aa355d73 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -36,9 +36,9 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.2.2" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c83abf9903e1f0ad9973cc4f7b9767fd5a03a583f51a5b7a339e07987cd2724" +checksum = "0070905b2c4a98d184c4e81025253cb192aa8a73827553f38e9410801ceb35bb" dependencies = [ "actix-codec", "actix-rt", @@ -46,7 +46,7 @@ dependencies = [ "actix-tls", "actix-utils", "ahash", - "base64 0.13.1", + "base64 0.21.0", "bitflags", "brotli", "bytes", @@ -68,7 +68,10 @@ dependencies = [ "rand", "sha1", "smallvec", + "tokio", + "tokio-util", "tracing", + "zstd 0.12.3+zstd.1.5.2", ] [[package]] @@ -164,9 +167,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.2.1" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d48f7b6534e06c7bfc72ee91db7917d4af6afe23e7d223b51e68fffbb21e96b9" +checksum = "464e0fddc668ede5f26ec1f9557a8d44eda948732f40c6b0ad79126930eb775f" dependencies = [ "actix-codec", "actix-http", @@ -1110,20 +1113,23 @@ dependencies = [ [[package]] name = "deserr" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28380303ca15ec07e1d5b079baf19cf849b09edad5cab219c1c51b2bd07523de" +checksum = "a13eed41ca58d9dc99e2c67e1a5f50507dfa1b123cc4a942c81c49707bd347f0" dependencies = [ + "actix-web", "deserr-internal", + "futures", "serde-cs", "serde_json", + "strsim", ] [[package]] name = "deserr-internal" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "860928cd8af78d223a3d70dd581f21d7c3de8aa2eecd938e0c0a399ded7c1451" +checksum = "4d5412186d7149542b09319901d28b3c7d1f714a61d0c5d48a50560d09573ae4" dependencies = [ "convert_case 0.5.0", "proc-macro2", @@ -4423,7 +4429,7 @@ dependencies = [ "pbkdf2", "sha1", "time", - "zstd", + "zstd 0.11.2+zstd.1.5.2", ] [[package]] @@ -4432,7 +4438,16 @@ version = "0.11.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" dependencies = [ - "zstd-safe", + "zstd-safe 5.0.2+zstd.1.5.2", +] + +[[package]] +name = "zstd" +version = "0.12.3+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806" +dependencies = [ + "zstd-safe 6.0.4+zstd.1.5.4", ] [[package]] @@ -4446,10 +4461,20 @@ dependencies = [ ] [[package]] -name = "zstd-sys" -version = "2.0.5+zstd.1.5.2" +name = "zstd-safe" +version = "6.0.4+zstd.1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596" +checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.7+zstd.1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5" dependencies = [ "cc", "libc", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index f62202f76..021c44ea0 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -9,7 +9,7 @@ actix-web = { version = "4.2.1", default-features = false } anyhow = "1.0.65" convert_case = "0.6.0" csv = "1.1.6" -deserr = "0.3.0" +deserr = "0.4.0" either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" file-store = { path = "../file-store" } diff --git a/meilisearch-types/src/deserr/error_messages.rs b/meilisearch-types/src/deserr/error_messages.rs index 7e288085d..f17263813 100644 --- a/meilisearch-types/src/deserr/error_messages.rs +++ b/meilisearch-types/src/deserr/error_messages.rs @@ -6,6 +6,8 @@ We try to: 2. Use the correct terms depending on the format of the request (json/query param) 3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.) 
*/ +use std::ops::ControlFlow; + use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef}; use super::{DeserrJsonError, DeserrQueryParamError}; @@ -129,7 +131,7 @@ impl deserr::DeserializeError for DeserrJsonError { _self_: Option, error: deserr::ErrorKind, location: ValuePointerRef, - ) -> Result { + ) -> ControlFlow { let mut message = String::new(); message.push_str(&match error { @@ -175,7 +177,7 @@ impl deserr::DeserializeError for DeserrJsonError { } }); - Err(DeserrJsonError::new(message, C::default().error_code())) + ControlFlow::Break(DeserrJsonError::new(message, C::default().error_code())) } } @@ -222,7 +224,7 @@ impl deserr::DeserializeError for DeserrQueryParamError< _self_: Option, error: deserr::ErrorKind, location: ValuePointerRef, - ) -> Result { + ) -> ControlFlow { let mut message = String::new(); message.push_str(&match error { @@ -268,7 +270,7 @@ impl deserr::DeserializeError for DeserrQueryParamError< } }); - Err(DeserrQueryParamError::new(message, C::default().error_code())) + ControlFlow::Break(DeserrQueryParamError::new(message, C::default().error_code())) } } diff --git a/meilisearch-types/src/deserr/mod.rs b/meilisearch-types/src/deserr/mod.rs index c15b2c3a0..49474a9e6 100644 --- a/meilisearch-types/src/deserr/mod.rs +++ b/meilisearch-types/src/deserr/mod.rs @@ -1,6 +1,7 @@ use std::convert::Infallible; use std::fmt; use std::marker::PhantomData; +use std::ops::ControlFlow; use deserr::{DeserializeError, MergeWithError, ValuePointerRef}; @@ -64,8 +65,8 @@ impl _self_: Option, other: DeserrError, _merge_location: ValuePointerRef, - ) -> Result { - Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData }) + ) -> ControlFlow { + ControlFlow::Break(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData }) } } @@ -74,7 +75,7 @@ impl MergeWithError for DeserrError< _self_: Option, _other: Infallible, _merge_location: ValuePointerRef, - ) -> Result { + ) -> ControlFlow { unreachable!() } } @@ -112,7 +113,7 @@ macro_rules! merge_with_error_impl_take_error_message { _self_: Option, other: $err_type, merge_location: ValuePointerRef, - ) -> Result { + ) -> ControlFlow { DeserrError::::error::( None, deserr::ErrorKind::Unexpected { msg: other.to_string() }, diff --git a/meilisearch-types/src/deserr/query_params.rs b/meilisearch-types/src/deserr/query_params.rs index 28629aa1b..06d83747c 100644 --- a/meilisearch-types/src/deserr/query_params.rs +++ b/meilisearch-types/src/deserr/query_params.rs @@ -15,7 +15,7 @@ use std::convert::Infallible; use std::ops::Deref; use std::str::FromStr; -use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind}; +use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind}; use super::{DeserrParseBoolError, DeserrParseIntError}; use crate::error::unwrap_any; @@ -38,7 +38,7 @@ impl Deref for Param { } } -impl DeserializeFromValue for Param +impl Deserr for Param where E: DeserializeError + MergeWithError, T: FromQueryParameter, diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 39d9a1551..d83bf7eb9 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -382,10 +382,12 @@ impl ErrorCode for io::Error { } /// Unwrap a result, either its Ok or Err value. 
-pub fn unwrap_any(any: Result) -> T { +pub fn unwrap_any(any: std::ops::ControlFlow) -> T { + use std::ops::ControlFlow::*; + match any { - Ok(any) => any, - Err(any) => any, + Continue(any) => any, + Break(any) => any, } } diff --git a/meilisearch-types/src/index_uid.rs b/meilisearch-types/src/index_uid.rs index 2f3f6e5df..341ab02cb 100644 --- a/meilisearch-types/src/index_uid.rs +++ b/meilisearch-types/src/index_uid.rs @@ -2,14 +2,14 @@ use std::error::Error; use std::fmt; use std::str::FromStr; -use deserr::DeserializeFromValue; +use deserr::Deserr; use crate::error::{Code, ErrorCode}; /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long -#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)] -#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)] +#[derive(Debug, Clone, PartialEq, Eq, Deserr)] +#[deserr(try_from(String) = IndexUid::try_from -> IndexUidFormatError)] pub struct IndexUid(String); impl IndexUid { diff --git a/meilisearch-types/src/index_uid_pattern.rs b/meilisearch-types/src/index_uid_pattern.rs index 99537b22f..baf0249e2 100644 --- a/meilisearch-types/src/index_uid_pattern.rs +++ b/meilisearch-types/src/index_uid_pattern.rs @@ -4,7 +4,7 @@ use std::fmt; use std::ops::Deref; use std::str::FromStr; -use deserr::DeserializeFromValue; +use deserr::Deserr; use serde::{Deserialize, Serialize}; use crate::error::{Code, ErrorCode}; @@ -12,8 +12,8 @@ use crate::index_uid::{IndexUid, IndexUidFormatError}; /// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400 /// bytes long and optionally ending with a *. -#[derive(Serialize, Deserialize, DeserializeFromValue, Debug, Clone, PartialEq, Eq, Hash)] -#[deserr(from(&String) = FromStr::from_str -> IndexUidPatternFormatError)] +#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, Hash)] +#[deserr(try_from(&String) = FromStr::from_str -> IndexUidPatternFormatError)] pub struct IndexUidPattern(String); impl IndexUidPattern { diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index 804ee19c6..7478391ba 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -2,7 +2,7 @@ use std::convert::Infallible; use std::hash::Hash; use std::str::FromStr; -use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValuePointerRef}; +use deserr::{DeserializeError, Deserr, MergeWithError, ValuePointerRef}; use enum_iterator::Sequence; use milli::update::Setting; use serde::{Deserialize, Serialize}; @@ -24,7 +24,7 @@ impl MergeWithError for Dese _self_: Option, other: IndexUidPatternFormatError, merge_location: deserr::ValuePointerRef, - ) -> std::result::Result { + ) -> std::ops::ControlFlow { DeserrError::error::( None, deserr::ErrorKind::Unexpected { msg: other.to_string() }, @@ -33,20 +33,20 @@ impl MergeWithError for Dese } } -#[derive(Debug, DeserializeFromValue)] +#[derive(Debug, Deserr)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct CreateApiKey { #[deserr(default, error = DeserrJsonError)] pub description: Option, #[deserr(default, error = DeserrJsonError)] pub name: Option, - #[deserr(default = Uuid::new_v4(), error = DeserrJsonError, from(&String) = Uuid::from_str -> uuid::Error)] + #[deserr(default = Uuid::new_v4(), error = DeserrJsonError, try_from(&String) = Uuid::from_str -> uuid::Error)] pub uid: KeyId, #[deserr(error = DeserrJsonError, missing_field_error = 
DeserrJsonError::missing_api_key_actions)] pub actions: Vec, #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_api_key_indexes)] pub indexes: Vec, - #[deserr(error = DeserrJsonError, from(Option) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] + #[deserr(error = DeserrJsonError, try_from(Option) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)] pub expires_at: Option, } @@ -87,7 +87,7 @@ fn deny_immutable_fields_api_key( } } -#[derive(Debug, DeserializeFromValue)] +#[derive(Debug, Deserr)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)] pub struct PatchApiKey { #[deserr(default, error = DeserrJsonError)] @@ -182,9 +182,7 @@ fn parse_expiration_date( } } -#[derive( - Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue, -)] +#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, Deserr)] #[repr(u8)] pub enum Action { #[serde(rename = "*")] diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index b4ab1eff6..c7023bb10 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -3,9 +3,10 @@ use std::convert::Infallible; use std::fmt; use std::marker::PhantomData; use std::num::NonZeroUsize; +use std::ops::ControlFlow; use std::str::FromStr; -use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef}; +use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef}; use fst::IntoStreamer; use milli::update::Setting; use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET}; @@ -41,7 +42,7 @@ pub struct Checked; #[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct Unchecked; -impl DeserializeFromValue for Unchecked +impl Deserr for Unchecked where E: DeserializeError, { @@ -65,7 +66,7 @@ fn validate_min_word_size_for_typo_setting( Ok(s) } -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)] #[serde(deny_unknown_fields, rename_all = "camelCase")] #[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError)] pub struct MinWordSizeTyposSetting { @@ -77,7 +78,7 @@ pub struct MinWordSizeTyposSetting { pub two_typos: Setting, } -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)] #[serde(deny_unknown_fields, rename_all = "camelCase")] #[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError>)] pub struct TypoSettings { @@ -95,7 +96,7 @@ pub struct TypoSettings { pub disable_on_attributes: Setting>, } -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)] #[serde(deny_unknown_fields, rename_all = "camelCase")] #[deserr(rename_all = camelCase, deny_unknown_fields)] pub struct FacetingSettings { @@ -104,7 +105,7 @@ pub struct FacetingSettings { pub max_values_per_facet: Setting, } -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, 
DeserializeFromValue)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)] #[serde(deny_unknown_fields, rename_all = "camelCase")] #[deserr(rename_all = camelCase, deny_unknown_fields)] pub struct PaginationSettings { @@ -118,7 +119,7 @@ impl MergeWithError for DeserrJsonError, other: milli::CriterionError, merge_location: ValuePointerRef, - ) -> Result { + ) -> ControlFlow { Self::error::( None, ErrorKind::Unexpected { msg: other.to_string() }, @@ -130,7 +131,7 @@ impl MergeWithError for DeserrJsonError` from a `Settings`. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)] #[serde( deny_unknown_fields, rename_all = "camelCase", @@ -509,8 +510,8 @@ pub fn settings( }) } -#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)] -#[deserr(from(&String) = FromStr::from_str -> CriterionError)] +#[derive(Debug, Clone, PartialEq, Eq, Deserr)] +#[deserr(try_from(&String) = FromStr::from_str -> CriterionError)] pub enum RankingRuleView { /// Sorted by decreasing number of matched query terms. /// Query words at the front of an attribute is considered better than if it was at the back. diff --git a/meilisearch-types/src/star_or.rs b/meilisearch-types/src/star_or.rs index 135f610c4..0f3ef10fb 100644 --- a/meilisearch-types/src/star_or.rs +++ b/meilisearch-types/src/star_or.rs @@ -1,8 +1,9 @@ use std::fmt; use std::marker::PhantomData; +use std::ops::ControlFlow; use std::str::FromStr; -use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind}; +use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind}; use serde::de::Visitor; use serde::{Deserialize, Deserializer, Serialize, Serializer}; @@ -111,7 +112,7 @@ where } } -impl DeserializeFromValue for StarOr +impl Deserr for StarOr where T: FromStr, E: DeserializeError + MergeWithError, @@ -191,7 +192,7 @@ where } } -impl DeserializeFromValue for OptionStarOr +impl Deserr for OptionStarOr where E: DeserializeError + MergeWithError, T: FromQueryParameter, @@ -271,7 +272,7 @@ impl OptionStarOrList { } } -impl DeserializeFromValue for OptionStarOrList +impl Deserr for OptionStarOrList where E: DeserializeError + MergeWithError, T: FromQueryParameter, @@ -299,7 +300,10 @@ where Err(e) => { let location = if len_cs > 1 { location.push_index(i) } else { location }; - error = Some(E::merge(error, e, location)?); + error = match E::merge(error, e, location) { + ControlFlow::Continue(e) => Some(e), + ControlFlow::Break(e) => return Err(e), + }; } } } diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index dba645665..b6410f7cb 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", " bytes = "1.2.1" clap = { version = "4.0.9", features = ["derive", "env"] } crossbeam-channel = "0.5.6" -deserr = "0.3.0" +deserr = "0.4.0" dump = { path = "../dump" } either = "1.8.0" env_logger = "0.9.1" diff --git a/meilisearch/src/extractors/json.rs b/meilisearch/src/extractors/json.rs index e2418c9fb..c59af14db 100644 --- a/meilisearch/src/extractors/json.rs +++ b/meilisearch/src/extractors/json.rs @@ -7,7 +7,7 @@ use std::task::{Context, Poll}; use actix_web::dev::Payload; use actix_web::web::Json; use actix_web::{FromRequest, HttpRequest}; -use deserr::{DeserializeError, DeserializeFromValue}; +use deserr::{DeserializeError, Deserr}; use futures::ready; use 
meilisearch_types::error::{ErrorCode, ResponseError}; @@ -33,7 +33,7 @@ impl ValidatedJson { impl FromRequest for ValidatedJson where E: DeserializeError + ErrorCode + std::error::Error + 'static, - T: DeserializeFromValue, + T: Deserr, { type Error = actix_web::Error; type Future = ValidatedJsonExtractFut; @@ -54,7 +54,7 @@ pub struct ValidatedJsonExtractFut { impl Future for ValidatedJsonExtractFut where - T: DeserializeFromValue, + T: Deserr, E: DeserializeError + ErrorCode + std::error::Error + 'static, { type Output = Result, actix_web::Error>; diff --git a/meilisearch/src/extractors/query_parameters.rs b/meilisearch/src/extractors/query_parameters.rs index 99c76f3aa..39b833062 100644 --- a/meilisearch/src/extractors/query_parameters.rs +++ b/meilisearch/src/extractors/query_parameters.rs @@ -6,7 +6,7 @@ use std::{fmt, ops}; use actix_http::Payload; use actix_utils::future::{err, ok, Ready}; use actix_web::{FromRequest, HttpRequest}; -use deserr::{DeserializeError, DeserializeFromValue}; +use deserr::{DeserializeError, Deserr}; use meilisearch_types::error::{Code, ErrorCode, ResponseError}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] @@ -21,7 +21,7 @@ impl QueryParameter { impl QueryParameter where - T: DeserializeFromValue, + T: Deserr, E: DeserializeError + ErrorCode + std::error::Error + 'static, { pub fn from_query(query_str: &str) -> Result { @@ -57,7 +57,7 @@ impl fmt::Display for QueryParameter { impl FromRequest for QueryParameter where - T: DeserializeFromValue, + T: Deserr, E: DeserializeError + ErrorCode + std::error::Error + 'static, { type Error = actix_web::Error; diff --git a/meilisearch/src/routes/api_key.rs b/meilisearch/src/routes/api_key.rs index efc591d54..096aa7df0 100644 --- a/meilisearch/src/routes/api_key.rs +++ b/meilisearch/src/routes/api_key.rs @@ -1,7 +1,7 @@ use std::str; use actix_web::{web, HttpRequest, HttpResponse}; -use deserr::DeserializeFromValue; +use deserr::Deserr; use meilisearch_auth::error::AuthControllerError; use meilisearch_auth::AuthController; use meilisearch_types::deserr::query_params::Param; @@ -51,7 +51,7 @@ pub async fn create_api_key( Ok(HttpResponse::Created().json(res)) } -#[derive(DeserializeFromValue, Debug, Clone, Copy)] +#[derive(Deserr, Debug, Clone, Copy)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct ListApiKeys { #[deserr(default, error = DeserrQueryParamError)] diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 0ec1057ae..90bf70692 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -4,7 +4,7 @@ use actix_web::http::header::CONTENT_TYPE; use actix_web::web::Data; use actix_web::{web, HttpMessage, HttpRequest, HttpResponse}; use bstr::ByteSlice; -use deserr::DeserializeFromValue; +use deserr::Deserr; use futures::StreamExt; use index_scheduler::IndexScheduler; use log::debug; @@ -80,7 +80,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } -#[derive(Debug, DeserializeFromValue)] +#[derive(Debug, Deserr)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct GetDocument { #[deserr(default, error = DeserrQueryParamError)] @@ -125,7 +125,7 @@ pub async fn delete_document( Ok(HttpResponse::Accepted().json(task)) } -#[derive(Debug, DeserializeFromValue)] +#[derive(Debug, Deserr)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct BrowseQuery { 
#[deserr(default, error = DeserrQueryParamError)] @@ -155,7 +155,7 @@ pub async fn get_all_documents( Ok(HttpResponse::Ok().json(ret)) } -#[derive(Deserialize, Debug, DeserializeFromValue)] +#[derive(Deserialize, Debug, Deserr)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct UpdateDocumentsQuery { #[deserr(default, error = DeserrJsonError)] diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 2d352bfe5..c087fe202 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -2,7 +2,7 @@ use std::convert::Infallible; use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; -use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef}; +use deserr::{DeserializeError, Deserr, ValuePointerRef}; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_types::deserr::error_messages::immutable_field_error; @@ -73,7 +73,7 @@ impl IndexView { } } -#[derive(DeserializeFromValue, Debug, Clone, Copy)] +#[derive(Deserr, Debug, Clone, Copy)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct ListIndexes { #[deserr(default, error = DeserrQueryParamError)] @@ -105,7 +105,7 @@ pub async fn list_indexes( Ok(HttpResponse::Ok().json(ret)) } -#[derive(DeserializeFromValue, Debug)] +#[derive(Deserr, Debug)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct IndexCreateRequest { #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_index_uid)] @@ -157,7 +157,7 @@ fn deny_immutable_fields_index( } } -#[derive(DeserializeFromValue, Debug)] +#[derive(Deserr, Debug)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)] pub struct UpdateIndexRequest { #[deserr(default, error = DeserrJsonError)] diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 545c69ec5..50a2ffd74 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -31,7 +31,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { ); } -#[derive(Debug, deserr::DeserializeFromValue)] +#[derive(Debug, deserr::Deserr)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct SearchQueryGet { #[deserr(default, error = DeserrQueryParamError)] diff --git a/meilisearch/src/routes/swap_indexes.rs b/meilisearch/src/routes/swap_indexes.rs index 4a7802f2e..2070177d9 100644 --- a/meilisearch/src/routes/swap_indexes.rs +++ b/meilisearch/src/routes/swap_indexes.rs @@ -1,6 +1,6 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; -use deserr::DeserializeFromValue; +use deserr::Deserr; use index_scheduler::IndexScheduler; use meilisearch_types::deserr::DeserrJsonError; use meilisearch_types::error::deserr_codes::InvalidSwapIndexes; @@ -21,7 +21,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes)))); } -#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)] +#[derive(Deserr, Debug, Clone, PartialEq, Eq)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SwapIndexesPayload { #[deserr(error = DeserrJsonError, missing_field_error = DeserrJsonError::missing_swap_indexes)] diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index 
b78be7876..f747320b1 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -1,6 +1,6 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; -use deserr::DeserializeFromValue; +use deserr::Deserr; use index_scheduler::{IndexScheduler, Query, TaskId}; use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::DeserrQueryParamError; @@ -162,7 +162,7 @@ impl From
for DetailsView { } } -#[derive(Debug, DeserializeFromValue)] +#[derive(Debug, Deserr)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TasksFilterQuery { #[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError)] @@ -181,19 +181,20 @@ pub struct TasksFilterQuery { #[deserr(default, error = DeserrQueryParamError)] pub index_uids: OptionStarOrList, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] pub after_enqueued_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] pub before_enqueued_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] pub after_started_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] pub before_started_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] pub after_finished_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] pub before_finished_at: OptionStarOr, } + impl TasksFilterQuery { fn into_query(self) -> Query { Query { @@ -235,7 +236,7 @@ impl TaskDeletionOrCancelationQuery { } } -#[derive(Debug, DeserializeFromValue)] +#[derive(Debug, Deserr)] #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)] pub struct TaskDeletionOrCancelationQuery { #[deserr(default, error = DeserrQueryParamError)] @@ -249,19 +250,20 @@ pub struct TaskDeletionOrCancelationQuery { #[deserr(default, error = DeserrQueryParamError)] pub index_uids: OptionStarOrList, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] pub after_enqueued_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] pub before_enqueued_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] pub after_started_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, 
from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] pub before_started_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_after -> InvalidTaskDateError)] pub after_finished_at: OptionStarOr, - #[deserr(default, error = DeserrQueryParamError, from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] + #[deserr(default, error = DeserrQueryParamError, try_from(OptionStarOr) = deserialize_date_before -> InvalidTaskDateError)] pub before_finished_at: OptionStarOr, } + impl TaskDeletionOrCancelationQuery { fn into_query(self) -> Query { Query { @@ -498,7 +500,7 @@ pub fn deserialize_date_before( #[cfg(test)] mod tests { - use deserr::DeserializeFromValue; + use deserr::Deserr; use meili_snap::snapshot; use meilisearch_types::deserr::DeserrQueryParamError; use meilisearch_types::error::{Code, ResponseError}; @@ -507,7 +509,7 @@ mod tests { fn deserr_query_params(j: &str) -> Result where - T: DeserializeFromValue, + T: Deserr, { let value = serde_urlencoded::from_str::(j) .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?; diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index d4a65cc39..f48563141 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -3,7 +3,7 @@ use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::str::FromStr; use std::time::Instant; -use deserr::DeserializeFromValue; +use deserr::Deserr; use either::Either; use meilisearch_types::deserr::DeserrJsonError; use meilisearch_types::error::deserr_codes::*; @@ -29,7 +29,7 @@ pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string(); pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "".to_string(); pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "".to_string(); -#[derive(Debug, Clone, Default, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, Default, PartialEq, Eq, Deserr)] #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] pub struct SearchQuery { #[deserr(default, error = DeserrJsonError)] @@ -74,7 +74,7 @@ impl SearchQuery { } } -#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)] +#[derive(Debug, Clone, PartialEq, Eq, Deserr)] #[deserr(rename_all = camelCase)] pub enum MatchingStrategy { /// Remove query words from last to first diff --git a/milli/Cargo.toml b/milli/Cargo.toml index d9458ca70..70658d81d 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -12,7 +12,7 @@ byteorder = "1.4.3" charabia = { version = "0.7.0", default-features = false } concat-arrays = "0.1.2" crossbeam-channel = "0.5.6" -deserr = "0.3.0" +deserr = "0.4.0" either = "1.8.0" flatten-serde-json = { path = "../flatten-serde-json" } fst = "0.4.7" diff --git a/milli/src/update/settings.rs b/milli/src/update/settings.rs index 1646ab5b2..4f4fa25d6 100644 --- a/milli/src/update/settings.rs +++ b/milli/src/update/settings.rs @@ -2,7 +2,7 @@ use std::collections::{BTreeSet, HashMap, HashSet}; use std::result::Result as StdResult; use charabia::{Tokenizer, TokenizerBuilder}; -use deserr::{DeserializeError, DeserializeFromValue}; +use deserr::{DeserializeError, Deserr}; use itertools::Itertools; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use time::OffsetDateTime; @@ 
-23,9 +23,9 @@ pub enum Setting {
     NotSet,
 }

-impl DeserializeFromValue for Setting
+impl Deserr for Setting
 where
-    T: DeserializeFromValue,
+    T: Deserr,
     E: DeserializeError,
 {
     fn deserialize_from_value(

From 769576fd9465ff4c19aa04d04d6f01bd76f237f0 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Mon, 13 Feb 2023 19:34:47 +0100
Subject: [PATCH 164/186] get rid of the whole error_messages module since it has been integrated into the last version of deserr

---
 .../src/deserr/error_messages.rs              | 330 ------------------
 meilisearch-types/src/deserr/mod.rs           |  49 ++-
 meilisearch-types/src/keys.rs                 |   3 +-
 meilisearch/src/routes/indexes/mod.rs         |   3 +-
 meilisearch/tests/auth/errors.rs              |   4 +-
 5 files changed, 48 insertions(+), 341 deletions(-)
 delete mode 100644 meilisearch-types/src/deserr/error_messages.rs

diff --git a/meilisearch-types/src/deserr/error_messages.rs b/meilisearch-types/src/deserr/error_messages.rs
deleted file mode 100644
index f17263813..000000000
--- a/meilisearch-types/src/deserr/error_messages.rs
+++ /dev/null
@@ -1,330 +0,0 @@
-/*!
-This module implements the error messages of deserialization errors.
-
-We try to:
-1. Give a human-readable description of where the error originated.
-2. Use the correct terms depending on the format of the request (json/query param)
-3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.)
- */
-use std::ops::ControlFlow;
-
-use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};
-
-use super::{DeserrJsonError, DeserrQueryParamError};
-use crate::error::{Code, ErrorCode};
-
-/// Return a description of the given location in a Json, preceded by the given article.
-/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
-/// included in the description.
-pub fn location_json_description(location: ValuePointerRef, article: &str) -> String {
-    fn rec(location: ValuePointerRef) -> String {
-        match location {
-            ValuePointerRef::Origin => String::new(),
-            ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key,
-            ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
-        }
-    }
-    match location {
-        ValuePointerRef::Origin => String::new(),
-        _ => {
-            format!("{article} `{}`", rec(location))
-        }
-    }
-}
-
-/// Return a description of the list of value kinds for a Json payload.
-fn value_kinds_description_json(kinds: &[ValueKind]) -> String {
-    // Rank each value kind so that they can be sorted (and deduplicated)
-    // Having a predictable order helps with pattern matching
-    fn order(kind: &ValueKind) -> u8 {
-        match kind {
-            ValueKind::Null => 0,
-            ValueKind::Boolean => 1,
-            ValueKind::Integer => 2,
-            ValueKind::NegativeInteger => 3,
-            ValueKind::Float => 4,
-            ValueKind::String => 5,
-            ValueKind::Sequence => 6,
-            ValueKind::Map => 7,
-        }
-    }
-    // Return a description of a single value kind, preceded by an article
-    fn single_description(kind: &ValueKind) -> &'static str {
-        match kind {
-            ValueKind::Null => "null",
-            ValueKind::Boolean => "a boolean",
-            ValueKind::Integer => "a positive integer",
-            ValueKind::NegativeInteger => "a negative integer",
-            ValueKind::Float => "a number",
-            ValueKind::String => "a string",
-            ValueKind::Sequence => "an array",
-            ValueKind::Map => "an object",
-        }
-    }
-
-    fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) {
-        let (msg_part, rest): (_, &[ValueKind]) = match kinds {
-            [] => (String::new(), &[]),
-            [ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..]
=> { - ("a number".to_owned(), rest) - } - [ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => { - ("a number".to_owned(), rest) - } - [ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => { - ("an integer".to_owned(), rest) - } - [a] => (single_description(a).to_owned(), &[]), - [a, rest @ ..] => (single_description(a).to_owned(), rest), - }; - - if rest.is_empty() { - if *count_items == 0 { - message.push_str(&msg_part); - } else if *count_items == 1 { - message.push_str(&format!(" or {msg_part}")); - } else { - message.push_str(&format!(", or {msg_part}")); - } - } else { - if *count_items == 0 { - message.push_str(&msg_part); - } else { - message.push_str(&format!(", {msg_part}")); - } - - *count_items += 1; - description_rec(rest, count_items, message); - } - } - - let mut kinds = kinds.to_owned(); - kinds.sort_by_key(order); - kinds.dedup(); - - if kinds.is_empty() { - // Should not happen ideally - "a different value".to_owned() - } else { - let mut message = String::new(); - description_rec(kinds.as_slice(), &mut 0, &mut message); - message - } -} - -/// Return the JSON string of the value preceded by a description of its kind -fn value_description_with_kind_json(v: &serde_json::Value) -> String { - match v.kind() { - ValueKind::Null => "null".to_owned(), - kind => { - format!( - "{}: `{}`", - value_kinds_description_json(&[kind]), - serde_json::to_string(v).unwrap() - ) - } - } -} - -impl deserr::DeserializeError for DeserrJsonError { - fn error( - _self_: Option, - error: deserr::ErrorKind, - location: ValuePointerRef, - ) -> ControlFlow { - let mut message = String::new(); - - message.push_str(&match error { - ErrorKind::IncorrectValueKind { actual, accepted } => { - let expected = value_kinds_description_json(accepted); - let received = value_description_with_kind_json(&serde_json::Value::from(actual)); - - let location = location_json_description(location, " at"); - - format!("Invalid value type{location}: expected {expected}, but found {received}") - } - ErrorKind::MissingField { field } => { - let location = location_json_description(location, " inside"); - format!("Missing field `{field}`{location}") - } - ErrorKind::UnknownKey { key, accepted } => { - let location = location_json_description(location, " inside"); - format!( - "Unknown field `{}`{location}: expected one of {}", - key, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", ") - ) - } - ErrorKind::UnknownValue { value, accepted } => { - let location = location_json_description(location, " at"); - format!( - "Unknown value `{}`{location}: expected one of {}", - value, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", "), - ) - } - ErrorKind::Unexpected { msg } => { - let location = location_json_description(location, " at"); - format!("Invalid value{location}: {msg}") - } - }); - - ControlFlow::Break(DeserrJsonError::new(message, C::default().error_code())) - } -} - -pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError { - let msg = format!( - "Immutable field `{field}`: expected one of {}", - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", ") - ); - - DeserrJsonError::new(msg, code) -} - -/// Return a description of the given location in query parameters, preceded by the -/// given article. e.g. `at key5[2]`. If the location is the origin, the given article -/// will not be included in the description. 
-pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String { - fn rec(location: ValuePointerRef) -> String { - match location { - ValuePointerRef::Origin => String::new(), - ValuePointerRef::Key { key, prev } => { - if matches!(prev, ValuePointerRef::Origin) { - key.to_owned() - } else { - rec(*prev) + "." + key - } - } - ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)), - } - } - match location { - ValuePointerRef::Origin => String::new(), - _ => { - format!("{article} `{}`", rec(location)) - } - } -} - -impl deserr::DeserializeError for DeserrQueryParamError { - fn error( - _self_: Option, - error: deserr::ErrorKind, - location: ValuePointerRef, - ) -> ControlFlow { - let mut message = String::new(); - - message.push_str(&match error { - ErrorKind::IncorrectValueKind { actual, accepted } => { - let expected = value_kinds_description_query_param(accepted); - let received = value_description_with_kind_query_param(actual); - - let location = location_query_param_description(location, " for parameter"); - - format!("Invalid value type{location}: expected {expected}, but found {received}") - } - ErrorKind::MissingField { field } => { - let location = location_query_param_description(location, " inside"); - format!("Missing parameter `{field}`{location}") - } - ErrorKind::UnknownKey { key, accepted } => { - let location = location_query_param_description(location, " inside"); - format!( - "Unknown parameter `{}`{location}: expected one of {}", - key, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", ") - ) - } - ErrorKind::UnknownValue { value, accepted } => { - let location = location_query_param_description(location, " for parameter"); - format!( - "Unknown value `{}`{location}: expected one of {}", - value, - accepted - .iter() - .map(|accepted| format!("`{}`", accepted)) - .collect::>() - .join(", "), - ) - } - ErrorKind::Unexpected { msg } => { - let location = location_query_param_description(location, " in parameter"); - format!("Invalid value{location}: {msg}") - } - }); - - ControlFlow::Break(DeserrQueryParamError::new(message, C::default().error_code())) - } -} - -/// Return a description of the list of value kinds for query parameters -/// Since query parameters are always treated as strings, we always return -/// "a string" for now. 
-fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String { - "a string".to_owned() -} - -fn value_description_with_kind_query_param(actual: deserr::Value) -> String { - match actual { - deserr::Value::Null => "null".to_owned(), - deserr::Value::Boolean(x) => format!("a boolean: `{x}`"), - deserr::Value::Integer(x) => format!("an integer: `{x}`"), - deserr::Value::NegativeInteger(x) => { - format!("an integer: `{x}`") - } - deserr::Value::Float(x) => { - format!("a number: `{x}`") - } - deserr::Value::String(x) => { - format!("a string: `{x}`") - } - deserr::Value::Sequence(_) => "multiple values".to_owned(), - deserr::Value::Map(_) => "multiple parameters".to_owned(), - } -} - -#[cfg(test)] -mod tests { - use deserr::ValueKind; - - use crate::deserr::error_messages::value_kinds_description_json; - - #[test] - fn test_value_kinds_description_json() { - insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value"); - - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"a negative integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object"); - - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"a negative integer or an array"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); - insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number"); - } -} diff --git a/meilisearch-types/src/deserr/mod.rs b/meilisearch-types/src/deserr/mod.rs index 49474a9e6..33213c953 100644 --- a/meilisearch-types/src/deserr/mod.rs +++ b/meilisearch-types/src/deserr/mod.rs @@ -3,9 +3,10 @@ use std::fmt; use std::marker::PhantomData; use std::ops::ControlFlow; -use deserr::{DeserializeError, MergeWithError, ValuePointerRef}; +use deserr::errors::{JsonError, QueryParamError}; +use deserr::{take_cf_content, DeserializeError, IntoValue, MergeWithError, ValuePointerRef}; -use 
crate::error::deserr_codes::{self, *}; +use crate::error::deserr_codes::*; use crate::error::{ unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError, ParseOffsetDateTimeError, @@ -13,7 +14,6 @@ use crate::error::{ use crate::index_uid::IndexUidFormatError; use crate::tasks::{ParseTaskKindError, ParseTaskStatusError}; -pub mod error_messages; pub mod query_params; /// Marker type for the Json format @@ -21,8 +21,8 @@ pub struct DeserrJson; /// Marker type for the Query Parameter format pub struct DeserrQueryParam; -pub type DeserrJsonError = DeserrError; -pub type DeserrQueryParamError = DeserrError; +pub type DeserrJsonError = DeserrError; +pub type DeserrQueryParamError = DeserrError; /// A request deserialization error. /// @@ -80,6 +80,45 @@ impl MergeWithError for DeserrError< } } +impl DeserializeError for DeserrJsonError { + fn error( + _self_: Option, + error: deserr::ErrorKind, + location: ValuePointerRef, + ) -> ControlFlow { + ControlFlow::Break(DeserrJsonError::new( + take_cf_content(JsonError::error(None, error, location)).to_string(), + C::default().error_code(), + )) + } +} + +impl DeserializeError for DeserrQueryParamError { + fn error( + _self_: Option, + error: deserr::ErrorKind, + location: ValuePointerRef, + ) -> ControlFlow { + ControlFlow::Break(DeserrQueryParamError::new( + take_cf_content(QueryParamError::error(None, error, location)).to_string(), + C::default().error_code(), + )) + } +} + +pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError { + let msg = format!( + "Immutable field `{field}`: expected one of {}", + accepted + .iter() + .map(|accepted| format!("`{}`", accepted)) + .collect::>() + .join(", ") + ); + + DeserrJsonError::new(msg, code) +} + // Implement a convenience function to build a `missing_field` error macro_rules! 
make_missing_field_convenience_builder { ($err_code:ident, $fn_name:ident) => { diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index 7478391ba..87ed543d6 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -11,8 +11,7 @@ use time::macros::{format_description, time}; use time::{Date, OffsetDateTime, PrimitiveDateTime}; use uuid::Uuid; -use crate::deserr::error_messages::immutable_field_error; -use crate::deserr::{DeserrError, DeserrJsonError}; +use crate::deserr::{immutable_field_error, DeserrError, DeserrJsonError}; use crate::error::deserr_codes::*; use crate::error::{unwrap_any, Code, ErrorCode, ParseOffsetDateTimeError}; use crate::index_uid_pattern::{IndexUidPattern, IndexUidPatternFormatError}; diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index c087fe202..16b9faa24 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -5,9 +5,8 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::{DeserializeError, Deserr, ValuePointerRef}; use index_scheduler::IndexScheduler; use log::debug; -use meilisearch_types::deserr::error_messages::immutable_field_error; use meilisearch_types::deserr::query_params::Param; -use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; +use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::error::deserr_codes::*; use meilisearch_types::error::{unwrap_any, Code, ResponseError}; use meilisearch_types::index_uid::IndexUid; diff --git a/meilisearch/tests/auth/errors.rs b/meilisearch/tests/auth/errors.rs index 0bfef878b..904bb182d 100644 --- a/meilisearch/tests/auth/errors.rs +++ b/meilisearch/tests/auth/errors.rs @@ -138,7 +138,7 @@ async fn create_api_key_bad_expires_at() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", + "message": "Unknown field `expires_at`: did you mean `expiresAt`? expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad_request" @@ -150,7 +150,7 @@ async fn create_api_key_bad_expires_at() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", + "message": "Unknown field `expires_at`: did you mean `expiresAt`? 
expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`", "code": "bad_request", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#bad_request" From a43765d45473e41ab0fd6a2298b676742142af5f Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 14 Feb 2023 13:12:42 +0100 Subject: [PATCH 165/186] use the pre-defined deserr extractors --- Cargo.lock | 11 ++- meilisearch-types/Cargo.toml | 2 +- meilisearch-types/src/deserr/mod.rs | 11 +++ meilisearch-types/src/error.rs | 2 +- meilisearch/Cargo.toml | 2 +- meilisearch/src/extractors/json.rs | 78 ------------------- meilisearch/src/extractors/mod.rs | 2 - .../src/extractors/query_parameters.rs | 70 ----------------- meilisearch/src/routes/api_key.rs | 9 +-- meilisearch/src/routes/indexes/documents.rs | 10 +-- meilisearch/src/routes/indexes/mod.rs | 9 +-- meilisearch/src/routes/indexes/search.rs | 7 +- meilisearch/src/routes/indexes/settings.rs | 6 +- meilisearch/src/routes/swap_indexes.rs | 4 +- meilisearch/src/routes/tasks.rs | 8 +- milli/Cargo.toml | 2 +- 16 files changed, 47 insertions(+), 186 deletions(-) delete mode 100644 meilisearch/src/extractors/json.rs delete mode 100644 meilisearch/src/extractors/query_parameters.rs diff --git a/Cargo.lock b/Cargo.lock index 5aa355d73..28ac2ca63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1113,23 +1113,26 @@ dependencies = [ [[package]] name = "deserr" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a13eed41ca58d9dc99e2c67e1a5f50507dfa1b123cc4a942c81c49707bd347f0" +checksum = "6eee2844f21cf7fb5693aae1fb8f1658127acfdb2fc072167d68a9152584ae64" dependencies = [ + "actix-http", + "actix-utils", "actix-web", "deserr-internal", "futures", "serde-cs", "serde_json", + "serde_urlencoded", "strsim", ] [[package]] name = "deserr-internal" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d5412186d7149542b09319901d28b3c7d1f714a61d0c5d48a50560d09573ae4" +checksum = "c27246f8ca9eeba9dd70d614b664dc43b529251ed7bd9e633131010d340da4b9" dependencies = [ "convert_case 0.5.0", "proc-macro2", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 021c44ea0..3ed9464d3 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -9,7 +9,7 @@ actix-web = { version = "4.2.1", default-features = false } anyhow = "1.0.65" convert_case = "0.6.0" csv = "1.1.6" -deserr = "0.4.0" +deserr = "0.4.1" either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" file-store = { path = "../file-store" } diff --git a/meilisearch-types/src/deserr/mod.rs b/meilisearch-types/src/deserr/mod.rs index 33213c953..ad6f72e2c 100644 --- a/meilisearch-types/src/deserr/mod.rs +++ b/meilisearch-types/src/deserr/mod.rs @@ -38,6 +38,7 @@ impl DeserrError { Self { msg, code, _phantom: PhantomData } } } + impl std::fmt::Debug for DeserrError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish() @@ -50,6 +51,16 @@ impl std::fmt::Display for DeserrError actix_web::ResponseError for DeserrError { + fn status_code(&self) -> actix_web::http::StatusCode { + self.code.http() + } + + fn error_response(&self) -> actix_web::HttpResponse { + crate::error::ResponseError::from_msg(self.msg.to_string(), self.code).error_response() + } +} + impl std::error::Error for DeserrError {} impl ErrorCode for DeserrError { fn error_code(&self) -> Code { 
diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index d83bf7eb9..5c2968b39 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -127,7 +127,7 @@ macro_rules! make_error_codes { } impl Code { /// return the HTTP status code associated with the `Code` - fn http(&self) -> StatusCode { + pub fn http(&self) -> StatusCode { match self { $( Code::$code_ident => StatusCode::$status diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index b6410f7cb..b4a87c632 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -19,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", " bytes = "1.2.1" clap = { version = "4.0.9", features = ["derive", "env"] } crossbeam-channel = "0.5.6" -deserr = "0.4.0" +deserr = "0.4.1" dump = { path = "../dump" } either = "1.8.0" env_logger = "0.9.1" diff --git a/meilisearch/src/extractors/json.rs b/meilisearch/src/extractors/json.rs deleted file mode 100644 index c59af14db..000000000 --- a/meilisearch/src/extractors/json.rs +++ /dev/null @@ -1,78 +0,0 @@ -use std::fmt::Debug; -use std::future::Future; -use std::marker::PhantomData; -use std::pin::Pin; -use std::task::{Context, Poll}; - -use actix_web::dev::Payload; -use actix_web::web::Json; -use actix_web::{FromRequest, HttpRequest}; -use deserr::{DeserializeError, Deserr}; -use futures::ready; -use meilisearch_types::error::{ErrorCode, ResponseError}; - -/// Extractor for typed data from Json request payloads -/// deserialised by deserr. -/// -/// # Extractor -/// To extract typed data from a request body, the inner type `T` must implement the -/// [`deserr::DeserializeFromError`] trait. The inner type `E` must implement the -/// [`ErrorCode`](meilisearch_error::ErrorCode) trait. -#[derive(Debug)] -pub struct ValidatedJson(pub T, PhantomData<*const E>); - -impl ValidatedJson { - pub fn new(data: T) -> Self { - ValidatedJson(data, PhantomData) - } - pub fn into_inner(self) -> T { - self.0 - } -} - -impl FromRequest for ValidatedJson -where - E: DeserializeError + ErrorCode + std::error::Error + 'static, - T: Deserr, -{ - type Error = actix_web::Error; - type Future = ValidatedJsonExtractFut; - - #[inline] - fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { - ValidatedJsonExtractFut { - fut: Json::::from_request(req, payload), - _phantom: PhantomData, - } - } -} - -pub struct ValidatedJsonExtractFut { - fut: as FromRequest>::Future, - _phantom: PhantomData<*const (T, E)>, -} - -impl Future for ValidatedJsonExtractFut -where - T: Deserr, - E: DeserializeError + ErrorCode + std::error::Error + 'static, -{ - type Output = Result, actix_web::Error>; - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - let ValidatedJsonExtractFut { fut, .. 
} = self.get_mut(); - let fut = Pin::new(fut); - - let res = ready!(fut.poll(cx)); - - let res = match res { - Err(err) => Err(err), - Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) { - Ok(data) => Ok(ValidatedJson::new(data)), - Err(e) => Err(ResponseError::from(e).into()), - }, - }; - - Poll::Ready(res) - } -} diff --git a/meilisearch/src/extractors/mod.rs b/meilisearch/src/extractors/mod.rs index f44b0c004..98a22f8c9 100644 --- a/meilisearch/src/extractors/mod.rs +++ b/meilisearch/src/extractors/mod.rs @@ -1,6 +1,4 @@ pub mod payload; #[macro_use] pub mod authentication; -pub mod json; -pub mod query_parameters; pub mod sequential_extractor; diff --git a/meilisearch/src/extractors/query_parameters.rs b/meilisearch/src/extractors/query_parameters.rs deleted file mode 100644 index 39b833062..000000000 --- a/meilisearch/src/extractors/query_parameters.rs +++ /dev/null @@ -1,70 +0,0 @@ -//! A module to parse query parameter with deserr - -use std::marker::PhantomData; -use std::{fmt, ops}; - -use actix_http::Payload; -use actix_utils::future::{err, ok, Ready}; -use actix_web::{FromRequest, HttpRequest}; -use deserr::{DeserializeError, Deserr}; -use meilisearch_types::error::{Code, ErrorCode, ResponseError}; - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct QueryParameter(pub T, PhantomData<*const E>); - -impl QueryParameter { - /// Unwrap into inner `T` value. - pub fn into_inner(self) -> T { - self.0 - } -} - -impl QueryParameter -where - T: Deserr, - E: DeserializeError + ErrorCode + std::error::Error + 'static, -{ - pub fn from_query(query_str: &str) -> Result { - let value = serde_urlencoded::from_str::(query_str) - .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?; - - match deserr::deserialize::<_, _, E>(value) { - Ok(data) => Ok(QueryParameter(data, PhantomData)), - Err(e) => Err(ResponseError::from(e).into()), - } - } -} - -impl ops::Deref for QueryParameter { - type Target = T; - - fn deref(&self) -> &T { - &self.0 - } -} - -impl ops::DerefMut for QueryParameter { - fn deref_mut(&mut self) -> &mut T { - &mut self.0 - } -} - -impl fmt::Display for QueryParameter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl FromRequest for QueryParameter -where - T: Deserr, - E: DeserializeError + ErrorCode + std::error::Error + 'static, -{ - type Error = actix_web::Error; - type Future = Ready>; - - #[inline] - fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { - QueryParameter::from_query(req.query_string()).map(ok).unwrap_or_else(err) - } -} diff --git a/meilisearch/src/routes/api_key.rs b/meilisearch/src/routes/api_key.rs index 096aa7df0..7514d01f6 100644 --- a/meilisearch/src/routes/api_key.rs +++ b/meilisearch/src/routes/api_key.rs @@ -1,6 +1,7 @@ use std::str; use actix_web::{web, HttpRequest, HttpResponse}; +use deserr::actix_web::{AwebJson, AwebQueryParameter}; use deserr::Deserr; use meilisearch_auth::error::AuthControllerError; use meilisearch_auth::AuthController; @@ -16,8 +17,6 @@ use uuid::Uuid; use super::PAGINATION_DEFAULT_LIMIT; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; -use crate::extractors::json::ValidatedJson; -use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; use crate::routes::Pagination; @@ -37,7 +36,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { pub async fn create_api_key( auth_controller: GuardedData, 
AuthController>, - body: ValidatedJson, + body: AwebJson, _req: HttpRequest, ) -> Result { let v = body.into_inner(); @@ -68,7 +67,7 @@ impl ListApiKeys { pub async fn list_api_keys( auth_controller: GuardedData, AuthController>, - list_api_keys: QueryParameter, + list_api_keys: AwebQueryParameter, ) -> Result { let paginate = list_api_keys.into_inner().as_pagination(); let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> { @@ -105,7 +104,7 @@ pub async fn get_api_key( pub async fn patch_api_key( auth_controller: GuardedData, AuthController>, - body: ValidatedJson, + body: AwebJson, path: web::Path, ) -> Result { let key = path.into_inner().key; diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 90bf70692..fb75cfcf8 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -4,6 +4,7 @@ use actix_web::http::header::CONTENT_TYPE; use actix_web::web::Data; use actix_web::{web, HttpMessage, HttpRequest, HttpResponse}; use bstr::ByteSlice; +use deserr::actix_web::AwebQueryParameter; use deserr::Deserr; use futures::StreamExt; use index_scheduler::IndexScheduler; @@ -33,7 +34,6 @@ use crate::error::PayloadError::ReceivePayload; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; use crate::extractors::payload::Payload; -use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT}; @@ -90,7 +90,7 @@ pub struct GetDocument { pub async fn get_document( index_scheduler: GuardedData, Data>, document_param: web::Path, - params: QueryParameter, + params: AwebQueryParameter, ) -> Result { let DocumentParam { index_uid, document_id } = document_param.into_inner(); let index_uid = IndexUid::try_from(index_uid)?; @@ -139,7 +139,7 @@ pub struct BrowseQuery { pub async fn get_all_documents( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: AwebQueryParameter, ) -> Result { let index_uid = IndexUid::try_from(index_uid.into_inner())?; debug!("called with params: {:?}", params); @@ -165,7 +165,7 @@ pub struct UpdateDocumentsQuery { pub async fn add_documents( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: AwebQueryParameter, body: Payload, req: HttpRequest, analytics: web::Data, @@ -195,7 +195,7 @@ pub async fn add_documents( pub async fn update_documents( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: AwebQueryParameter, body: Payload, req: HttpRequest, analytics: web::Data, diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 16b9faa24..2e13d782d 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -2,6 +2,7 @@ use std::convert::Infallible; use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; +use deserr::actix_web::{AwebJson, AwebQueryParameter}; use deserr::{DeserializeError, Deserr, ValuePointerRef}; use index_scheduler::IndexScheduler; use log::debug; @@ -20,8 +21,6 @@ use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT}; use crate::analytics::Analytics; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::{AuthenticationError, GuardedData}; -use 
crate::extractors::json::ValidatedJson; -use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; pub mod documents; @@ -88,7 +87,7 @@ impl ListIndexes { pub async fn list_indexes( index_scheduler: GuardedData, Data>, - paginate: QueryParameter, + paginate: AwebQueryParameter, ) -> Result { let search_rules = &index_scheduler.filters().search_rules; let indexes: Vec<_> = index_scheduler.indexes()?; @@ -115,7 +114,7 @@ pub struct IndexCreateRequest { pub async fn create_index( index_scheduler: GuardedData, Data>, - body: ValidatedJson, + body: AwebJson, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -180,7 +179,7 @@ pub async fn get_index( pub async fn update_index( index_scheduler: GuardedData, Data>, index_uid: web::Path, - body: ValidatedJson, + body: AwebJson, req: HttpRequest, analytics: web::Data, ) -> Result { diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 50a2ffd74..3fb413c43 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -1,5 +1,6 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; +use deserr::actix_web::{AwebJson, AwebQueryParameter}; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_auth::IndexSearchRules; @@ -14,8 +15,6 @@ use serde_json::Value; use crate::analytics::{Analytics, SearchAggregator}; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; -use crate::extractors::json::ValidatedJson; -use crate::extractors::query_parameters::QueryParameter; use crate::extractors::sequential_extractor::SeqHandler; use crate::search::{ perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, @@ -150,7 +149,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec { pub async fn search_with_url_query( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: QueryParameter, + params: AwebQueryParameter, req: HttpRequest, analytics: web::Data, ) -> Result { @@ -184,7 +183,7 @@ pub async fn search_with_url_query( pub async fn search_with_post( index_scheduler: GuardedData, Data>, index_uid: web::Path, - params: ValidatedJson, + params: AwebJson, req: HttpRequest, analytics: web::Data, ) -> Result { diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 0c864cc73..d10aec1a2 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -1,5 +1,6 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; +use deserr::actix_web::AwebJson; use index_scheduler::IndexScheduler; use log::debug; use meilisearch_types::deserr::DeserrJsonError; @@ -12,7 +13,6 @@ use serde_json::json; use crate::analytics::Analytics; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; -use crate::extractors::json::ValidatedJson; use crate::routes::SummarizedTaskView; #[macro_export] @@ -68,7 +68,7 @@ macro_rules! 
make_setting_route {
                 Data,
             >,
             index_uid: actix_web::web::Path,
-            body: $crate::routes::indexes::ValidatedJson, $err_ty>,
+            body: deserr::actix_web::AwebJson, $err_ty>,
             req: HttpRequest,
             $analytics_var: web::Data,
         ) -> std::result::Result {
@@ -468,7 +468,7 @@ generate_configure!(
 pub async fn update_all(
     index_scheduler: GuardedData, Data>,
     index_uid: web::Path,
-    body: ValidatedJson, DeserrJsonError>,
+    body: AwebJson, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data,
 ) -> Result {
diff --git a/meilisearch/src/routes/swap_indexes.rs b/meilisearch/src/routes/swap_indexes.rs
index 2070177d9..c4177e900 100644
--- a/meilisearch/src/routes/swap_indexes.rs
+++ b/meilisearch/src/routes/swap_indexes.rs
@@ -1,5 +1,6 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
+use deserr::actix_web::AwebJson;
 use deserr::Deserr;
 use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::DeserrJsonError;
@@ -14,7 +15,6 @@ use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
-use crate::extractors::json::ValidatedJson;
 use crate::extractors::sequential_extractor::SeqHandler;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
@@ -30,7 +30,7 @@ pub struct SwapIndexesPayload {
 
 pub async fn swap_indexes(
     index_scheduler: GuardedData, Data>,
-    params: ValidatedJson, DeserrJsonError>,
+    params: AwebJson, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data,
 ) -> Result {
diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs
index f747320b1..49f3aac66 100644
--- a/meilisearch/src/routes/tasks.rs
+++ b/meilisearch/src/routes/tasks.rs
@@ -1,5 +1,6 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
+use deserr::actix_web::AwebQueryParameter;
 use deserr::Deserr;
 use index_scheduler::{IndexScheduler, Query, TaskId};
 use meilisearch_types::deserr::query_params::Param;
@@ -23,7 +24,6 @@ use super::SummarizedTaskView;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
-use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
 
 const DEFAULT_LIMIT: u32 = 20;
@@ -286,7 +286,7 @@ impl TaskDeletionOrCancelationQuery {
 
 async fn cancel_tasks(
     index_scheduler: GuardedData, Data>,
-    params: QueryParameter,
+    params: AwebQueryParameter,
     req: HttpRequest,
     analytics: web::Data,
 ) -> Result {
@@ -332,7 +332,7 @@ async fn cancel_tasks(
 
 async fn delete_tasks(
     index_scheduler: GuardedData, Data>,
-    params: QueryParameter,
+    params: AwebQueryParameter,
     req: HttpRequest,
     analytics: web::Data,
 ) -> Result {
@@ -385,7 +385,7 @@ pub struct AllTasks {
 
 async fn get_tasks(
     index_scheduler: GuardedData, Data>,
-    params: QueryParameter,
+    params: AwebQueryParameter,
     req: HttpRequest,
     analytics: web::Data,
 ) -> Result {
diff --git a/milli/Cargo.toml b/milli/Cargo.toml
index 70658d81d..1752cb3d9 100644
--- a/milli/Cargo.toml
+++ b/milli/Cargo.toml
@@ -12,7 +12,7 @@ byteorder = "1.4.3"
 charabia = { version = "0.7.0", default-features = false }
 concat-arrays = "0.1.2"
 crossbeam-channel = "0.5.6"
-deserr = "0.4.0"
+deserr = "0.4.1"
 either = "1.8.0"
 flatten-serde-json = { path = "../flatten-serde-json" }
 fst = "0.4.7"
From 42a3cdca66eea60c69a5ab04c1a37a8b67a29cc4 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Tue, 14 Feb 2023 13:58:33 +0100
Subject: [PATCH 166/186] get rid of the
unwrap_any function in favor of take_cf_content --- meilisearch-types/src/deserr/mod.rs | 4 ++-- meilisearch-types/src/deserr/query_params.rs | 5 ++--- meilisearch-types/src/error.rs | 10 ---------- meilisearch-types/src/keys.rs | 4 ++-- meilisearch-types/src/settings.rs | 3 +-- meilisearch-types/src/star_or.rs | 11 +++++------ meilisearch/src/routes/indexes/mod.rs | 4 ++-- 7 files changed, 14 insertions(+), 27 deletions(-) diff --git a/meilisearch-types/src/deserr/mod.rs b/meilisearch-types/src/deserr/mod.rs index ad6f72e2c..3e6ec8b96 100644 --- a/meilisearch-types/src/deserr/mod.rs +++ b/meilisearch-types/src/deserr/mod.rs @@ -8,7 +8,7 @@ use deserr::{take_cf_content, DeserializeError, IntoValue, MergeWithError, Value use crate::error::deserr_codes::*; use crate::error::{ - unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError, + Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError, ParseOffsetDateTimeError, }; use crate::index_uid::IndexUidFormatError; @@ -135,7 +135,7 @@ macro_rules! make_missing_field_convenience_builder { ($err_code:ident, $fn_name:ident) => { impl DeserrJsonError<$err_code> { pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self { - let x = unwrap_any(Self::error::( + let x = deserr::take_cf_content(Self::error::( None, deserr::ErrorKind::MissingField { field }, location, diff --git a/meilisearch-types/src/deserr/query_params.rs b/meilisearch-types/src/deserr/query_params.rs index 06d83747c..dded0ea5c 100644 --- a/meilisearch-types/src/deserr/query_params.rs +++ b/meilisearch-types/src/deserr/query_params.rs @@ -18,7 +18,6 @@ use std::str::FromStr; use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind}; use super::{DeserrParseBoolError, DeserrParseIntError}; -use crate::error::unwrap_any; use crate::index_uid::IndexUid; use crate::tasks::{Kind, Status}; @@ -50,9 +49,9 @@ where match value { deserr::Value::String(s) => match T::from_query_param(&s) { Ok(x) => Ok(Param(x)), - Err(e) => Err(unwrap_any(E::merge(None, e, location))), + Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))), }, - _ => Err(unwrap_any(E::error( + _ => Err(deserr::take_cf_content(E::error( None, deserr::ErrorKind::IncorrectValueKind { actual: value, diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 5c2968b39..46368c29e 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -381,16 +381,6 @@ impl ErrorCode for io::Error { } } -/// Unwrap a result, either its Ok or Err value. -pub fn unwrap_any(any: std::ops::ControlFlow) -> T { - use std::ops::ControlFlow::*; - - match any { - Continue(any) => any, - Break(any) => any, - } -} - /// Deserialization when `deserr` cannot parse an API key date. 
#[derive(Debug)]
pub struct ParseOffsetDateTimeError(pub String);
diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs
index 87ed543d6..b2389b238 100644
--- a/meilisearch-types/src/keys.rs
+++ b/meilisearch-types/src/keys.rs
@@ -13,7 +13,7 @@ use uuid::Uuid;
 
 use crate::deserr::{immutable_field_error, DeserrError, DeserrJsonError};
 use crate::error::deserr_codes::*;
-use crate::error::{unwrap_any, Code, ErrorCode, ParseOffsetDateTimeError};
+use crate::error::{Code, ErrorCode, ParseOffsetDateTimeError};
 use crate::index_uid_pattern::{IndexUidPattern, IndexUidPatternFormatError};
 
 pub type KeyId = Uuid;
@@ -78,7 +78,7 @@ fn deny_immutable_fields_api_key(
         "expiresAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyExpiresAt),
         "createdAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyCreatedAt),
         "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUpdatedAt),
-        _ => unwrap_any(DeserrJsonError::<ImmutableApiKey>::error::<Infallible>(
+        _ => deserr::take_cf_content(DeserrJsonError::<ImmutableApiKey>::error::<Infallible>(
             None,
             deserr::ErrorKind::UnknownKey { key: field, accepted },
             location,
diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs
index c7023bb10..0fc7ea6e2 100644
--- a/meilisearch-types/src/settings.rs
+++ b/meilisearch-types/src/settings.rs
@@ -14,7 +14,6 @@ use serde::{Deserialize, Serialize, Serializer};
 
 use crate::deserr::DeserrJsonError;
 use crate::error::deserr_codes::*;
-use crate::error::unwrap_any;
 
 /// The maximum number of results that the engine
 /// will be able to return in one search call.
@@ -60,7 +59,7 @@ fn validate_min_word_size_for_typo_setting(
 ) -> Result {
     if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
         if one > two {
-            return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
+            return Err(deserr::take_cf_content(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
         }
     }
     Ok(s)
diff --git a/meilisearch-types/src/star_or.rs b/meilisearch-types/src/star_or.rs
index 0f3ef10fb..cd26a1fb0 100644
--- a/meilisearch-types/src/star_or.rs
+++ b/meilisearch-types/src/star_or.rs
@@ -8,7 +8,6 @@ use serde::de::Visitor;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 
 use crate::deserr::query_params::FromQueryParameter;
-use crate::error::unwrap_any;
 
 /// A type that tries to match either a star (*) or
 /// any other thing that implements `FromStr`.
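 // Illustrative sketch of that contract (hypothetical values; `IndexUid` stands in
 // for any `T: FromStr`): deserializing the string "*" yields `StarOr::Star`, while
 // any other string such as "movies" is handed to `T::from_str` and wrapped as
 // `StarOr::Other(...)` on success.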
@@ -128,11 +127,11 @@ where } else { match T::from_str(&v) { Ok(parsed) => Ok(StarOr::Other(parsed)), - Err(e) => Err(unwrap_any(E::merge(None, e, location))), + Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))), } } } - _ => Err(unwrap_any(E::error::( + _ => Err(deserr::take_cf_content(E::error::( None, deserr::ErrorKind::IncorrectValueKind { actual: value, @@ -206,10 +205,10 @@ where "*" => Ok(OptionStarOr::Star), s => match T::from_query_param(s) { Ok(x) => Ok(OptionStarOr::Other(x)), - Err(e) => Err(unwrap_any(E::merge(None, e, location))), + Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))), }, }, - _ => Err(unwrap_any(E::error::( + _ => Err(deserr::take_cf_content(E::error::( None, deserr::ErrorKind::IncorrectValueKind { actual: value, @@ -318,7 +317,7 @@ where Ok(OptionStarOrList::List(els)) } } - _ => Err(unwrap_any(E::error::( + _ => Err(deserr::take_cf_content(E::error::( None, deserr::ErrorKind::IncorrectValueKind { actual: value, diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index 2e13d782d..b58bef0e9 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -9,7 +9,7 @@ use log::debug; use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::error::deserr_codes::*; -use meilisearch_types::error::{unwrap_any, Code, ResponseError}; +use meilisearch_types::error::{Code, ResponseError}; use meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::{self, FieldDistribution, Index}; use meilisearch_types::tasks::KindWithContent; @@ -147,7 +147,7 @@ fn deny_immutable_fields_index( "uid" => immutable_field_error(field, accepted, Code::ImmutableIndexUid), "createdAt" => immutable_field_error(field, accepted, Code::ImmutableIndexCreatedAt), "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableIndexUpdatedAt), - _ => unwrap_any(DeserrJsonError::::error::( + _ => deserr::take_cf_content(DeserrJsonError::::error::( None, deserr::ErrorKind::UnknownKey { key: field, accepted }, location, From 74d1a67a99c7c8f62c8e3ad54e4207f531edd81c Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 15 Feb 2023 13:51:07 +0100 Subject: [PATCH 167/186] Use the workspace inheritance feature of rust 1.64 --- Cargo.lock | 4 ++-- Cargo.toml | 9 +++++++++ benchmarks/Cargo.toml | 10 ++++++++-- dump/Cargo.toml | 11 +++++++++-- file-store/Cargo.toml | 11 +++++++++-- filter-parser/Cargo.toml | 10 ++++++++-- flatten-serde-json/Cargo.toml | 10 ++++++++-- index-scheduler/Cargo.toml | 11 +++++++++-- json-depth-checker/Cargo.toml | 10 ++++++++-- meili-snap/Cargo.toml | 11 +++++++++-- meilisearch-auth/Cargo.toml | 11 +++++++++-- meilisearch-types/Cargo.toml | 12 +++++++++--- meilisearch/Cargo.toml | 16 ++++++++++------ milli/Cargo.toml | 11 +++++++++-- permissive-json-pointer/Cargo.toml | 11 +++++++++-- 15 files changed, 125 insertions(+), 33 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 28ac2ca63..d0fcd431c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -609,9 +609,9 @@ dependencies = [ [[package]] name = "cargo_toml" -version = "0.13.3" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497049e9477329f8f6a559972ee42e117487d01d1e8c2cc9f836ea6fa23a9e1a" +checksum = "2bfbc36312494041e2cdd5f06697b7e89d4b76f42773a0b5556ac290ff22acc2" dependencies = [ "serde", "toml", diff --git a/Cargo.toml b/Cargo.toml index 
a76827de0..d330b3a42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,15 @@ members = [ "benchmarks" ] +[workspace.package] +version = "1.0.0" +authors = ["Quentin de Quelen ", "Clément Renault "] +description = "Meilisearch HTTP server" +homepage = "https://meilisearch.com" +readme = "README.md" +edition = "2021" +license = "MIT" + [profile.release] codegen-units = 1 diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 561fe9d3a..5203a7601 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,9 +1,15 @@ [package] name = "benchmarks" -version = "1.0.0" -edition = "2018" publish = false +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true + [dependencies] anyhow = "1.0.65" csv = "1.1.6" diff --git a/dump/Cargo.toml b/dump/Cargo.toml index 611b93735..d0ed46d7c 100644 --- a/dump/Cargo.toml +++ b/dump/Cargo.toml @@ -1,7 +1,14 @@ [package] name = "dump" -version = "1.0.0" -edition = "2021" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +edition.workspace = true +homepage.workspace = true +readme.workspace = true +license.workspace = true [dependencies] anyhow = "1.0.65" diff --git a/file-store/Cargo.toml b/file-store/Cargo.toml index 577d54ac6..b9ab9ea36 100644 --- a/file-store/Cargo.toml +++ b/file-store/Cargo.toml @@ -1,7 +1,14 @@ [package] name = "file-store" -version = "1.0.0" -edition = "2021" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] tempfile = "3.3.0" diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index f7d3f044f..e6652a489 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -1,10 +1,16 @@ [package] name = "filter-parser" -version = "1.0.0" -edition = "2021" description = "The parser for the Meilisearch filter syntax" publish = false +version.workspace = true +authors.workspace = true +# description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true + [dependencies] nom = "7.1.1" nom_locate = "4.0.0" diff --git a/flatten-serde-json/Cargo.toml b/flatten-serde-json/Cargo.toml index 36667486f..1ad00b0b3 100644 --- a/flatten-serde-json/Cargo.toml +++ b/flatten-serde-json/Cargo.toml @@ -1,11 +1,17 @@ [package] name = "flatten-serde-json" -version = "1.0.0" -edition = "2021" description = "Flatten serde-json objects like elastic search" readme = "README.md" publish = false +version.workspace = true +authors.workspace = true +# description.workspace = true +homepage.workspace = true +# readme.workspace = true +edition.workspace = true +license.workspace = true + [dependencies] serde_json = "1.0" diff --git a/index-scheduler/Cargo.toml b/index-scheduler/Cargo.toml index c5fd03533..77936c435 100644 --- a/index-scheduler/Cargo.toml +++ b/index-scheduler/Cargo.toml @@ -1,7 +1,14 @@ [package] name = "index-scheduler" -version = "1.0.0" -edition = "2021" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] anyhow = "1.0.64" diff --git a/json-depth-checker/Cargo.toml b/json-depth-checker/Cargo.toml index 0cf8344fd..01b2d03a7 100644 --- 
a/json-depth-checker/Cargo.toml +++ b/json-depth-checker/Cargo.toml @@ -1,10 +1,16 @@ [package] name = "json-depth-checker" -version = "1.0.0" -edition = "2021" description = "A library that indicates if a JSON must be flattened" publish = false +version.workspace = true +authors.workspace = true +# description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true + [dependencies] serde_json = "1.0" diff --git a/meili-snap/Cargo.toml b/meili-snap/Cargo.toml index 547f76dbe..8aeb30141 100644 --- a/meili-snap/Cargo.toml +++ b/meili-snap/Cargo.toml @@ -1,7 +1,14 @@ [package] name = "meili-snap" -version = "1.0.0" -edition = "2021" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] insta = { version = "^1.19.1", features = ["json", "redactions"] } diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index a42cbae02..9a00140fa 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -1,7 +1,14 @@ [package] name = "meilisearch-auth" -version = "1.0.0" -edition = "2021" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] base64 = "0.13.1" diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 3ed9464d3..50abbc105 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -1,8 +1,14 @@ [package] name = "meilisearch-types" -version = "1.0.0" -authors = ["marin "] -edition = "2021" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] actix-web = { version = "4.2.1", default-features = false } diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index b4a87c632..bb30767e5 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -1,10 +1,14 @@ [package] -authors = ["Quentin de Quelen ", "Clément Renault "] -description = "Meilisearch HTTP server" -edition = "2021" -license = "MIT" name = "meilisearch" -version = "1.0.0" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] actix-cors = "0.6.3" @@ -90,7 +94,7 @@ yaup = "0.2.1" [build-dependencies] anyhow = { version = "1.0.65", optional = true } -cargo_toml = { version = "0.13.0", optional = true } +cargo_toml = { version = "0.14.0", optional = true } hex = { version = "0.4.3", optional = true } reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false, optional = true } sha-1 = { version = "0.10.0", optional = true } diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 1752cb3d9..2f14b8fdf 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -1,8 +1,15 @@ [package] name = "milli" -version = "1.0.0" -authors = ["Kerollmops "] edition = "2018" +publish = false + +version.workspace = true +authors.workspace = true +description.workspace = true +homepage.workspace = true +readme.workspace = true +# edition.workspace = true +license.workspace = true [dependencies] bimap = { version = 
"0.6.2", features = ["serde"] } diff --git a/permissive-json-pointer/Cargo.toml b/permissive-json-pointer/Cargo.toml index c15c341db..697555364 100644 --- a/permissive-json-pointer/Cargo.toml +++ b/permissive-json-pointer/Cargo.toml @@ -1,9 +1,16 @@ [package] name = "permissive-json-pointer" -version = "1.0.0" -edition = "2021" description = "A permissive json pointer" readme = "README.md" +publish = false + +version.workspace = true +authors.workspace = true +# description.workspace = true +homepage.workspace = true +# readme.workspace = true +edition.workspace = true +license.workspace = true [dependencies] serde_json = "1.0" From ab2adba183ef26b7bb82a9ce63d3a99edba0a91a Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 15 Feb 2023 13:56:24 +0100 Subject: [PATCH 168/186] update our CI scripts accordingly --- .github/scripts/check-release.sh | 10 +++------- .github/workflows/update-cargo-toml-version.yml | 2 +- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/scripts/check-release.sh b/.github/scripts/check-release.sh index 2ce171459..b0b272226 100644 --- a/.github/scripts/check-release.sh +++ b/.github/scripts/check-release.sh @@ -3,7 +3,7 @@ # check_tag $current_tag $file_tag $file_name function check_tag { if [[ "$1" != "$2" ]]; then - echo "Error: the current tag does not match the version in $3: found $2 - expected $1" + echo "Error: the current tag does not match the version in Cargo.toml: found $2 - expected $1" ret=1 fi } @@ -11,12 +11,8 @@ function check_tag { ret=0 current_tag=${GITHUB_REF#'refs/tags/v'} -toml_files='*/Cargo.toml' -for toml_file in $toml_files; -do - file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')" - check_tag $current_tag $file_tag $toml_file -done +file_tag="$(grep '^version = ' Cargo.toml | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')" +check_tag $current_tag $file_tag lock_file='Cargo.lock' lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ') diff --git a/.github/workflows/update-cargo-toml-version.yml b/.github/workflows/update-cargo-toml-version.yml index 80961e878..d5838a5e8 100644 --- a/.github/workflows/update-cargo-toml-version.yml +++ b/.github/workflows/update-cargo-toml-version.yml @@ -29,7 +29,7 @@ jobs: run: | raw_new_version=$(echo $NEW_VERSION | cut -d 'v' -f 2) new_string="version = \"$raw_new_version\"" - sd '^version = "\d+.\d+.\w+"$' "$new_string" */Cargo.toml + sd '^version = "\d+.\d+.\w+"$' "$new_string" Cargo.toml - name: Build Meilisearch to update Cargo.lock run: cargo build - name: Commit and push the changes to the ${{ env.NEW_BRANCH }} branch From c3a30a5a91bc8f0e3b5d8d62a4c8b8393f1bf2bb Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 7 Feb 2023 15:02:04 +0100 Subject: [PATCH 169/186] If using a prototype, display its name at Meilisearch startup --- meilisearch/build.rs | 10 ++++++++-- meilisearch/src/lib.rs | 32 ++++++++++++++++++++++++++++++++ meilisearch/src/main.rs | 5 ++++- 3 files changed, 44 insertions(+), 3 deletions(-) diff --git a/meilisearch/build.rs b/meilisearch/build.rs index e2207561b..ba0cf9e70 100644 --- a/meilisearch/build.rs +++ b/meilisearch/build.rs @@ -1,7 +1,13 @@ -use vergen::{vergen, Config}; +use vergen::{vergen, Config, SemverKind}; fn main() { - if let Err(e) = vergen(Config::default()) { + let mut config = Config::default(); + // allow using non-annotated tags + *config.git_mut().semver_kind_mut() = SemverKind::Lightweight; + // add -dirty suffix when we're not right on the tag + 
*config.git_mut().semver_dirty_mut() = Some("-dirty");
+
+    if let Err(e) = vergen(config) {
         println!("cargo:warning=vergen: {}", e);
     }
 
diff --git a/meilisearch/src/lib.rs b/meilisearch/src/lib.rs
index 7a7555659..8b87b5a87 100644
--- a/meilisearch/src/lib.rs
+++ b/meilisearch/src/lib.rs
@@ -427,3 +427,35 @@ pub fn configure_metrics_route(config: &mut web::ServiceConfig, enable_metrics_r
     );
     }
 }
+
+/// Parses the output of
+/// [`VERGEN_GIT_SEMVER_LIGHTWEIGHT`](https://docs.rs/vergen/latest/vergen/struct.Git.html#instructions)
+/// as a prototype name.
+///
+/// Returns `Some(prototype_name)` if the following conditions are met on this value:
+///
+/// 1. starts with `prototype-`,
+/// 2. does not end with `-dirty`,
+/// 3. ends with `-<some_number>`,
+/// 4. does not end with `-<some_number>-<some_number>`.
+///
+/// Otherwise, returns `None`.
+pub fn prototype_name() -> Option<&'static str> {
+    let prototype: &'static str = option_env!("VERGEN_GIT_SEMVER_LIGHTWEIGHT")?;
+
+    if prototype.ends_with("-dirty") {
+        return None;
+    }
+
+    if !prototype.starts_with("prototype-") {
+        return None;
+    }
+
+    let mut rsplit_prototype = prototype.rsplit('-');
+    // last component MUST be a number
+    rsplit_prototype.next()?.parse::<u64>().ok()?;
+    // the component before the last SHALL NOT be a number
+    rsplit_prototype.next()?.parse::<u64>().err()?;
+
+    Some(prototype)
+}
diff --git a/meilisearch/src/main.rs b/meilisearch/src/main.rs
index b78362ec1..d12539e20 100644
--- a/meilisearch/src/main.rs
+++ b/meilisearch/src/main.rs
@@ -8,7 +8,7 @@
 use actix_web::web::Data;
 use actix_web::HttpServer;
 use index_scheduler::IndexScheduler;
 use meilisearch::analytics::Analytics;
-use meilisearch::{analytics, create_app, setup_meilisearch, Opt};
+use meilisearch::{analytics, create_app, prototype_name, setup_meilisearch, Opt};
 use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
 
@@ -137,6 +137,9 @@ pub fn print_launch_resume(
     eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
     eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
     eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());
+    if let Some(prototype) = prototype_name() {
+        eprintln!("Prototype:\t\t{:?}", prototype);
+    }
 
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     {
From f46cf46b8cf0929ba045d766731bd1254ef52bf1 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Tue, 7 Feb 2023 16:01:12 +0100
Subject: [PATCH 170/186] Add prototype to analytics if any

---
 meilisearch/src/analytics/segment_analytics.rs | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/meilisearch/src/analytics/segment_analytics.rs b/meilisearch/src/analytics/segment_analytics.rs
index 21b6696e7..566770646 100644
--- a/meilisearch/src/analytics/segment_analytics.rs
+++ b/meilisearch/src/analytics/segment_analytics.rs
@@ -401,12 +401,19 @@ impl Segment {
         if let Ok(stats) =
             create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default())
         {
+            // Replace the version number with the prototype name if any.
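+            // Illustrative: a binary built from a tag named `prototype-auto-resize-0`
+            // would identify itself as that string here instead of the plain
+            // `CARGO_PKG_VERSION` (e.g. "1.0.0").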
+ let version = if let Some(prototype) = crate::prototype_name() { + prototype + } else { + env!("CARGO_PKG_VERSION") + }; + let _ = self .batcher .push(Identify { context: Some(json!({ "app": { - "version": env!("CARGO_PKG_VERSION").to_string(), + "version": version.to_string(), }, })), user: self.user.clone(), From a341c94871799db5e7bb38503f90ff0cda0a0dcb Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 9 Feb 2023 16:26:59 +0100 Subject: [PATCH 171/186] Update contributing.md --- CONTRIBUTING.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 19e1d9372..31fe45a94 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -121,15 +121,19 @@ The full Meilisearch release process is described in [this guide](https://github Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier to test by the users. The prototype name must follow this convention: `prototype-X-Y` where -- `X` is the feature name formatted in `kebab-case` +- `X` is the feature name formatted in `kebab-case`. It should not end with a single number. - `Y` is the version of the prototype, starting from `0`. -Example: `prototype-auto-resize-0`. +✅ Example: `prototype-auto-resize-0`.
+❌ Bad example: `auto-resize-0`: lacks the `prototype` prefix.
+❌ Bad example: `prototype-auto-resize`: lacks the version suffix.
+❌ Bad example: `prototype-auto-resize-0-0`: feature name ends with a single number.
 
 Steps to create a prototype:
 
 1. In your terminal, go to the last commit of your branch (the one you want to provide as a prototype).
 2. Create a tag following the convention: `git tag prototype-X-Y`
+3. Run Meilisearch and check that its launch summary features a line: `Prototype: prototype-X-Y` (you may need to switch branches and back after tagging for this to work).
 4. Push the tag: `git push origin prototype-X-Y`
 5. Check the [Docker CI](https://github.com/meilisearch/meilisearch/actions/workflows/publish-docker-images.yml) is now running.
 
@@ -138,7 +142,7 @@ More information about [how to run Meilisearch with Docker](https://docs.meilise
 
 ⚙️ However, no binaries will be created. If the users do not use Docker, they can go to the `prototype-X-Y` tag in the Meilisearch repository and compile from the source code.
 
-⚠️ When sharing a prototype with users, prevent them from using it in production. Prototypes are only for test purposes.
+⚠️ When sharing a prototype with users, remind them not to use it in production. Prototypes are solely for test purposes.
 
 ### Release assets
 
From 9bd1cfb3a323fbac4c31ed5155175ab45e71035f Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Thu, 9 Feb 2023 16:27:18 +0100
Subject: [PATCH 172/186] Ignore -dirty flag

---
 meilisearch/build.rs   | 2 --
 meilisearch/src/lib.rs | 9 ++-------
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/meilisearch/build.rs b/meilisearch/build.rs
index ba0cf9e70..cfc8cd63d 100644
--- a/meilisearch/build.rs
+++ b/meilisearch/build.rs
@@ -4,8 +4,6 @@ fn main() {
     let mut config = Config::default();
     // allow using non-annotated tags
     *config.git_mut().semver_kind_mut() = SemverKind::Lightweight;
-    // add -dirty suffix when we're not right on the tag
-    *config.git_mut().semver_dirty_mut() = Some("-dirty");
 
     if let Err(e) = vergen(config) {
         println!("cargo:warning=vergen: {}", e);
diff --git a/meilisearch/src/lib.rs b/meilisearch/src/lib.rs
index 8b87b5a87..f17f93e74 100644
--- a/meilisearch/src/lib.rs
+++ b/meilisearch/src/lib.rs
@@ -435,18 +435,13 @@ pub fn configure_metrics_route(config: &mut web::ServiceConfig, enable_metrics_r
 /// Returns `Some(prototype_name)` if the following conditions are met on this value:
 ///
 /// 1. starts with `prototype-`,
-/// 2. does not end with `-dirty`,
-/// 3. ends with `-<some_number>`,
-/// 4. does not end with `-<some_number>-<some_number>`.
+/// 2. ends with `-<some_number>`,
+/// 3. does not end with `-<some_number>-<some_number>`.
 ///
 /// Otherwise, returns `None`.
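 /// For illustration (hypothetical tag names): `prototype-auto-resize-0` and
 /// `prototype-auto-resize-12` are accepted, while `auto-resize-0`,
 /// `prototype-auto-resize`, and `prototype-auto-resize-0-0` are rejected.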
pub fn prototype_name() -> Option<&'static str> { let prototype: &'static str = option_env!("VERGEN_GIT_SEMVER_LIGHTWEIGHT")?; - if prototype.ends_with("-dirty") { - return None; - } - if !prototype.starts_with("prototype-") { return None; } From e1ed4bc7509e5fac65cffbdee6333ca4ee7ddea4 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Fri, 10 Feb 2023 15:48:54 +0100 Subject: [PATCH 173/186] Change Dockerfile to also pass the VERGEN_GIT_SEMVER_LIGHTWEIGHT when building --- .github/uffizzi/Dockerfile | 3 ++- .github/workflows/publish-docker-images.yml | 1 + Dockerfile | 3 ++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/uffizzi/Dockerfile b/.github/uffizzi/Dockerfile index ae2b8231e..57ee4aa8d 100644 --- a/.github/uffizzi/Dockerfile +++ b/.github/uffizzi/Dockerfile @@ -7,7 +7,8 @@ WORKDIR /meilisearch ARG COMMIT_SHA ARG COMMIT_DATE -ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE} +ARG GIT_TAG +ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG} ENV RUSTFLAGS="-C target-feature=-crt-static" COPY . . diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index 5d1b50f79..34f52e0ed 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -92,6 +92,7 @@ jobs: build-args: | COMMIT_SHA=${{ github.sha }} COMMIT_DATE=${{ steps.build-metadata.outputs.date }} + GIT_TAG=$(printf "%q" ${{ github.ref_name }}) # /!\ Don't touch this without checking with Cloud team - name: Send CI information to Cloud team diff --git a/Dockerfile b/Dockerfile index 6846fdad7..70950f338 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,7 +7,8 @@ WORKDIR /meilisearch ARG COMMIT_SHA ARG COMMIT_DATE -ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} +ARG GIT_TAG +ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG} ENV RUSTFLAGS="-C target-feature=-crt-static" COPY . . From 54240db4952a78f168e7e43e8ad0294de9f72b42 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Fri, 10 Feb 2023 15:50:21 +0100 Subject: [PATCH 174/186] Add note in code so one does not forget next time --- meilisearch/build.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/meilisearch/build.rs b/meilisearch/build.rs index cfc8cd63d..c839b6e33 100644 --- a/meilisearch/build.rs +++ b/meilisearch/build.rs @@ -1,6 +1,9 @@ use vergen::{vergen, Config, SemverKind}; fn main() { + // Note: any code that needs VERGEN_ environment variables should take care to define them manually in the Dockerfile and pass them + // in the corresponding GitHub workflow (publish_docker.yml). + // This is due to the Dockerfile building the binary outside of the git directory. 
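+    // Illustrative flow (values are hypothetical): `docker build --build-arg GIT_TAG=prototype-my-feature-0 .`
+    // ends up as `ENV VERGEN_GIT_SEMVER_LIGHTWEIGHT=prototype-my-feature-0`, which
+    // `option_env!("VERGEN_GIT_SEMVER_LIGHTWEIGHT")` then reads at compile time.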
let mut config = Config::default();
     // allow using non-annotated tags
     *config.git_mut().semver_kind_mut() = SemverKind::Lightweight;
From a43765d45473e41ab0fd6a2298b676742142af5f Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Tue, 14 Feb 2023 13:50:34 +0100
Subject: [PATCH 175/186] Do not escape tag name

$() syntax is not interpreted by the Dockerfile
---
 .github/workflows/publish-docker-images.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml
index 34f52e0ed..39bab4d0d 100644
--- a/.github/workflows/publish-docker-images.yml
+++ b/.github/workflows/publish-docker-images.yml
@@ -92,7 +92,7 @@ jobs:
         build-args: |
           COMMIT_SHA=${{ github.sha }}
           COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
-          GIT_TAG=$(printf "%q" ${{ github.ref_name }})
+          GIT_TAG=${{ github.ref_name }}
 
       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
From f11c7d4b627e66809864f868eb675d8b883995e4 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Thu, 16 Feb 2023 18:03:45 +0100
Subject: [PATCH 176/186] cargo run executes meilisearch by default

---
 meilisearch/Cargo.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml
index bb30767e5..91f05fd16 100644
--- a/meilisearch/Cargo.toml
+++ b/meilisearch/Cargo.toml
@@ -10,6 +10,8 @@ readme.workspace = true
 edition.workspace = true
 license.workspace = true
 
+default-run = "meilisearch"
+
 [dependencies]
 actix-cors = "0.6.3"
 actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
From 895ab2906c1a2478d5550f3c6763a91967561f47 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Thu, 16 Feb 2023 18:42:47 +0100
Subject: [PATCH 177/186] apply review suggestions

---
 .../helpers/merge_functions.rs                | 36 ++++++++++++++
 .../src/update/index_documents/helpers/mod.rs |  4 +-
 milli/src/update/index_documents/transform.rs | 49 ++++++-------------
 3 files changed, 52 insertions(+), 37 deletions(-)

diff --git a/milli/src/update/index_documents/helpers/merge_functions.rs b/milli/src/update/index_documents/helpers/merge_functions.rs
index 6e3aa7ec7..7b8891a7a 100644
--- a/milli/src/update/index_documents/helpers/merge_functions.rs
+++ b/milli/src/update/index_documents/helpers/merge_functions.rs
@@ -6,6 +6,7 @@ use roaring::RoaringBitmap;
 
 use super::read_u32_ne_bytes;
 use crate::heed_codec::CboRoaringBitmapCodec;
+use crate::update::index_documents::transform::Operation;
 use crate::Result;
 
 pub type MergeFn = for<'a> fn(&[u8], &[Cow<'a, [u8]>]) -> Result<Cow<'a, [u8]>>;
@@ -73,6 +74,41 @@ pub fn merge_two_obkvs(base: obkv::KvReaderU16, update: obkv::KvReaderU16, buffe
     writer.finish().unwrap();
 }
 
+/// Merge all the obkvs in the order we see them.
+pub fn merge_obkvs_and_operations<'a>(
+    _key: &[u8],
+    obkvs: &[Cow<'a, [u8]>],
+) -> Result<Cow<'a, [u8]>> {
+    // [add, add, delete, add, add]
+    // we can ignore everything that happened before the last delete.
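+    // Illustrative walk-through: for [add(a), add(b), delete, add(c), add(d)],
+    // `rposition` below finds the delete at index 2, so add(a) and add(b) never
+    // contribute and the final obkv is add(c) overlaid with add(d).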
+    let starting_position =
+        obkvs.iter().rposition(|obkv| obkv[0] == Operation::Deletion as u8).unwrap_or(0);
+
+    // [add, add, delete]
+    // if the last operation was a deletion then we simply return the deletion
+    if starting_position == obkvs.len() - 1 && obkvs.last().unwrap()[0] == Operation::Deletion as u8
+    {
+        return Ok(obkvs[obkvs.len() - 1].clone());
+    }
+    let mut buffer = Vec::new();
+
+    // (add, add, delete) [add, add]
+    // in the other case, no deletion will be encountered during the merge
+    let mut ret =
+        obkvs[starting_position..].iter().cloned().fold(Vec::new(), |mut acc, current| {
+            let first = obkv::KvReader::new(&acc);
+            let second = obkv::KvReader::new(&current[1..]);
+            merge_two_obkvs(first, second, &mut buffer);
+
+            // we want the result of the merge into our accumulator
+            std::mem::swap(&mut acc, &mut buffer);
+            acc
+        });
+
+    ret.insert(0, Operation::Addition as u8);
+    Ok(Cow::from(ret))
+}
+
 pub fn merge_cbo_roaring_bitmaps<'a>(
     _key: &[u8],
     values: &[Cow<'a, [u8]>],
diff --git a/milli/src/update/index_documents/helpers/mod.rs b/milli/src/update/index_documents/helpers/mod.rs
index 3a25851e4..ce6a2abe9 100644
--- a/milli/src/update/index_documents/helpers/mod.rs
+++ b/milli/src/update/index_documents/helpers/mod.rs
@@ -14,8 +14,8 @@ pub use grenad_helpers::{
 };
 pub use merge_functions::{
     concat_u32s_array, keep_first, keep_latest_obkv, merge_cbo_roaring_bitmaps,
-    merge_roaring_bitmaps, merge_two_obkvs, roaring_bitmap_from_u32s_array,
-    serialize_roaring_bitmap, MergeFn,
+    merge_obkvs_and_operations, merge_roaring_bitmaps, merge_two_obkvs,
+    roaring_bitmap_from_u32s_array, serialize_roaring_bitmap, MergeFn,
 };
 
 use crate::MAX_WORD_LENGTH;
diff --git a/milli/src/update/index_documents/transform.rs b/milli/src/update/index_documents/transform.rs
index 0624db468..6097278a7 100644
--- a/milli/src/update/index_documents/transform.rs
+++ b/milli/src/update/index_documents/transform.rs
@@ -12,7 +12,9 @@ use roaring::RoaringBitmap;
 use serde_json::Value;
 use smartstring::SmartString;
 
-use super::helpers::{create_sorter, create_writer, keep_latest_obkv, merge_two_obkvs, MergeFn};
+use super::helpers::{
+    create_sorter, create_writer, keep_latest_obkv, merge_obkvs_and_operations, MergeFn,
+};
 use super::{IndexDocumentsMethod, IndexerConfig};
 use crate::documents::{DocumentsBatchIndex, EnrichedDocument, EnrichedDocumentsBatchReader};
 use crate::error::{Error, InternalError, UserError};
@@ -66,7 +68,7 @@ pub struct Transform<'a, 'i> {
 /// This enum is specific to the grenad sorter stored in the transform.
 /// It's used as the first byte of the grenads and tells you if the document id was an addition or a deletion.
 #[repr(u8)]
-enum Operation {
+pub enum Operation {
     Addition,
     Deletion,
 }
@@ -327,7 +329,16 @@ impl<'a, 'i> Transform<'a, 'i> {
         Ok(documents_count)
     }
 
-    /// The counter part of `read_documents` that removes documents that may have been inserted into the transform previously.
+    /// The counterpart of `read_documents` that removes documents either from the transform or the database.
+    /// It can be called before, after, or in between two calls of `read_documents`.
+    ///
+    /// It needs to update all the internal data structures in the transform.
+    /// - If the document is coming from the database -> it's marked as a to_delete document
+    /// - If the document to remove was inserted by the `read_documents` method before AND was present in the db,
+    ///   it's marked as `to_delete` + added into the grenad to ensure we don't reinsert it.
+    /// - If the document to remove was inserted by the `read_documents` method before but was NOT present in the db,
+    ///   it's added into the grenad to ensure we don't insert it + removed from the list of new document ids.
+    /// - If the document to remove was not present in either the db or the transform we do nothing.
     pub fn remove_documents(
         &mut self,
         mut to_remove: Vec<String>,
@@ -783,38 +794,6 @@ impl<'a, 'i> Transform<'a, 'i> {
     }
 }
 
-/// Merge all the obks in the order we see them.
-fn merge_obkvs_and_operations<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result<Cow<'a, [u8]>> {
-    // [add, add, delete, add, add]
-    // we can ignore everything that happened before the last delete.
-    let starting_position =
-        obkvs.iter().rposition(|obkv| obkv[0] == Operation::Deletion as u8).unwrap_or(0);
-
-    // [add, add, delete]
-    // if the last operation was a deletion then we simply return the deletion
-    if starting_position == obkvs.len() - 1 && obkvs.last().unwrap()[0] == Operation::Deletion as u8
-    {
-        return Ok(obkvs[obkvs.len() - 1].clone());
-    }
-    let mut buffer = Vec::new();
-
-    // (add, add, delete) [add, add]
-    // in the other case, no deletion will be encountered during the merge
-    let mut ret =
-        obkvs[starting_position..].iter().cloned().fold(Vec::new(), |mut acc, current| {
-            let first = obkv::KvReader::new(&acc);
-            let second = obkv::KvReader::new(&current[1..]);
-            merge_two_obkvs(first, second, &mut buffer);
-
-            // we want the result of the merge into our accumulator
-            std::mem::swap(&mut acc, &mut buffer);
-            acc
-        });
-
-    ret.insert(0, Operation::Addition as u8);
-    Ok(Cow::from(ret))
-}
-
 /// Drops all the value of type `U` in vec, and reuses the allocation to create a `Vec<T>`.
 ///
 /// The size and alignment of T and U must match.

From 18796d6e6a3a4e3e07627440ccb21c2778787885 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Mon, 20 Feb 2023 13:45:51 +0100
Subject: [PATCH 178/186] Consider null as a valid geo object

---
 meilisearch/tests/documents/add_documents.rs | 47 +++++++++++++++++++
 milli/src/update/index_documents/enrich.rs   |  1 +
 .../extract/extract_geo_points.rs            |  1 +
 3 files changed, 49 insertions(+)

diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs
index e553dcacd..e1530ccc0 100644
--- a/meilisearch/tests/documents/add_documents.rs
+++ b/meilisearch/tests/documents/add_documents.rs
@@ -1027,6 +1027,53 @@ async fn error_document_field_limit_reached() {
         @"");
 }
 
+#[actix_rt::test]
+async fn add_documents_with_geo_field() {
+    let server = Server::new().await;
+    let index = server.index("doggo");
+    index.update_settings(json!({"sortableAttributes": ["_geo"]})).await;
+
+    let documents = json!([
+        {
+            "id": "1",
+        },
+        {
+            "id": "2",
+            "_geo": null,
+        },
+        {
+            "id": "3",
+            "_geo": { "lat": 1, "lng": 1 },
+        },
+        {
+            "id": "4",
+            "_geo": { "lat": "1", "lng": "1" },
+        },
+    ]);
+
+    index.add_documents(documents, None).await;
+    let response = index.wait_task(1).await;
+    snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
+        @r###"
+    {
+      "uid": 1,
+      "indexUid": "doggo",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 4,
+        "indexedDocuments": 4
+      },
+      "error": null,
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
+}
+
 #[actix_rt::test]
 async fn add_documents_invalid_geo_field() {
     let server = Server::new().await;
diff --git a/milli/src/update/index_documents/enrich.rs b/milli/src/update/index_documents/enrich.rs
index ed04e9962..09599ac82 100644
--- a/milli/src/update/index_documents/enrich.rs
+++ b/milli/src/update/index_documents/enrich.rs
@@ -395,6 +395,7 @@ pub fn validate_geo_from_json(id: &DocumentId, bytes: &[u8]) -> Result<StdResul
             (Some(_), None) => Ok(Err(MissingLongitude { document_id: debug_id() })),
             (None, None) => Ok(Err(MissingLatitudeAndLongitude { document_id: debug_id() })),
         },
+        Value::Null => Ok(Ok(())),
         value => Ok(Err(NotAnObject { document_id: debug_id(), value })),
     }
 }
diff --git a/milli/src/update/index_documents/extract/extract_geo_points.rs b/milli/src/update/index_documents/extract/extract_geo_points.rs
index 55044e712..ddb38abe5 100644
--- a/milli/src/update/index_documents/extract/extract_geo_points.rs
+++ b/milli/src/update/index_documents/extract/extract_geo_points.rs
@@ -59,6 +59,7 @@ pub fn extract_geo_points(
         } else if lat.is_some() && lng.is_none() {
             return Err(GeoError::MissingLongitude { document_id: document_id() })?;
         }
+        // else => the _geo object was `null`, there is nothing to do
     }
 
     writer_into_reader(writer)

From dd120e0e16bef1ab167ab9c3fd433bad8b0ddd1a Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Mon, 20 Feb 2023 13:45:57 +0100
Subject: [PATCH 179/186] Bump version of mini-dashboard to v0.2.6

---
 meilisearch/Cargo.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml
index 91f05fd16..eff27479e 100644
--- a/meilisearch/Cargo.toml
+++ b/meilisearch/Cargo.toml
@@ -116,5 +116,5 @@ japanese = ["meilisearch-types/japanese"]
 thai = ["meilisearch-types/thai"]
 
 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.5/build.zip"
-sha1 = "6fe959b78511b32e9ff857fd9fd31740633b9fce"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.6/build.zip"
+sha1 = "dce0aba16bceab5549edf9f01de89858800f7422"

From 4c519c2ab3db032b525d57117b95a76b2a5e8517 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 9 Jan 2023 09:33:44 +0100
Subject: [PATCH 180/186] Add Batch::index_uid

---
 index-scheduler/src/batch.rs | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/index-scheduler/src/batch.rs b/index-scheduler/src/batch.rs
index 8a479a12b..50225ff82 100644
--- a/index-scheduler/src/batch.rs
+++ b/index-scheduler/src/batch.rs
@@ -169,6 +169,22 @@ impl Batch {
             Batch::IndexSwap { task } => vec![task.uid],
         }
     }
+
+    /// Return the index UID associated with this batch
+    pub fn index_uid(&self) -> Option<&str> {
+        use Batch::*;
+        match self {
+            TaskCancelation { .. }
+            | TaskDeletion(_)
+            | SnapshotCreation(_)
+            | Dump(_)
+            | IndexSwap { .. } => None,
+            IndexOperation { op, .. } => Some(op.index_uid()),
+            IndexCreation { index_uid, .. }
+            | IndexUpdate { index_uid, .. }
+            | IndexDeletion { index_uid, .. } => Some(index_uid),
+        }
+    }
 }
 
 impl IndexOperation {

From faf1e17a27761e89ed5888dca1c6879f67b2e234 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 9 Jan 2023 09:34:46 +0100
Subject: [PATCH 181/186] `create_or_open_index` takes a `map_size` argument

---
 index-scheduler/src/index_mapper.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/index-scheduler/src/index_mapper.rs b/index-scheduler/src/index_mapper.rs
index 02b53749f..9b9da9852 100644
--- a/index-scheduler/src/index_mapper.rs
+++ b/index-scheduler/src/index_mapper.rs
@@ -71,9 +71,10 @@ impl IndexMapper {
         &self,
         path: &Path,
         date: Option<(OffsetDateTime, OffsetDateTime)>,
+        map_size: usize,
     ) -> Result<Index> {
         let mut options = EnvOpenOptions::new();
-        options.map_size(clamp_to_page_size(self.index_size));
+        options.map_size(clamp_to_page_size(map_size));
         options.max_readers(1024);
 
         if let Some((created, updated)) = date {
@@ -102,7 +103,7 @@ impl IndexMapper {
             let index_path = self.base_path.join(uuid.to_string());
             fs::create_dir_all(&index_path)?;
 
-            let index = self.create_or_open_index(&index_path, date)?;
+            let index = self.create_or_open_index(&index_path, date, self.index_size)?;
 
             wtxn.commit()?;
             // TODO: it would be better to lazily create the index. But we need an Index::open function for milli.
@@ -196,7 +197,8 @@ impl IndexMapper {
                 Entry::Vacant(entry) => {
                     let index_path = self.base_path.join(uuid.to_string());
 
-                    let index = self.create_or_open_index(&index_path, None)?;
+                    let index =
+                        self.create_or_open_index(&index_path, None, self.index_size)?;
                     entry.insert(Available(index.clone()));
                     index
                 }

From 6cc3797aa16aa0911fb0415e411ff8e30a23838b Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 9 Jan 2023 19:30:29 +0100
Subject: [PATCH 182/186] IndexScheduler::tick returns a TickOutcome

---
 index-scheduler/src/lib.rs | 27 ++++++++++++++++++---------
 1 file changed, 18 insertions(+), 9 deletions(-)

diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs
index 8dd16f961..4ef218143 100644
--- a/index-scheduler/src/lib.rs
+++ b/index-scheduler/src/lib.rs
@@ -423,12 +423,12 @@ impl IndexScheduler {
                 #[cfg(test)]
                 run.breakpoint(Breakpoint::Init);
 
-                loop {
-                    run.wake_up.wait();
+                run.wake_up.wait();
 
+                loop {
                     match run.tick() {
-                        Ok(0) => (),
-                        Ok(_) => run.wake_up.signal(),
+                        Ok(TickOutcome::TickAgain(_)) => (),
+                        Ok(TickOutcome::WaitForSignal) => run.wake_up.wait(),
                         Err(e) => {
                             log::error!("{}", e);
                             // Wait one second when an irrecoverable error occurs.
@@ -441,7 +441,6 @@ impl IndexScheduler {
                             ) {
                                 std::thread::sleep(Duration::from_secs(1));
                             }
-                            run.wake_up.signal();
                         }
                     }
                 }
@@ -927,7 +926,7 @@ impl IndexScheduler {
     /// 5. Reset the in-memory list of processed tasks.
     ///
    /// Returns the number of processed tasks.
-    fn tick(&self) -> Result<usize> {
+    fn tick(&self) -> Result<TickOutcome> {
         #[cfg(test)]
         {
            *self.run_loop_iteration.write().unwrap() += 1;
@@ -938,7 +937,7 @@ impl IndexScheduler {
         let batch =
             match self.create_next_batch(&rtxn).map_err(|e| Error::CreateBatch(Box::new(e)))? {
                 Some(batch) => batch,
-                None => return Ok(0),
+                None => return Ok(TickOutcome::WaitForSignal),
             };
         drop(rtxn);
 
@@ -1010,7 +1009,7 @@ impl IndexScheduler {
                 // the `started_at` date times and `processings` of the current processing tasks.
                 // This date time is used by the task cancelation to store the right `started_at`
                 // date in the task on disk.
-                return Ok(0);
+                return Ok(TickOutcome::TickAgain(0));
             }
             // In case of a failure we must get back and patch all the tasks with the error.
             Err(err) => {
@@ -1050,7 +1049,7 @@ impl IndexScheduler {
         #[cfg(test)]
         self.breakpoint(Breakpoint::AfterProcessing);
 
-        Ok(processed_tasks)
+        Ok(TickOutcome::TickAgain(processed_tasks))
     }
 
     pub(crate) fn delete_persisted_task_data(&self, task: &Task) -> Result<()> {
@@ -1085,6 +1084,16 @@ impl IndexScheduler {
     }
 }
 
+/// The outcome of calling the [`IndexScheduler::tick`] function.
+pub enum TickOutcome {
+    /// The scheduler should immediately attempt another `tick`.
+    ///
+    /// The `usize` field contains the number of processed tasks.
+    TickAgain(usize),
+    /// The scheduler should wait for an external signal before attempting another `tick`.
+    WaitForSignal,
+}
+
 #[cfg(test)]
 mod tests {
     use std::io::{BufWriter, Seek, Write};

From 1c670d7fa0993a48c35ddeb1ee65c9552bb56067 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 9 Jan 2023 18:11:01 +0100
Subject: [PATCH 183/186] Add IndexStatus::BeingResized

---
 index-scheduler/src/index_mapper.rs | 97 +++++++++++++++++++----------
 1 file changed, 63 insertions(+), 34 deletions(-)

diff --git a/index-scheduler/src/index_mapper.rs b/index-scheduler/src/index_mapper.rs
index 9b9da9852..1562a918d 100644
--- a/index-scheduler/src/index_mapper.rs
+++ b/index-scheduler/src/index_mapper.rs
@@ -9,10 +9,11 @@ use meilisearch_types::heed::types::Str;
 use meilisearch_types::heed::{Database, Env, EnvOpenOptions, RoTxn, RwTxn};
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::Index;
+use synchronoise::SignalEvent;
 use time::OffsetDateTime;
 use uuid::Uuid;
 
-use self::IndexStatus::{Available, BeingDeleted};
+use self::IndexStatus::{Available, BeingDeleted, BeingResized};
 use crate::uuid_codec::UuidCodec;
 use crate::{clamp_to_page_size, Error, Result};
 
@@ -45,6 +46,8 @@ pub struct IndexMapper {
 pub enum IndexStatus {
     /// Do not insert it back in the index map as it is currently being deleted.
     BeingDeleted,
+    /// Temporarily do not insert the index in the index map as it is currently being resized.
+    BeingResized(Arc<SignalEvent>),
     /// You can use the index without worrying about anything.
     Available(Index),
 }
@@ -106,11 +109,12 @@ impl IndexMapper {
             let index = self.create_or_open_index(&index_path, date, self.index_size)?;
 
             wtxn.commit()?;
+            // Error if the UUIDv4 somehow already exists in the map, since it should be fresh.
+            // This is very unlikely to happen in practice.
             // TODO: it would be better to lazily create the index. But we need an Index::open function for milli.
-            if let Some(BeingDeleted) =
-                self.index_map.write().unwrap().insert(uuid, Available(index.clone()))
+            if self.index_map.write().unwrap().insert(uuid, Available(index.clone())).is_some()
             {
-                panic!("Uuid v4 conflict.");
+                panic!("Uuid v4 conflict: index with UUID {uuid} already exists.");
             }
 
             Ok(index)
@@ -132,13 +136,23 @@ impl IndexMapper {
         wtxn.commit()?;
 
         // We remove the index from the in-memory index map.
-        let mut lock = self.index_map.write().unwrap();
-        let closing_event = match lock.insert(uuid, BeingDeleted) {
-            Some(Available(index)) => Some(index.prepare_for_closing()),
-            _ => None,
-        };
+        let closing_event = loop {
+            let mut lock = self.index_map.write().unwrap();
+            let resize_operation = match lock.insert(uuid, BeingDeleted) {
+                Some(Available(index)) => break Some(index.prepare_for_closing()),
+                // The target index is in the middle of a resize operation.
+                // Wait for this operation to complete, then try again.
+                Some(BeingResized(resize_operation)) => resize_operation.clone(),
+                // The index is already being deleted or doesn't exist.
+                // It's OK to remove it from the map again.
+                _ => break None,
+            };
 
-        drop(lock);
+            // Avoiding deadlocks: we need to drop the lock before waiting for the end of the resize, which
+            // will involve operations on the very map we're locking.
+            drop(lock);
+            resize_operation.wait();
+        };
 
         let index_map = self.index_map.clone();
         let index_path = self.base_path.join(uuid.to_string());
@@ -180,32 +194,47 @@ impl IndexMapper {
             .ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
 
         // we clone here to drop the lock before entering the match
-        let index = self.index_map.read().unwrap().get(&uuid).cloned();
-        let index = match index {
-            Some(Available(index)) => index,
-            Some(BeingDeleted) => return Err(Error::IndexNotFound(name.to_string())),
-            // since we're lazy, it's possible that the index has not been opened yet.
-            None => {
-                let mut index_map = self.index_map.write().unwrap();
-                // between the read lock and the write lock it's not impossible
-                // that someone already opened the index (eg if two search happens
-                // at the same time), thus before opening it we check a second time
-                // if it's not already there.
-                // Since there is a good chance it's not already there we can use
-                // the entry method.
-                match index_map.entry(uuid) {
-                    Entry::Vacant(entry) => {
-                        let index_path = self.base_path.join(uuid.to_string());
+        let index = loop {
+            let index = self.index_map.read().unwrap().get(&uuid).cloned();
+
+            match index {
+                Some(Available(index)) => break index,
+                Some(BeingResized(ref resize_operation)) => {
+                    // Avoiding deadlocks: no lock taken while doing this operation.
+                    resize_operation.wait();
+                    continue;
+                }
+                Some(BeingDeleted) => return Err(Error::IndexNotFound(name.to_string())),
+                // since we're lazy, it's possible that the index has not been opened yet.
+                None => {
+                    let mut index_map = self.index_map.write().unwrap();
+                    // between the read lock and the write lock it's not impossible
+                    // that someone already opened the index (eg if two search happens
+                    // at the same time), thus before opening it we check a second time
+                    // if it's not already there.
+                    // Since there is a good chance it's not already there we can use
+                    // the entry method.
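+                    // (checking once more under the write lock is the classic
+                    // double-checked locking pattern)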
+                    match index_map.entry(uuid) {
+                        Entry::Vacant(entry) => {
+                            let index_path = self.base_path.join(uuid.to_string());
+
+                            let index =
+                                self.create_or_open_index(&index_path, None, self.index_size)?;
+                            entry.insert(Available(index.clone()));
+                            break index;
+                        }
+                        Entry::Occupied(entry) => match entry.get() {
+                            Available(index) => break index.clone(),
+                            BeingResized(resize_operation) => {
+                                // Avoiding the deadlock: we drop the lock before waiting
+                                let resize_operation = resize_operation.clone();
+                                drop(index_map);
+                                resize_operation.wait();
+                                continue;
+                            }
+                            BeingDeleted => return Err(Error::IndexNotFound(name.to_string())),
+                        },
                     }
-                    Entry::Occupied(entry) => match entry.get() {
-                        Available(index) => index.clone(),
-                        BeingDeleted => return Err(Error::IndexNotFound(name.to_string())),
-                    },
-                }
             }
         };

From 951a5b5832de732c76d616ea5341f0a764ce9eb7 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 9 Jan 2023 09:35:37 +0100
Subject: [PATCH 184/186] Add IndexMapper::resize_index fn

---
 index-scheduler/src/index_mapper.rs | 63 +++++++++++++++++++++++++++++
 1 file changed, 63 insertions(+)

diff --git a/index-scheduler/src/index_mapper.rs b/index-scheduler/src/index_mapper.rs
index 1562a918d..7a9591254 100644
--- a/index-scheduler/src/index_mapper.rs
+++ b/index-scheduler/src/index_mapper.rs
@@ -186,6 +186,69 @@ impl IndexMapper {
         Ok(self.index_mapping.get(rtxn, name)?.is_some())
     }
 
+    /// Resizes the maximum size of the specified index to double its current maximum size.
+    ///
+    /// This operation involves closing the underlying environment and so can take a long time to complete.
+    ///
+    /// # Panics
+    ///
+    /// - If the Index corresponding to the passed name is concurrently being deleted/resized or cannot be found in the
+    ///   in-memory hash map.
+    pub fn resize_index(&self, rtxn: &RoTxn, name: &str) -> Result<()> {
+        // fixme: factor to a function?
+        let uuid = self
+            .index_mapping
+            .get(rtxn, name)?
+            .ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
+
+        // We remove the index from the in-memory index map.
+        let mut lock = self.index_map.write().unwrap();
+        // signal that will be sent when the resize operation completes
+        let resize_operation = Arc::new(SignalEvent::manual(false));
+        let index = match lock.insert(uuid, BeingResized(resize_operation)) {
+            Some(Available(index)) => index,
+            Some(previous_status) => {
+                lock.insert(uuid, previous_status);
+                panic!(
+                    "Attempting to resize index {name} that is already being resized or deleted."
+                )
+            }
+            None => {
+                panic!("Could not find the status of index {name} in the in-memory index mapper.")
+            }
+        };
+
+        drop(lock);
+
+        let current_size = index.map_size()?;
+        let new_size = current_size * 2;
+        let closing_event = index.prepare_for_closing();
+
+        log::debug!("Waiting for index {name} to close");
+
+        if !closing_event.wait_timeout(std::time::Duration::from_secs(600)) {
+            // fail after 10 minutes waiting
+            panic!("Could not resize index {name} (unable to close it)");
+        }
+
+        log::info!("Resized index {name} from {current_size} to {new_size} bytes");
+
+        let index_path = self.base_path.join(uuid.to_string());
+        let index = self.create_or_open_index(&index_path, None, new_size)?;
+
+        // Add back the resized index
+        let mut lock = self.index_map.write().unwrap();
+        let Some(BeingResized(resize_operation)) = lock.insert(uuid, Available(index)) else {
+            panic!("Index state for index {name} was modified while it was being resized")
+        };
+
+        // drop the lock before signaling completion so that other threads don't immediately await on the lock after waking up.
+        drop(lock);
+        resize_operation.signal();
+
+        Ok(())
+    }
+
     /// Return an index, may open it if it wasn't already opened.
     pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
         let uuid = self

From 11167884754b4e4815effc23b45f876e6d87633a Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Mon, 9 Jan 2023 09:36:00 +0100
Subject: [PATCH 185/186] Resize indexes when they're full

---
 index-scheduler/src/lib.rs | 21 +++++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs
index 4ef218143..61e78503b 100644
--- a/index-scheduler/src/lib.rs
+++ b/index-scheduler/src/lib.rs
@@ -764,8 +764,8 @@ impl IndexScheduler {
         Ok(task)
     }
 
-    /// Register a new task comming from a dump in the scheduler.
-    /// By takinig a mutable ref we're pretty sure no one will ever import a dump while actix is running.
+    /// Register a new task coming from a dump in the scheduler.
+    /// By taking a mutable ref we're pretty sure no one will ever import a dump while actix is running.
     pub fn register_dumped_task(
         &mut self,
         task: TaskDump,
@@ -939,6 +939,7 @@ impl IndexScheduler {
                 Some(batch) => batch,
                 None => return Ok(TickOutcome::WaitForSignal),
             };
+        let index_uid = batch.index_uid().map(ToOwned::to_owned);
         drop(rtxn);
 
         // 1. store the starting date with the bitmap of processing tasks.
@@ -1011,6 +1012,22 @@ impl IndexScheduler {
                 // date in the task on disk.
                 return Ok(TickOutcome::TickAgain(0));
             }
+            // If an index said it was full, we need to:
+            // 1. identify which index is full
+            // 2. close the associated environment
+            // 3. resize it
+            // 4. re-schedule tasks
+            Err(Error::Milli(milli::Error::UserError(
+                milli::UserError::MaxDatabaseSizeReached,
+            ))) if index_uid.is_some() => {
+                // fixme: add index_uid to match to avoid the unwrap
+                let index_uid = index_uid.unwrap();
+                // fixme: handle error more gracefully? not sure when this could happen
+                self.index_mapper.resize_index(&wtxn, &index_uid)?;
+                wtxn.abort().map_err(Error::HeedTransaction)?;
+
+                return Ok(TickOutcome::TickAgain(0));
+            }
             // In case of a failure we must get back and patch all the tasks with the error.
             Err(err) => {
                 #[cfg(test)]

From 35f6c624bc5fd2c55598a61cfdcf286a44983e41 Mon Sep 17 00:00:00 2001
From: Louis Dureuil
Date: Tue, 10 Jan 2023 17:35:50 +0100
Subject: [PATCH 186/186] Make sure we don't leave the in-memory hashmap in an
 inconsistent state

---
 index-scheduler/src/index_mapper.rs | 50 ++++++++++++++++++++---------
 1 file changed, 34 insertions(+), 16 deletions(-)

diff --git a/index-scheduler/src/index_mapper.rs b/index-scheduler/src/index_mapper.rs
index 7a9591254..d1fe7c57d 100644
--- a/index-scheduler/src/index_mapper.rs
+++ b/index-scheduler/src/index_mapper.rs
@@ -220,33 +220,51 @@ impl IndexMapper {
 
         drop(lock);
 
-        let current_size = index.map_size()?;
-        let new_size = current_size * 2;
-        let closing_event = index.prepare_for_closing();
+        let resize_succeeded = (move || {
+            let current_size = index.map_size()?;
+            let new_size = current_size * 2;
+            let closing_event = index.prepare_for_closing();
 
-        log::debug!("Waiting for index {name} to close");
+            log::debug!("Waiting for index {name} to close");
 
-        if !closing_event.wait_timeout(std::time::Duration::from_secs(600)) {
-            // fail after 10 minutes waiting
-            panic!("Could not resize index {name} (unable to close it)");
-        }
+            if !closing_event.wait_timeout(std::time::Duration::from_secs(600)) {
+                // fail after 10 minutes waiting
+                panic!("Could not resize index {name} (unable to close it)");
+            }
 
-        log::info!("Resized index {name} from {current_size} to {new_size} bytes");
+            log::info!("Resized index {name} from {current_size} to {new_size} bytes");
+            let index_path = self.base_path.join(uuid.to_string());
+            let index = self.create_or_open_index(&index_path, None, new_size)?;
+            Ok(index)
+        })();
 
-        let index_path = self.base_path.join(uuid.to_string());
-        let index = self.create_or_open_index(&index_path, None, new_size)?;
-
-        // Add back the resized index
+        // Put the map back to a consistent state.
+        // Even if there was an error we don't want to leave the map in an inconsistent state as it would cause
+        // deadlocks.
         let mut lock = self.index_map.write().unwrap();
-        let Some(BeingResized(resize_operation)) = lock.insert(uuid, Available(index)) else {
-            panic!("Index state for index {name} was modified while it was being resized")
+        let (resize_operation, resize_succeeded) = match resize_succeeded {
+            Ok(index) => {
+                // insert the resized index
+                let Some(BeingResized(resize_operation)) = lock.insert(uuid, Available(index)) else {
+                    panic!("Index state for index {name} was modified while it was being resized")
+                };
+
+                (resize_operation, Ok(()))
+            }
+            Err(error) => {
+                // there was an error, not much we can do... delete the index from the in-memory map to prevent future errors
+                let Some(BeingResized(resize_operation)) = lock.remove(&uuid) else {
+                    panic!("Index state for index {name} was modified while it was being resized")
+                };
+                (resize_operation, Err(error))
+            }
         };
 
         // drop the lock before signaling completion so that other threads don't immediately await on the lock after waking up.
         drop(lock);
         resize_operation.signal();
 
-        Ok(())
+        resize_succeeded
     }
 
     /// Return an index, may open it if it wasn't already opened.
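
The pattern that patches 183 through 186 converge on is worth spelling out once in plain code: a slot in the shared map is swapped for a `BeingResized` marker carrying a `synchronoise::SignalEvent`; any thread that finds the marker drops the lock first, waits on the signal, then retries, while the resizing thread restores a consistent slot before signaling. Below is a minimal, self-contained sketch of that discipline, assuming the `synchronoise` crate as a dependency. `Slot`, `SlotMap` and the `String` standing in for an open `Index` are illustrative names invented for the sketch, not Meilisearch types; `synchronoise::SignalEvent` is the same primitive the patches use.

use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::Duration;

use synchronoise::SignalEvent;

// Stand-in for `IndexStatus`: a slot is either usable or being resized.
#[derive(Clone)]
enum Slot {
    Available(String),
    BeingResized(Arc<SignalEvent>),
}

struct SlotMap {
    slots: RwLock<HashMap<u32, Slot>>,
}

impl SlotMap {
    /// Blocks until the slot is `Available`, never sleeping while the lock is held.
    fn get(&self, id: u32) -> Option<String> {
        loop {
            // Clone the state out of the map: the read guard is a temporary
            // and is released at the end of this statement.
            let slot = self.slots.read().unwrap().get(&id).cloned();
            match slot {
                Some(Slot::Available(index)) => return Some(index),
                // The lock is already gone here, so waiting cannot deadlock the resizer.
                Some(Slot::BeingResized(signal)) => signal.wait(),
                None => return None,
            }
        }
    }

    fn resize(&self, id: u32) {
        let signal = Arc::new(SignalEvent::manual(false));
        // Publish the marker, then do the slow work without holding the lock.
        self.slots.write().unwrap().insert(id, Slot::BeingResized(signal.clone()));
        std::thread::sleep(Duration::from_millis(100)); // the slow close/reopen happens here
        // Restore a consistent state *before* signaling, even on failure paths.
        self.slots.write().unwrap().insert(id, Slot::Available("resized".to_string()));
        // Wake every thread blocked in `get`; they re-read the map and find `Available`.
        signal.signal();
    }
}

fn main() {
    let map = Arc::new(SlotMap {
        slots: RwLock::new(HashMap::from([(1, Slot::Available("index".to_string()))])),
    });

    let map2 = Arc::clone(&map);
    let reader = std::thread::spawn(move || map2.get(1));
    map.resize(1);
    // The reader either saw the old slot or waited out the resize; it never deadlocks.
    println!("{:?}", reader.join().unwrap());
}

The two invariants this sketch preserves are exactly the ones patch 186 restores on the error path: never sleep while holding the map lock, and always put the map back into a consistent state before calling `signal()`.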