2173: chore(all): replace chrono with time r=irevoire a=irevoire

Chrono has been unmaintained for a few months now, and there is a CVE filed against it.
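Most of the change is a mechanical swap of chrono calls for their `time` 0.3 equivalents. A rough sketch of the mapping used throughout this PR (not exhaustive; the diff below has the actual call sites, and both crates are assumed at the versions pinned in the lockfile):

```rust
// Approximate chrono -> time correspondence (a sketch only):
//
//   chrono::Utc::now()               -> time::OffsetDateTime::now_utc()
//   chrono::DateTime<Utc>            -> time::OffsetDateTime
//   DateTime::parse_from_rfc3339(s)  -> OffsetDateTime::parse(s, &Rfc3339)
//   datetime.timestamp()             -> datetime.unix_timestamp()
//   dt.to_rfc3339_opts(..)           -> #[serde(with = "time::serde::rfc3339")]
fn main() {
    let now = time::OffsetDateTime::now_utc();
    println!("{now} -> unix timestamp {}", now.unix_timestamp());
}
```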

I also updated all the error messages related to the API keys, as you can see here: https://github.com/meilisearch/specifications/pull/114

fix #2172

Co-authored-by: Irevoire <tamo@meilisearch.com>
Committed by bors[bot] on 2022-02-17 14:12:23 +00:00 via GitHub
commit c3e3c900f2
33 changed files with 369 additions and 226 deletions

Cargo.lock (generated)

@@ -300,6 +300,12 @@ version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f"
 
+[[package]]
+name = "arrayvec"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
+
 [[package]]
 name = "as-slice"
 version = "0.1.5"
@@ -647,7 +653,6 @@ dependencies = [
  "libc",
  "num-integer",
  "num-traits",
- "serde",
  "time 0.1.44",
  "winapi",
 ]
@@ -989,9 +994,9 @@ dependencies = [
 
 [[package]]
 name = "filter-parser"
 version = "0.1.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.1#ea15ad6c34492b32eb7ac06e69de02b6dc70a707"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.2#f2984f66e64838d51f5cce412693fa411ee3f2d4"
 dependencies = [
- "nom",
+ "nom 7.1.0",
  "nom_locate",
 ]
@@ -1479,6 +1484,15 @@ version = "2.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9"
 
+[[package]]
+name = "iso8601-duration"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60b51dd97fa24074214b9eb14da518957573f4dec3189112610ae1ccec9ac464"
+dependencies = [
+ "nom 5.1.2",
+]
+
 [[package]]
 name = "itertools"
 version = "0.10.3"
@@ -1568,6 +1582,19 @@ dependencies = [
  "fst",
 ]
 
+[[package]]
+name = "lexical-core"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe"
+dependencies = [
+ "arrayvec",
+ "bitflags",
+ "cfg-if 1.0.0",
+ "ryu",
+ "static_assertions",
+]
+
 [[package]]
 name = "libc"
 version = "0.2.114"
@@ -1688,7 +1715,6 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
 name = "meilisearch-auth"
 version = "0.26.0"
 dependencies = [
- "chrono",
  "enum-iterator",
  "heed",
  "meilisearch-error",
@@ -1697,6 +1723,7 @@ dependencies = [
  "serde_json",
  "sha2",
  "thiserror",
+ "time 0.3.7",
 ]
 
 [[package]]
@@ -1727,7 +1754,6 @@ dependencies = [
  "byte-unit",
  "bytes",
  "cargo_toml",
- "chrono",
  "clap",
  "crossbeam-channel",
  "either",
@@ -1740,6 +1766,7 @@ dependencies = [
  "hex",
  "http",
  "indexmap",
+ "iso8601-duration",
  "itertools",
  "jsonwebtoken",
  "log",
@@ -1775,6 +1802,7 @@ dependencies = [
  "tempfile",
  "thiserror",
  "tikv-jemallocator",
+ "time 0.3.7",
  "tokio",
  "tokio-stream",
  "urlencoding",
@@ -1796,7 +1824,6 @@ dependencies = [
  "atomic_refcell",
  "byte-unit",
  "bytes",
- "chrono",
  "clap",
  "crossbeam-channel",
  "csv",
@@ -1840,6 +1867,7 @@ dependencies = [
  "tar",
  "tempfile",
  "thiserror",
+ "time 0.3.7",
  "tokio",
  "uuid",
  "walkdir",
@@ -1888,14 +1916,13 @@ dependencies = [
 
 [[package]]
 name = "milli"
-version = "0.22.1"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.1#ea15ad6c34492b32eb7ac06e69de02b6dc70a707"
+version = "0.23.0"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.2#f2984f66e64838d51f5cce412693fa411ee3f2d4"
 dependencies = [
  "bimap",
  "bincode",
  "bstr",
  "byteorder",
- "chrono",
  "concat-arrays",
  "crossbeam-channel",
  "csv",
@@ -1927,6 +1954,7 @@ dependencies = [
  "smallstr",
  "smallvec",
  "tempfile",
+ "time 0.3.7",
  "uuid",
 ]
@@ -2029,6 +2057,17 @@ name = "nelson"
 version = "0.1.0"
 source = "git+https://github.com/MarinPostma/nelson.git?rev=675f13885548fb415ead8fbb447e9e6d9314000a#675f13885548fb415ead8fbb447e9e6d9314000a"
 
+[[package]]
+name = "nom"
+version = "5.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
+dependencies = [
+ "lexical-core",
+ "memchr",
+ "version_check",
+]
+
 [[package]]
 name = "nom"
 version = "7.1.0"
@@ -2048,7 +2087,7 @@ checksum = "37794436ca3029a3089e0b95d42da1f0b565ad271e4d3bb4bad0c7bb70b10605"
 dependencies = [
  "bytecount",
  "memchr",
- "nom",
+ "nom 7.1.0",
 ]
 
 [[package]]
@@ -2780,16 +2819,16 @@ dependencies = [
 
 [[package]]
 name = "segment"
-version = "0.1.2"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bdcc286fff0e7c5ccd46c06a301c7a8a848b06acedc6983707bd311eb358002"
+checksum = "5c14967a911a216177366bac6dfa1209b597e311a32360431c63526e27b814fb"
 dependencies = [
  "async-trait",
- "chrono",
  "reqwest",
  "serde",
  "serde_json",
  "thiserror",
+ "time 0.3.7",
 ]
 
 [[package]]
@@ -2976,6 +3015,12 @@ dependencies = [
  "path-slash",
 ]
 
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
 [[package]]
 name = "strsim"
 version = "0.10.0"
@@ -3147,6 +3192,7 @@ dependencies = [
  "itoa 1.0.1",
  "libc",
  "num_threads",
+ "serde",
  "time-macros",
 ]


@@ -7,9 +7,9 @@ edition = "2021"
 enum-iterator = "0.7.0"
 heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
 sha2 = "0.9.6"
-chrono = { version = "0.4.19", features = ["serde"] }
 meilisearch-error = { path = "../meilisearch-error" }
 serde_json = { version = "1.0.67", features = ["preserve_order"] }
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 rand = "0.8.4"
 serde = { version = "1.0.130", features = ["derive"] }
 thiserror = "1.0.28"


@@ -10,13 +10,13 @@ pub type Result<T> = std::result::Result<T, AuthControllerError>;
 pub enum AuthControllerError {
     #[error("`{0}` field is mandatory.")]
     MissingParameter(&'static str),
-    #[error("actions field value `{0}` is invalid. It should be an array of string representing action names.")]
+    #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
     InvalidApiKeyActions(Value),
-    #[error("indexes field value `{0}` is invalid. It should be an array of string representing index names.")]
+    #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
     InvalidApiKeyIndexes(Value),
-    #[error("expiresAt field value `{0}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.")]
+    #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
     InvalidApiKeyExpiresAt(Value),
-    #[error("description field value `{0}` is invalid. It should be a string or specified as a null value.")]
+    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
     InvalidApiKeyDescription(Value),
     #[error("API key `{0}` not found.")]
     ApiKeyNotFound(String),


@@ -1,10 +1,12 @@
 use crate::action::Action;
 use crate::error::{AuthControllerError, Result};
 use crate::store::{KeyId, KEY_ID_LENGTH};
-use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};
 use rand::Rng;
 use serde::{Deserialize, Serialize};
 use serde_json::{from_value, Value};
+use time::format_description::well_known::Rfc3339;
+use time::macros::{format_description, time};
+use time::{Date, OffsetDateTime, PrimitiveDateTime};
 
 #[derive(Debug, Deserialize, Serialize)]
 pub struct Key {
@@ -13,9 +15,12 @@ pub struct Key {
     pub id: KeyId,
     pub actions: Vec<Action>,
     pub indexes: Vec<String>,
-    pub expires_at: Option<DateTime<Utc>>,
-    pub created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub expires_at: Option<OffsetDateTime>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
 }
 
 impl Key {
@@ -52,8 +57,8 @@ impl Key {
             .map(parse_expiration_date)
             .ok_or(AuthControllerError::MissingParameter("expiresAt"))??;
 
-        let created_at = Utc::now();
-        let updated_at = Utc::now();
+        let created_at = OffsetDateTime::now_utc();
+        let updated_at = created_at;
 
         Ok(Self {
             description,
@@ -89,24 +94,26 @@ impl Key {
             self.expires_at = parse_expiration_date(exp)?;
         }
 
-        self.updated_at = Utc::now();
+        self.updated_at = OffsetDateTime::now_utc();
 
         Ok(())
     }
 
     pub(crate) fn default_admin() -> Self {
+        let now = OffsetDateTime::now_utc();
         Self {
             description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
             id: generate_id(),
             actions: vec![Action::All],
             indexes: vec!["*".to_string()],
             expires_at: None,
-            created_at: Utc::now(),
-            updated_at: Utc::now(),
+            created_at: now,
+            updated_at: now,
         }
     }
 
     pub(crate) fn default_search() -> Self {
+        let now = OffsetDateTime::now_utc();
         Self {
             description: Some(
                 "Default Search API Key (Use it to search from the frontend)".to_string(),
@@ -115,8 +122,8 @@ impl Key {
             actions: vec![Action::Search],
             indexes: vec!["*".to_string()],
             expires_at: None,
-            created_at: Utc::now(),
-            updated_at: Utc::now(),
+            created_at: now,
+            updated_at: now,
         }
     }
 }
@@ -134,22 +141,34 @@ fn generate_id() -> [u8; KEY_ID_LENGTH] {
     bytes
 }
 
-fn parse_expiration_date(value: &Value) -> Result<Option<DateTime<Utc>>> {
+fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
     match value {
-        Value::String(string) => DateTime::parse_from_rfc3339(string)
-            .map(|d| d.into())
+        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
             .or_else(|_| {
-                NaiveDateTime::parse_from_str(string, "%Y-%m-%dT%H:%M:%S")
-                    .map(|naive| DateTime::from_utc(naive, Utc))
+                PrimitiveDateTime::parse(
+                    string,
+                    format_description!(
+                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
+                    ),
+                ).map(|datetime| datetime.assume_utc())
             })
             .or_else(|_| {
-                NaiveDate::parse_from_str(string, "%Y-%m-%d")
-                    .map(|naive| DateTime::from_utc(naive.and_hms(0, 0, 0), Utc))
+                PrimitiveDateTime::parse(
+                    string,
+                    format_description!(
+                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
+                    ),
+                ).map(|datetime| datetime.assume_utc())
+            })
+            .or_else(|_| {
+                Date::parse(string, format_description!(
+                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
+                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
             })
             .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
             // check if the key is already expired.
             .and_then(|d| {
-                if d > Utc::now() {
+                if d > OffsetDateTime::now_utc() {
                     Ok(d)
                 } else {
                     Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
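The fallback chain above accepts a full RFC 3339 timestamp, a naive datetime, or a bare date, in that order. A minimal self-contained sketch of the same cascade (the `parse` helper here is hypothetical, standing in for `parse_expiration_date` without the error mapping and the expiry check; assumes `time` 0.3 with the `macros` and `parsing` features):

```rust
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};

// Hypothetical stand-in for parse_expiration_date's format cascade.
fn parse(s: &str) -> Option<OffsetDateTime> {
    OffsetDateTime::parse(s, &Rfc3339)
        .or_else(|_| {
            // naive datetime, assumed to be UTC
            PrimitiveDateTime::parse(
                s,
                format_description!("[year]-[month]-[day]T[hour]:[minute]:[second]"),
            )
            .map(|dt| dt.assume_utc())
        })
        .or_else(|_| {
            // bare date, interpreted as midnight UTC
            Date::parse(s, format_description!("[year]-[month]-[day]"))
                .map(|d| PrimitiveDateTime::new(d, time!(00:00)).assume_utc())
        })
        .ok()
}

fn main() {
    assert!(parse("2024-01-01T00:00:00Z").is_some()); // RFC 3339
    assert!(parse("2024-01-01T00:00:00").is_some());  // naive datetime
    assert!(parse("2024-01-01").is_some());           // bare date
    assert!(parse("January 1st").is_none());          // rejected
}
```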


@@ -9,10 +9,10 @@ use std::path::Path;
 use std::str::from_utf8;
 use std::sync::Arc;
 
-use chrono::Utc;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use sha2::{Digest, Sha256};
+use time::OffsetDateTime;
 
 pub use action::{actions, Action};
 use error::{AuthControllerError, Result};
@@ -148,7 +148,7 @@ impl AuthController {
             None => self.store.prefix_first_expiration_date(key, action)?,
         }) {
             // check expiration date.
-            Some(Some(exp)) => Ok(Utc::now() < exp),
+            Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
             // no expiration date.
             Some(None) => Ok(true),
             // action or index forbidden.


@@ -8,9 +8,9 @@ use std::path::Path;
 use std::str;
 use std::sync::Arc;
 
-use chrono::{DateTime, Utc};
 use heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
 use heed::{Database, Env, EnvOpenOptions, RwTxn};
+use time::OffsetDateTime;
 
 use super::error::Result;
 use super::{Action, Key};
@@ -27,7 +27,7 @@ pub type KeyId = [u8; KEY_ID_LENGTH];
 pub struct HeedAuthStore {
     env: Arc<Env>,
     keys: Database<ByteSlice, SerdeJson<Key>>,
-    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<DateTime<Utc>>>>,
+    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>,
     should_close_on_drop: bool,
 }
@@ -146,7 +146,7 @@ impl HeedAuthStore {
         key: &[u8],
         action: Action,
         index: Option<&[u8]>,
-    ) -> Result<Option<Option<DateTime<Utc>>>> {
+    ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
         match self.get_key_id(key) {
             Some(id) => {
@@ -161,7 +161,7 @@ impl HeedAuthStore {
         &self,
         key: &[u8],
         action: Action,
-    ) -> Result<Option<Option<DateTime<Utc>>>> {
+    ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
         match self.get_key_id(key) {
             Some(id) => {


@@ -32,7 +32,6 @@ async-trait = "0.1.51"
 bstr = "0.2.17"
 byte-unit = { version = "4.0.12", default-features = false, features = ["std", "serde"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
 crossbeam-channel = "0.5.1"
 either = "1.6.1"
 env_logger = "0.9.0"
@@ -43,6 +42,7 @@ futures-util = "0.3.17"
 heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
 http = "0.2.4"
 indexmap = { version = "1.7.0", features = ["serde-1"] }
+iso8601-duration = "0.1.0"
 itertools = "0.10.1"
 jsonwebtoken = "7"
 log = "0.4.14"
@@ -61,7 +61,7 @@ rayon = "1.5.1"
 regex = "1.5.4"
 rustls = "0.20.2"
 rustls-pemfile = "0.2"
-segment = { version = "0.1.2", optional = true }
+segment = { version = "0.2.0", optional = true }
 serde = { version = "1.0.130", features = ["derive"] }
 serde_json = { version = "1.0.67", features = ["preserve_order"] }
 sha2 = "0.9.6"
@@ -73,6 +73,7 @@ sysinfo = "0.20.2"
 tar = "0.4.37"
 tempfile = "3.2.0"
 thiserror = "1.0.28"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.11.0", features = ["full"] }
 tokio-stream = "0.1.7"
 uuid = { version = "0.8.2", features = ["serde"] }


@@ -6,7 +6,6 @@ use std::time::{Duration, Instant};
 
 use actix_web::http::header::USER_AGENT;
 use actix_web::HttpRequest;
-use chrono::{DateTime, Utc};
 use http::header::CONTENT_TYPE;
 use meilisearch_auth::SearchRules;
 use meilisearch_lib::index::{SearchQuery, SearchResult};
@@ -18,6 +17,7 @@ use segment::message::{Identify, Track, User};
 use segment::{AutoBatcher, Batcher, HttpClient};
 use serde_json::{json, Value};
 use sysinfo::{DiskExt, System, SystemExt};
+use time::OffsetDateTime;
 use tokio::select;
 use tokio::sync::mpsc::{self, Receiver, Sender};
 use uuid::Uuid;
@@ -323,7 +323,7 @@ impl Segment {
 
 #[derive(Default)]
 pub struct SearchAggregator {
-    timestamp: Option<DateTime<Utc>>,
+    timestamp: Option<OffsetDateTime>,
 
     // context
     user_agents: HashSet<String>,
@@ -360,7 +360,7 @@ pub struct SearchAggregator {
 impl SearchAggregator {
     pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self {
         let mut ret = Self::default();
-        ret.timestamp = Some(chrono::offset::Utc::now());
+        ret.timestamp = Some(OffsetDateTime::now_utc());
 
         ret.total_received = 1;
         ret.user_agents = extract_user_agents(request).into_iter().collect();
@@ -504,7 +504,7 @@ impl SearchAggregator {
 
 #[derive(Default)]
 pub struct DocumentsAggregator {
-    timestamp: Option<DateTime<Utc>>,
+    timestamp: Option<OffsetDateTime>,
 
     // set to true when at least one request was received
     updated: bool,
@@ -524,7 +524,7 @@ impl DocumentsAggregator {
         request: &HttpRequest,
     ) -> Self {
         let mut ret = Self::default();
-        ret.timestamp = Some(chrono::offset::Utc::now());
+        ret.timestamp = Some(OffsetDateTime::now_utc());
 
         ret.updated = true;
         ret.user_agents = extract_user_agents(request).into_iter().collect();


@@ -94,10 +94,10 @@ pub trait Policy {
 }
 
 pub mod policies {
-    use chrono::Utc;
     use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation};
     use once_cell::sync::Lazy;
     use serde::{Deserialize, Serialize};
+    use time::OffsetDateTime;
 
     use crate::extractors::authentication::Policy;
     use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
@@ -183,7 +183,7 @@ pub mod policies {
 
             // Check if token is expired.
             if let Some(exp) = exp {
-                if Utc::now().timestamp() > exp {
+                if OffsetDateTime::now_utc().unix_timestamp() > exp {
                     return None;
                 }
             }


@@ -1,11 +1,11 @@
 use std::str;
 
 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::SecondsFormat;
 use meilisearch_auth::{Action, AuthController, Key};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+use time::OffsetDateTime;
 
 use crate::extractors::authentication::{policies::*, GuardedData};
 use meilisearch_error::ResponseError;
@@ -92,9 +92,12 @@ struct KeyView {
     key: String,
     actions: Vec<Action>,
     indexes: Vec<String>,
-    expires_at: Option<String>,
-    created_at: String,
-    updated_at: String,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    expires_at: Option<OffsetDateTime>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
 }
 
 impl KeyView {
@@ -107,11 +110,9 @@ impl KeyView {
             key: generated_key,
             actions: key.actions,
             indexes: key.indexes,
-            expires_at: key
-                .expires_at
-                .map(|dt| dt.to_rfc3339_opts(SecondsFormat::Secs, true)),
-            created_at: key.created_at.to_rfc3339_opts(SecondsFormat::Secs, true),
-            updated_at: key.updated_at.to_rfc3339_opts(SecondsFormat::Secs, true),
+            expires_at: key.expires_at,
+            created_at: key.created_at,
+            updated_at: key.updated_at,
         }
     }
 }
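With `time::serde::rfc3339`, serialization to RFC 3339 strings now happens in serde itself, so the manual `to_rfc3339_opts` calls disappear. A minimal sketch of the pattern (the struct here is illustrative, not the actual `KeyView`; assumes `time` with the `serde-well-known` feature plus `serde` and `serde_json`):

```rust
use serde::Serialize;
use time::OffsetDateTime;

#[derive(Serialize)]
struct Stamped {
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    created_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    expires_at: Option<OffsetDateTime>,
}

fn main() {
    let v = Stamped {
        created_at: OffsetDateTime::now_utc(),
        expires_at: None,
    };
    // Prints something like:
    // {"created_at":"2022-02-17T14:12:23.123456Z","expires_at":null}
    println!("{}", serde_json::to_string(&v).unwrap());
}
```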


@@ -1,11 +1,11 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
+use time::OffsetDateTime;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
@@ -95,9 +95,12 @@ pub struct UpdateIndexRequest {
 pub struct UpdateIndexResponse {
     name: String,
     uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
 }
 
 pub async fn get_index(


@@ -1,10 +1,10 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use meilisearch_error::ResponseError;
 use meilisearch_lib::MeiliSearch;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
+use time::OffsetDateTime;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
@@ -20,9 +20,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 pub struct UpdateIndexResponse {
     name: String,
     uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
 }
 
 #[derive(Deserialize)]


@@ -1,7 +1,7 @@
 use actix_web::{web, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 
 use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
@@ -54,8 +54,10 @@ pub struct ProcessedUpdateResult {
     #[serde(rename = "type")]
     pub update_type: UpdateType,
     pub duration: f64, // in seconds
-    pub enqueued_at: DateTime<Utc>,
-    pub processed_at: DateTime<Utc>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    pub processed_at: OffsetDateTime,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -66,8 +68,10 @@ pub struct FailedUpdateResult {
     pub update_type: UpdateType,
     pub error: ResponseError,
     pub duration: f64, // in seconds
-    pub enqueued_at: DateTime<Utc>,
-    pub processed_at: DateTime<Utc>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    pub processed_at: OffsetDateTime,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -76,9 +80,13 @@ pub struct EnqueuedUpdateResult {
     pub update_id: u64,
     #[serde(rename = "type")]
     pub update_type: UpdateType,
-    pub enqueued_at: DateTime<Utc>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub started_processing_at: Option<DateTime<Utc>>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub started_processing_at: Option<OffsetDateTime>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]


@@ -1,4 +1,6 @@
-use chrono::{DateTime, Duration, Utc};
+use std::fmt::Write;
+use std::write;
+
 use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::milli::update::IndexDocumentsMethod;
@@ -7,6 +9,7 @@ use meilisearch_lib::tasks::task::{
     DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
 };
 use serde::{Serialize, Serializer};
+use time::{Duration, OffsetDateTime};
 
 use crate::AUTOBATCHING_ENABLED;
@@ -79,14 +82,52 @@ enum TaskDetails {
     ClearAll { deleted_documents: Option<u64> },
 }
 
+/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
+/// https://github.com/time-rs/time/issues/378.
+/// This code is a port of the old code of time that was removed in 0.2.
 fn serialize_duration<S: Serializer>(
     duration: &Option<Duration>,
     serializer: S,
 ) -> Result<S::Ok, S::Error> {
     match duration {
         Some(duration) => {
-            let duration_str = duration.to_string();
-            serializer.serialize_str(&duration_str)
+            // technically speaking, negative duration is not valid ISO 8601
+            if duration.is_negative() {
+                return serializer.serialize_none();
+            }
+
+            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
+            let secs = duration.whole_seconds();
+            let days = secs / SECS_PER_DAY;
+            let secs = secs - days * SECS_PER_DAY;
+            let hasdate = days != 0;
+            let nanos = duration.subsec_nanoseconds();
+            let hastime = (secs != 0 || nanos != 0) || !hasdate;
+
+            // all the following unwrap can't fail
+            let mut res = String::new();
+            write!(&mut res, "P").unwrap();
+
+            if hasdate {
+                write!(&mut res, "{}D", days).unwrap();
+            }
+
+            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
+            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();
+
+            if hastime {
+                if nanos == 0 {
+                    write!(&mut res, "T{}S", secs).unwrap();
+                } else if nanos % NANOS_PER_MILLI == 0 {
+                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
+                } else if nanos % NANOS_PER_MICRO == 0 {
+                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
+                } else {
+                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
+                }
+            }
+
+            serializer.serialize_str(&res)
         }
         None => serializer.serialize_none(),
     }
@@ -106,9 +147,12 @@ pub struct TaskView {
     error: Option<ResponseError>,
     #[serde(serialize_with = "serialize_duration")]
     duration: Option<Duration>,
-    enqueued_at: DateTime<Utc>,
-    started_at: Option<DateTime<Utc>>,
-    finished_at: Option<DateTime<Utc>>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    enqueued_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    started_at: Option<OffsetDateTime>,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    finished_at: Option<OffsetDateTime>,
     #[serde(skip_serializing_if = "Option::is_none")]
     batch_uid: Option<Option<BatchId>>,
 }
@@ -302,7 +346,8 @@ pub struct SummarizedTaskView {
     status: TaskStatus,
     #[serde(rename = "type")]
     task_type: TaskType,
-    enqueued_at: DateTime<Utc>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    enqueued_at: OffsetDateTime,
 }
 
 impl From<Task> for SummarizedTaskView {
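For intuition about `serialize_duration`, here is a self-contained sketch of the same formatting logic with a few expected outputs (serde plumbing and the negative-duration guard stripped; assumes `time` 0.3):

```rust
use std::fmt::Write;
use time::Duration;

// Mirrors the day/second/nanosecond split in serialize_duration above.
fn iso8601(d: Duration) -> String {
    let days = d.whole_seconds() / 86_400;
    let secs = d.whole_seconds() - days * 86_400;
    let nanos = d.subsec_nanoseconds();
    let mut out = String::from("P");
    if days != 0 {
        write!(out, "{}D", days).unwrap();
    }
    // Emit a time part unless the duration is a whole number of days.
    if secs != 0 || nanos != 0 || days == 0 {
        match nanos {
            0 => write!(out, "T{}S", secs).unwrap(),
            n if n % 1_000_000 == 0 => write!(out, "T{}.{:03}S", secs, n / 1_000_000).unwrap(),
            n if n % 1_000 == 0 => write!(out, "T{}.{:06}S", secs, n / 1_000).unwrap(),
            n => write!(out, "T{}.{:09}S", secs, n).unwrap(),
        }
    }
    out
}

fn main() {
    assert_eq!(iso8601(Duration::seconds(61)), "PT61S");
    assert_eq!(iso8601(Duration::days(2)), "P2D");
    assert_eq!(iso8601(Duration::milliseconds(1500)), "PT1.500S");
    assert_eq!(iso8601(Duration::new(90_061, 300)), "P1DT3661.000000300S");
}
```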


@@ -316,7 +316,7 @@ async fn error_add_api_key_invalid_parameters_description() {
     let (response, code) = server.add_api_key(content).await;
 
     let expected_response = json!({
-        "message": r#"description field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#,
+        "message": r#"`description` field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#,
         "code": "invalid_api_key_description",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_description"
@@ -342,7 +342,7 @@ async fn error_add_api_key_invalid_parameters_indexes() {
     let (response, code) = server.add_api_key(content).await;
 
     let expected_response = json!({
-        "message": r#"indexes field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#,
+        "message": r#"`indexes` field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#,
        "code": "invalid_api_key_indexes",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -366,7 +366,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     let (response, code) = server.add_api_key(content).await;
 
     let expected_response = json!({
-        "message": r#"actions field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#,
+        "message": r#"`actions` field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#,
         "code": "invalid_api_key_actions",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -386,7 +386,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     let (response, code) = server.add_api_key(content).await;
 
     let expected_response = json!({
-        "message": r#"actions field value `["doc.add"]` is invalid. It should be an array of string representing action names."#,
+        "message": r#"`actions` field value `["doc.add"]` is invalid. It should be an array of string representing action names."#,
         "code": "invalid_api_key_actions",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -412,7 +412,7 @@ async fn error_add_api_key_invalid_parameters_expires_at() {
     let (response, code) = server.add_api_key(content).await;
 
     let expected_response = json!({
-        "message": r#"expiresAt field value `{"name":"products"}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#,
+        "message": r#"`expiresAt` field value `{"name":"products"}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#,
         "code": "invalid_api_key_expires_at",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
@@ -438,7 +438,7 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() {
     let (response, code) = server.add_api_key(content).await;
 
     let expected_response = json!({
-        "message": r#"expiresAt field value `"2010-11-13T00:00:00Z"` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#,
+        "message": r#"`expiresAt` field value `"2010-11-13T00:00:00Z"` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#,
         "code": "invalid_api_key_expires_at",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
@@ -1213,7 +1213,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;
 
     let expected_response = json!({
-        "message": "description field value `13` is invalid. It should be a string or specified as a null value.",
+        "message": "`description` field value `13` is invalid. It should be a string or specified as a null value.",
         "code": "invalid_api_key_description",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_description"
@@ -1230,7 +1230,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;
 
     let expected_response = json!({
-        "message": "indexes field value `13` is invalid. It should be an array of string representing index names.",
+        "message": "`indexes` field value `13` is invalid. It should be an array of string representing index names.",
         "code": "invalid_api_key_indexes",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -1246,7 +1246,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;
 
     let expected_response = json!({
-        "message": "actions field value `13` is invalid. It should be an array of string representing action names.",
+        "message": "`actions` field value `13` is invalid. It should be an array of string representing action names.",
         "code": "invalid_api_key_actions",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -1262,7 +1262,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;
 
     let expected_response = json!({
-        "message": "expiresAt field value `13` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.",
+        "message": "`expiresAt` field value `13` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.",
         "code": "invalid_api_key_expires_at",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"


@@ -1,9 +1,10 @@
 use crate::common::Server;
-use chrono::{Duration, Utc};
+use ::time::format_description::well_known::Rfc3339;
 use maplit::{hashmap, hashset};
 use once_cell::sync::Lazy;
 use serde_json::{json, Value};
 use std::collections::{HashMap, HashSet};
+use time::{Duration, OffsetDateTime};
 
 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
     Lazy::new(|| {
@@ -76,7 +77,7 @@ async fn error_access_expired_key() {
     let content = json!({
         "indexes": ["products"],
         "actions": ALL_ACTIONS.clone(),
-        "expiresAt": (Utc::now() + Duration::seconds(1)),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
     });
 
     let (response, code) = server.add_api_key(content).await;
@@ -106,7 +107,7 @@ async fn error_access_unauthorized_index() {
     let content = json!({
         "indexes": ["sales"],
         "actions": ALL_ACTIONS.clone(),
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
 
     let (response, code) = server.add_api_key(content).await;
@@ -137,7 +138,7 @@ async fn error_access_unauthorized_action() {
     let content = json!({
         "indexes": ["products"],
         "actions": [],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
 
     let (response, code) = server.add_api_key(content).await;
@@ -174,7 +175,7 @@ async fn access_authorized_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": [],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
 
     let (response, code) = server.add_api_key(content).await;
@@ -213,7 +214,7 @@ async fn access_authorized_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": [],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
 
     let (response, code) = server.add_api_key(content).await;
@@ -263,7 +264,7 @@ async fn access_authorized_stats_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": ["stats.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -303,7 +304,7 @@ async fn access_authorized_stats_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": ["stats.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -343,7 +344,7 @@ async fn list_authorized_indexes_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": ["indexes.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -384,7 +385,7 @@ async fn list_authorized_indexes_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": ["indexes.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -424,7 +425,7 @@ async fn list_authorized_tasks_restricted_index() {
     let content = json!({
         "indexes": ["products"],
        "actions": ["tasks.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -464,7 +465,7 @@ async fn list_authorized_tasks_no_index_restriction() {
     let content = json!({
        "indexes": ["*"],
         "actions": ["tasks.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
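Every key payload in these tests now builds `expiresAt` the same way; a small sketch of the pattern (the `expires_in` helper is hypothetical, not part of the PR):

```rust
use time::format_description::well_known::Rfc3339;
use time::{Duration, OffsetDateTime};

// Hypothetical test helper: an RFC 3339 expiry `d` from now, for JSON payloads.
fn expires_in(d: Duration) -> String {
    (OffsetDateTime::now_utc() + d)
        .format(&Rfc3339)
        .expect("formatting a UTC datetime as RFC 3339 cannot fail")
}

fn main() {
    // e.g. "2022-02-17T15:12:23.123456789Z" (one hour ahead of now)
    println!("{}", expires_in(Duration::hours(1)));
}
```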


@ -1,9 +1,10 @@
use crate::common::Server; use crate::common::Server;
use chrono::{Duration, Utc}; use ::time::format_description::well_known::Rfc3339;
use maplit::hashmap; use maplit::hashmap;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use serde_json::{json, Value}; use serde_json::{json, Value};
use std::collections::HashMap; use std::collections::HashMap;
use time::{Duration, OffsetDateTime};
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS}; use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
@ -63,22 +64,22 @@ static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
json!({ json!({
"indexes": ["*"], "indexes": ["*"],
"actions": ["*"], "actions": ["*"],
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
json!({ json!({
"indexes": ["*"], "indexes": ["*"],
"actions": ["search"], "actions": ["search"],
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
json!({ json!({
"indexes": ["sales"], "indexes": ["sales"],
"actions": ["*"], "actions": ["*"],
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
json!({ json!({
"indexes": ["sales"], "indexes": ["sales"],
"actions": ["search"], "actions": ["search"],
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
] ]
}); });
@ -89,23 +90,23 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
json!({ json!({
"indexes": ["*"], "indexes": ["*"],
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(), "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
json!({ json!({
"indexes": ["sales"], "indexes": ["sales"],
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(), "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
// bad index // bad index
json!({ json!({
"indexes": ["products"], "indexes": ["products"],
"actions": ["*"], "actions": ["*"],
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
json!({ json!({
"indexes": ["products"], "indexes": ["products"],
"actions": ["search"], "actions": ["search"],
"expiresAt": Utc::now() + Duration::days(1) "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}), }),
] ]
}); });
@ -204,19 +205,19 @@ async fn search_authorized_simple_token() {
let tenant_tokens = vec![ let tenant_tokens = vec![
hashmap! { hashmap! {
"searchRules" => json!({"*": {}}), "searchRules" => json!({"*": {}}),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!(["*"]), "searchRules" => json!(["*"]),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({"sales": {}}), "searchRules" => json!({"sales": {}}),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!(["sales"]), "searchRules" => json!(["sales"]),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({"*": {}}), "searchRules" => json!({"*": {}}),
@ -253,19 +254,19 @@ async fn search_authorized_filter_token() {
let tenant_tokens = vec![ let tenant_tokens = vec![
hashmap! { hashmap! {
"searchRules" => json!({"*": {"filter": "color = blue"}}), "searchRules" => json!({"*": {"filter": "color = blue"}}),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({"sales": {"filter": "color = blue"}}), "searchRules" => json!({"sales": {"filter": "color = blue"}}),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({"*": {"filter": ["color = blue"]}}), "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}), "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
// filter on sales should override filters on * // filter on sales should override filters on *
hashmap! { hashmap! {
@ -273,28 +274,28 @@ async fn search_authorized_filter_token() {
"*": {"filter": "color = green"}, "*": {"filter": "color = green"},
"sales": {"filter": "color = blue"} "sales": {"filter": "color = blue"}
}), }),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({ "searchRules" => json!({
"*": {}, "*": {},
"sales": {"filter": "color = blue"} "sales": {"filter": "color = blue"}
}), }),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({ "searchRules" => json!({
"*": {"filter": "color = green"}, "*": {"filter": "color = green"},
"sales": {"filter": ["color = blue"]} "sales": {"filter": ["color = blue"]}
}), }),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
hashmap! { hashmap! {
"searchRules" => json!({ "searchRules" => json!({
"*": {}, "*": {},
"sales": {"filter": ["color = blue"]} "sales": {"filter": ["color = blue"]}
}), }),
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
}, },
]; ];
@@ -307,19 +308,19 @@ async fn filter_search_authorized_filter_token() {
 let tenant_tokens = vec![
 hashmap! {
 "searchRules" => json!({"*": {"filter": "color = blue"}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"sales": {"filter": "color = blue"}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 // filter on sales should override filters on *
 hashmap! {
@@ -327,28 +328,28 @@ async fn filter_search_authorized_filter_token() {
 "*": {"filter": "color = green"},
 "sales": {"filter": "color = blue"}
 }),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({
 "*": {},
 "sales": {"filter": "color = blue"}
 }),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({
 "*": {"filter": "color = green"},
 "sales": {"filter": ["color = blue"]}
 }),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({
 "*": {},
 "sales": {"filter": ["color = blue"]}
 }),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 ];
@@ -361,27 +362,27 @@ async fn error_search_token_forbidden_parent_key() {
 let tenant_tokens = vec![
 hashmap! {
 "searchRules" => json!({"*": {}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"*": Value::Null}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!(["*"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"sales": {}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"sales": Value::Null}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!(["sales"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 ];
@@ -395,11 +396,11 @@ async fn error_search_forbidden_token() {
 // bad index
 hashmap! {
 "searchRules" => json!({"products": {}}),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!(["products"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"products": {}}),
@@ -416,27 +417,27 @@ async fn error_search_forbidden_token() {
 // expired token
 hashmap! {
 "searchRules" => json!({"*": {}}),
-"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"*": Value::Null}),
-"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!(["*"]),
-"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"sales": {}}),
-"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!({"sales": Value::Null}),
-"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
 },
 hashmap! {
 "searchRules" => json!(["sales"]),
-"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
 },
 ];
@@ -452,7 +453,7 @@ async fn error_access_forbidden_routes() {
 let content = json!({
 "indexes": ["*"],
 "actions": ["*"],
-"expiresAt": (Utc::now() + Duration::hours(1)),
+"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
 });
 let (response, code) = server.add_api_key(content).await;
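chrono's `DateTime<Utc>` serializes to an RFC 3339 string through serde, so the old tests could drop it straight into `json!`; `OffsetDateTime` is formatted explicitly instead, and `format` returns a `Result`, hence the `.unwrap()`. A minimal sketch of that call (assuming `time = "0.3"` with the `formatting` feature):

    use time::{format_description::well_known::Rfc3339, Duration, OffsetDateTime};

    fn main() {
        // Produces an RFC 3339 string such as "2022-02-17T15:12:23.123456789Z".
        let expires_at = (OffsetDateTime::now_utc() + Duration::hours(1))
            .format(&Rfc3339)
            .unwrap();
        println!("expiresAt: {expires_at}");
    }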
@@ -463,7 +464,7 @@ async fn error_access_forbidden_routes() {
 let tenant_token = hashmap! {
 "searchRules" => json!(["*"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 };
 let web_token = generate_tenant_token(&key, tenant_token);
 server.use_api_key(&web_token);
@@ -487,7 +488,7 @@ async fn error_access_expired_parent_key() {
 let content = json!({
 "indexes": ["*"],
 "actions": ["*"],
-"expiresAt": (Utc::now() + Duration::seconds(1)),
+"expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
 });
 let (response, code) = server.add_api_key(content).await;
@@ -498,7 +499,7 @@ async fn error_access_expired_parent_key() {
 let tenant_token = hashmap! {
 "searchRules" => json!(["*"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 };
 let web_token = generate_tenant_token(&key, tenant_token);
 server.use_api_key(&web_token);
@@ -529,7 +530,7 @@ async fn error_access_modified_token() {
 let content = json!({
 "indexes": ["*"],
 "actions": ["*"],
-"expiresAt": (Utc::now() + Duration::hours(1)),
+"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
 });
 let (response, code) = server.add_api_key(content).await;
@@ -540,7 +541,7 @@ async fn error_access_modified_token() {
 let tenant_token = hashmap! {
 "searchRules" => json!(["products"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 };
 let web_token = generate_tenant_token(&key, tenant_token);
 server.use_api_key(&web_token);
@@ -554,7 +555,7 @@ async fn error_access_modified_token() {
 let tenant_token = hashmap! {
 "searchRules" => json!(["*"]),
-"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
 };
 let alt = generate_tenant_token(&key, tenant_token);

@@ -1,8 +1,8 @@
 use crate::common::{GetAllDocumentsOptions, Server};
 use actix_web::test;
-use chrono::DateTime;
 use meilisearch_http::{analytics, create_app};
 use serde_json::{json, Value};
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};
 /// This is the basic usage of our API and every other tests uses the content-type application/json
 #[actix_rt::test]
@@ -568,9 +568,9 @@ async fn add_documents_no_index_creation() {
 assert_eq!(response["details"]["indexedDocuments"], 1);
 let processed_at =
-DateTime::parse_from_rfc3339(response["finishedAt"].as_str().unwrap()).unwrap();
+OffsetDateTime::parse(response["finishedAt"].as_str().unwrap(), &Rfc3339).unwrap();
 let enqueued_at =
-DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap();
+OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap();
 assert!(processed_at > enqueued_at);
 // index was created, and primary key was infered.
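The parsing side of the migration: chrono's `DateTime::parse_from_rfc3339` maps to `OffsetDateTime::parse` with the well-known `Rfc3339` description passed explicitly. A small sketch with hard-coded timestamps (the values are illustrative):

    use time::{format_description::well_known::Rfc3339, OffsetDateTime};

    fn main() {
        let enqueued_at = OffsetDateTime::parse("2022-02-17T14:12:22Z", &Rfc3339).unwrap();
        let processed_at = OffsetDateTime::parse("2022-02-17T14:12:23Z", &Rfc3339).unwrap();
        // `OffsetDateTime` is totally ordered, so the assertion carries over unchanged.
        assert!(processed_at > enqueued_at);
    }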

@@ -1,6 +1,6 @@
 use crate::common::Server;
-use chrono::DateTime;
 use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};
 #[actix_rt::test]
 async fn update_primary_key() {
@@ -25,8 +25,10 @@ async fn update_primary_key() {
 assert!(response.get("createdAt").is_some());
 assert!(response.get("updatedAt").is_some());
-let created_at = DateTime::parse_from_rfc3339(response["createdAt"].as_str().unwrap()).unwrap();
-let updated_at = DateTime::parse_from_rfc3339(response["updatedAt"].as_str().unwrap()).unwrap();
+let created_at =
+OffsetDateTime::parse(response["createdAt"].as_str().unwrap(), &Rfc3339).unwrap();
+let updated_at =
+OffsetDateTime::parse(response["updatedAt"].as_str().unwrap(), &Rfc3339).unwrap();
 assert!(created_at < updated_at);
 assert_eq!(response["primaryKey"], "primary");

@@ -1,6 +1,7 @@
 use crate::common::Server;
-use chrono::{DateTime, Utc};
 use serde_json::json;
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
 #[actix_rt::test]
 async fn error_get_task_unexisting_index() {
@@ -98,7 +99,8 @@ macro_rules! assert_valid_summarized_task {
 assert_eq!($response["status"], "enqueued");
 assert_eq!($response["type"], $task_type);
 let date = $response["enqueuedAt"].as_str().expect("missing date");
-date.parse::<DateTime<Utc>>().unwrap();
+OffsetDateTime::parse(date, &Rfc3339).unwrap();
 }};
 }

@@ -12,7 +12,6 @@ async-stream = "0.3.2"
 async-trait = "0.1.51"
 byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
 csv = "1.1.6"
 crossbeam-channel = "0.5.1"
 either = "1.6.1"
@@ -28,7 +27,7 @@ lazy_static = "1.4.0"
 log = "0.4.14"
 meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-auth = { path = "../meilisearch-auth" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.1" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.2" }
 mime = "0.3.16"
 num_cpus = "1.13.0"
 once_cell = "1.8.0"
@@ -45,6 +44,7 @@ clap = { version = "3.0", features = ["derive", "env"] }
 tar = "0.4.37"
 tempfile = "3.2.0"
 thiserror = "1.0.28"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.11.0", features = ["full"] }
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"
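The four time features map onto what the crate uses: `formatting` and `parsing` enable `OffsetDateTime::format`/`parse`, `macros` provides the compile-time checked `format_description!` and `datetime!` macros, and `serde-well-known` exposes the `time::serde::rfc3339` helpers seen below. A small sketch exercising the `macros` pieces (the format string here is illustrative):

    use time::macros::{datetime, format_description};

    fn main() {
        // Both macros are checked at compile time, so a malformed literal or
        // format description fails the build rather than panicking at runtime.
        let format = format_description!("[year][month][day]-[hour][minute][second]");
        let dt = datetime!(2022-02-17 14:12:23 UTC);
        assert_eq!(dt.format(&format).unwrap(), "20220217-141223");
    }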

@@ -5,12 +5,12 @@ use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;
-use chrono::{DateTime, Utc};
 use heed::{EnvOpenOptions, RoTxn};
 use milli::update::{IndexerConfig, Setting};
 use milli::{obkv_to_json, FieldDistribution, FieldId};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
+use time::OffsetDateTime;
 use uuid::Uuid;
 use crate::EnvSizer;
@@ -24,8 +24,10 @@ pub type Document = Map<String, Value>;
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexMeta {
-pub created_at: DateTime<Utc>,
-pub updated_at: DateTime<Utc>,
+#[serde(with = "time::serde::rfc3339")]
+pub created_at: OffsetDateTime,
+#[serde(with = "time::serde::rfc3339")]
+pub updated_at: OffsetDateTime,
 pub primary_key: Option<String>,
 }
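chrono's `DateTime<Utc>` serialized as an RFC 3339 string out of the box, but `OffsetDateTime`'s default serde representation is not RFC 3339, so each exposed field now opts in through `time::serde::rfc3339` (from the `serde-well-known` feature). A stripped-down sketch of the pattern (field set reduced for illustration; assumes `serde`, `serde_json`, and `time` in Cargo.toml):

    use serde::{Deserialize, Serialize};
    use time::OffsetDateTime;

    #[derive(Debug, Serialize, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct IndexMeta {
        #[serde(with = "time::serde::rfc3339")]
        created_at: OffsetDateTime,
        #[serde(with = "time::serde::rfc3339")]
        updated_at: OffsetDateTime,
    }

    fn main() {
        let now = OffsetDateTime::now_utc();
        let meta = IndexMeta { created_at: now, updated_at: now };
        // Prints {"createdAt":"…Z","updatedAt":"…Z"}.
        println!("{}", serde_json::to_string(&meta).unwrap());
    }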

@@ -3,9 +3,10 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use async_stream::stream;
-use chrono::Utc;
 use futures::{lock::Mutex, stream::StreamExt};
 use log::{error, trace};
+use time::macros::format_description;
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, oneshot, RwLock};
 use super::error::{DumpActorError, Result};
@@ -29,7 +30,9 @@ pub struct DumpActor {
 /// Generate uid from creation date
 fn generate_uid() -> String {
-Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
+OffsetDateTime::now_utc()
+.format(format_description!("[year][month][day]-[hour][minute][second][subsecond digits:3]"))
+.unwrap()
 }
 impl DumpActor {
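Note the format string: time's `format_description!` rejects chrono's `%`-style specifiers at compile time, so the bracketed components above are the equivalent description (`[year]` is four digits and zero-padded by default, and `[subsecond digits:3]` matches chrono's `%3f`). A runnable sketch of the generator under that assumption:

    use time::macros::format_description;
    use time::OffsetDateTime;

    /// Generate a uid such as "20220217-141223123" from the creation date.
    fn generate_uid() -> String {
        OffsetDateTime::now_utc()
            .format(format_description!(
                "[year][month][day]-[hour][minute][second][subsecond digits:3]"
            ))
            .unwrap()
    }

    fn main() {
        println!("dump uid: {}", generate_uid());
    }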

@@ -1,8 +1,8 @@
 use anyhow::bail;
-use chrono::{DateTime, Utc};
 use meilisearch_error::Code;
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use uuid::Uuid;
 use crate::index::{Settings, Unchecked};
@@ -51,7 +51,7 @@ pub enum UpdateMeta {
 pub struct Enqueued {
 pub update_id: u64,
 pub meta: UpdateMeta,
-pub enqueued_at: DateTime<Utc>,
+pub enqueued_at: OffsetDateTime,
 pub content: Option<Uuid>,
 }
@@ -59,7+59,7 @@ pub struct Enqueued {
 #[serde(rename_all = "camelCase")]
 pub struct Processed {
 pub success: UpdateResult,
-pub processed_at: DateTime<Utc>,
+pub processed_at: OffsetDateTime,
 #[serde(flatten)]
 pub from: Processing,
 }
@@ -69,7 +69,7 @@ pub struct Processed {
 pub struct Processing {
 #[serde(flatten)]
 pub from: Enqueued,
-pub started_processing_at: DateTime<Utc>,
+pub started_processing_at: OffsetDateTime,
 }
 #[derive(Debug, Serialize, Deserialize, Clone)]
@@ -77,7 +77,7 @@ pub struct Processing {
 pub struct Aborted {
 #[serde(flatten)]
 pub from: Enqueued,
-pub aborted_at: DateTime<Utc>,
+pub aborted_at: OffsetDateTime,
 }
 #[derive(Debug, Serialize, Deserialize)]
@@ -86,7 +86,7 @@ pub struct Failed {
 #[serde(flatten)]
 pub from: Processing,
 pub error: ResponseError,
-pub failed_at: DateTime<Utc>,
+pub failed_at: OffsetDateTime,
 }
 #[derive(Debug, Serialize, Deserialize)]

@@ -1,7 +1,7 @@
-use chrono::{DateTime, Utc};
 use meilisearch_error::{Code, ResponseError};
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use uuid::Uuid;
 use crate::index::{Settings, Unchecked};
@@ -107,7 +107,7 @@ pub enum UpdateMeta {
 pub struct Enqueued {
 pub update_id: u64,
 pub meta: Update,
-pub enqueued_at: DateTime<Utc>,
+pub enqueued_at: OffsetDateTime,
 }
 impl Enqueued {
@@ -122,7 +122,7 @@ impl Enqueued {
 #[serde(rename_all = "camelCase")]
 pub struct Processed {
 pub success: v2::UpdateResult,
-pub processed_at: DateTime<Utc>,
+pub processed_at: OffsetDateTime,
 #[serde(flatten)]
 pub from: Processing,
 }
@@ -144,7 +144,7 @@ impl Processed {
 pub struct Processing {
 #[serde(flatten)]
 pub from: Enqueued,
-pub started_processing_at: DateTime<Utc>,
+pub started_processing_at: OffsetDateTime,
 }
 impl Processing {
@@ -163,7 +163,7 @@ pub struct Failed {
 pub from: Processing,
 pub msg: String,
 pub code: Code,
-pub failed_at: DateTime<Utc>,
+pub failed_at: OffsetDateTime,
 }
 impl Failed {

@@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use anyhow::bail;
-use chrono::{DateTime, Utc};
 use log::{info, trace};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 pub use actor::DumpActor;
 pub use handle_impl::*;
@@ -40,7 +40,7 @@ pub struct Metadata {
 db_version: String,
 index_db_size: usize,
 update_db_size: usize,
-dump_date: DateTime<Utc>,
+dump_date: OffsetDateTime,
 }
 impl Metadata {
@@ -49,7 +49,7 @@ impl Metadata {
 db_version: env!("CARGO_PKG_VERSION").to_string(),
 index_db_size,
 update_db_size,
-dump_date: Utc::now(),
+dump_date: OffsetDateTime::now_utc(),
 }
 }
 }
@@ -144,7 +144,7 @@ impl MetadataVersion {
 }
 }
-pub fn dump_date(&self) -> Option<&DateTime<Utc>> {
+pub fn dump_date(&self) -> Option<&OffsetDateTime> {
 match self {
 MetadataVersion::V1(_) => None,
 MetadataVersion::V2(meta) | MetadataVersion::V3(meta) | MetadataVersion::V4(meta) => {
@@ -169,9 +169,13 @@ pub struct DumpInfo {
 pub status: DumpStatus,
 #[serde(skip_serializing_if = "Option::is_none")]
 pub error: Option<String>,
-started_at: DateTime<Utc>,
-#[serde(skip_serializing_if = "Option::is_none")]
-finished_at: Option<DateTime<Utc>>,
+#[serde(with = "time::serde::rfc3339")]
+started_at: OffsetDateTime,
+#[serde(
+skip_serializing_if = "Option::is_none",
+with = "time::serde::rfc3339::option"
+)]
+finished_at: Option<OffsetDateTime>,
 }
 impl DumpInfo {
@@ -180,19 +184,19 @@ impl DumpInfo {
 uid,
 status,
 error: None,
-started_at: Utc::now(),
+started_at: OffsetDateTime::now_utc(),
 finished_at: None,
 }
 }
 pub fn with_error(&mut self, error: String) {
 self.status = DumpStatus::Failed;
-self.finished_at = Some(Utc::now());
+self.finished_at = Some(OffsetDateTime::now_utc());
 self.error = Some(error);
 }
 pub fn done(&mut self) {
-self.finished_at = Some(Utc::now());
+self.finished_at = Some(OffsetDateTime::now_utc());
 self.status = DumpStatus::Done;
 }
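For optional timestamps the helper has an `option` variant, and it composes with `skip_serializing_if` so an unfinished dump omits `finished_at` entirely rather than writing null. A minimal sketch (assuming the `serde-well-known` feature; `serde_json` is used only for display):

    use serde::Serialize;
    use time::OffsetDateTime;

    #[derive(Serialize)]
    struct DumpInfo {
        #[serde(with = "time::serde::rfc3339")]
        started_at: OffsetDateTime,
        #[serde(
            skip_serializing_if = "Option::is_none",
            with = "time::serde::rfc3339::option"
        )]
        finished_at: Option<OffsetDateTime>,
    }

    fn main() {
        let info = DumpInfo {
            started_at: OffsetDateTime::now_utc(),
            finished_at: None,
        };
        // Prints {"started_at":"…Z"} with no `finished_at` key at all.
        println!("{}", serde_json::to_string(&info).unwrap());
    }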

@@ -8,11 +8,11 @@ use std::time::Duration;
 use actix_web::error::PayloadError;
 use bytes::Bytes;
-use chrono::{DateTime, Utc};
 use futures::Stream;
 use futures::StreamExt;
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, RwLock};
 use tokio::task::spawn_blocking;
 use tokio::time::sleep;
@@ -107,7 +107,7 @@ impl fmt::Display for DocumentAdditionFormat {
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
 pub database_size: u64,
-pub last_update: Option<DateTime<Utc>>,
+pub last_update: Option<OffsetDateTime>,
 pub indexes: BTreeMap<String, IndexStats>,
 }
@@ -579,7 +579,7 @@ where
 }
 pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
-let mut last_task: Option<DateTime<_>> = None;
+let mut last_task: Option<OffsetDateTime> = None;
 let mut indexes = BTreeMap::new();
 let mut database_size = 0;
 let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;
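`last_task` keeps its shape, only the element type changes; `OffsetDateTime` is `Ord`, so folding per-index timestamps into the most recent one works as before. An illustrative reduction (the timestamps are placeholders):

    use time::OffsetDateTime;

    fn main() {
        let mut last_task: Option<OffsetDateTime> = None;
        // Stand-in for iterating the indexes' most recent task timestamps.
        for ts in [OffsetDateTime::UNIX_EPOCH, OffsetDateTime::now_utc()] {
            last_task = Some(last_task.map_or(ts, |last| last.max(ts)));
        }
        println!("last update: {:?}", last_task);
    }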

@@ -6,7 +6,6 @@ use std::convert::{TryFrom, TryInto};
 use std::path::Path;
 use std::sync::Arc;
-use chrono::Utc;
 use error::{IndexResolverError, Result};
 use heed::Env;
 use index_store::{IndexStore, MapIndexStore};
@@ -14,6 +13,7 @@ use meilisearch_error::ResponseError;
 use meta_store::{HeedMetaStore, IndexMetaStore};
 use milli::update::{DocumentDeletionResult, IndexerConfig};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::oneshot;
 use tokio::task::spawn_blocking;
 use uuid::Uuid;
@@ -115,18 +115,19 @@ where
 self.process_document_addition_batch(batch).await
 } else {
 if let Some(task) = batch.tasks.first_mut() {
-task.events.push(TaskEvent::Processing(Utc::now()));
+task.events
+.push(TaskEvent::Processing(OffsetDateTime::now_utc()));
 match self.process_task(task).await {
 Ok(success) => {
 task.events.push(TaskEvent::Succeded {
 result: success,
-timestamp: Utc::now(),
+timestamp: OffsetDateTime::now_utc(),
 });
 }
 Err(err) => task.events.push(TaskEvent::Failed {
 error: err.into(),
-timestamp: Utc::now(),
+timestamp: OffsetDateTime::now_utc(),
 }),
 }
 }
@@ -225,7 +226,7 @@ where
 // If the index doesn't exist and we are not allowed to create it with the first
 // task, we must fails the whole batch.
-let now = Utc::now();
+let now = OffsetDateTime::now_utc();
 let index = match index {
 Ok(index) => index,
 Err(e) => {
@@ -253,17 +254,17 @@ where
 let event = match result {
 Ok(Ok(result)) => TaskEvent::Succeded {
-timestamp: Utc::now(),
+timestamp: OffsetDateTime::now_utc(),
 result: TaskResult::DocumentAddition {
 indexed_documents: result.indexed_documents,
 },
 },
 Ok(Err(e)) => TaskEvent::Failed {
-timestamp: Utc::now(),
+timestamp: OffsetDateTime::now_utc(),
 error: e.into(),
 },
 Err(e) => TaskEvent::Failed {
-timestamp: Utc::now(),
+timestamp: OffsetDateTime::now_utc(),
 error: IndexResolverError::from(e).into(),
 },
 };
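Every arm of the result match stamps its event the same way, with `OffsetDateTime::now_utc()` as the drop-in for `Utc::now()`. A self-contained sketch with simplified stand-in types (the real `TaskEvent` and `TaskResult` carry more fields):

    use time::OffsetDateTime;

    // Simplified stand-ins for the task types in this file.
    #[derive(Debug)]
    enum TaskEvent {
        Succeded { indexed_documents: u64, timestamp: OffsetDateTime },
        Failed { error: String, timestamp: OffsetDateTime },
    }

    fn outcome(result: Result<u64, String>) -> TaskEvent {
        match result {
            Ok(indexed_documents) => TaskEvent::Succeded {
                indexed_documents,
                timestamp: OffsetDateTime::now_utc(),
            },
            Err(error) => TaskEvent::Failed {
                error,
                timestamp: OffsetDateTime::now_utc(),
            },
        }
    }

    fn main() {
        println!("{:?}", outcome(Ok(1)));
        println!("{:?}", outcome(Err("index not found".into())));
    }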
@@ -524,7 +525,7 @@ mod test {
 };
 if primary_key.is_some() {
 mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
-.then(move |_| Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None }));
+.then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }));
 }
 mocker.when::<(IndexDocumentsMethod, Option<String>, UpdateFileStore, IntoIter<Uuid>), IndexResult<DocumentAdditionResult>>("update_documents")
 .then(move |(_, _, _, _)| result());
@@ -569,7 +570,7 @@ mod test {
 | TaskContent::IndexCreation { primary_key } => {
 if primary_key.is_some() {
 let result = move || if !index_op_fails {
-Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None })
+Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })
 } else {
 // return this error because it's easy to generate...
 Err(IndexError::DocumentNotFound("a doc".into()))
@@ -640,7 +641,7 @@ mod test {
 let update_file_store = UpdateFileStore::mock(mocker);
 let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store);
-let batch = Batch { id: 1, created_at: Utc::now(), tasks: vec![task.clone()] };
+let batch = Batch { id: 1, created_at: OffsetDateTime::now_utc(), tasks: vec![task.clone()] };
 let result = index_resolver.process_batch(batch).await;
 // Test for some expected output scenarios:

@@ -1,4 +1,4 @@
-use chrono::{DateTime, Utc};
+use time::OffsetDateTime;
 use super::task::Task;
@@ -7,7 +7,7 @@ pub type BatchId = u64;
 #[derive(Debug)]
 pub struct Batch {
 pub id: BatchId,
-pub created_at: DateTime<Utc>,
+pub created_at: OffsetDateTime,
 pub tasks: Vec<Task>,
 }

@@ -6,8 +6,8 @@ use std::sync::Arc;
 use std::time::Duration;
 use atomic_refcell::AtomicRefCell;
-use chrono::Utc;
 use milli::update::IndexDocumentsMethod;
+use time::OffsetDateTime;
 use tokio::sync::{watch, RwLock};
 use crate::options::SchedulerConfig;
@@ -357,7 +357,7 @@ impl Scheduler {
 tasks.iter_mut().for_each(|t| {
 t.events.push(TaskEvent::Batched {
 batch_id: id,
-timestamp: Utc::now(),
+timestamp: OffsetDateTime::now_utc(),
 })
 });
@@ -365,7 +365,7 @@ impl Scheduler {
 let batch = Batch {
 id,
-created_at: Utc::now(),
+created_at: OffsetDateTime::now_utc(),
 tasks,
 };

@@ -1,9 +1,9 @@
 use std::path::PathBuf;
-use chrono::{DateTime, Utc};
 use meilisearch_error::ResponseError;
 use milli::update::{DocumentAdditionResult, IndexDocumentsMethod};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::oneshot;
 use uuid::Uuid;
@@ -36,22 +36,22 @@ impl From<DocumentAdditionResult> for TaskResult {
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 #[cfg_attr(test, derive(proptest_derive::Arbitrary))]
 pub enum TaskEvent {
-Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>),
+Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] OffsetDateTime),
 Batched {
 #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-timestamp: DateTime<Utc>,
+timestamp: OffsetDateTime,
 batch_id: BatchId,
 },
-Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>),
+Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] OffsetDateTime),
 Succeded {
 result: TaskResult,
 #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-timestamp: DateTime<Utc>,
+timestamp: OffsetDateTime,
 },
 Failed {
 error: ResponseError,
 #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-timestamp: DateTime<Utc>,
+timestamp: OffsetDateTime,
 },
 }
@@ -165,7 +165,7 @@ mod test {
 ]
 }
-pub(super) fn datetime_strategy() -> impl Strategy<Value = DateTime<Utc>> {
-Just(Utc::now())
+pub(super) fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> {
+Just(OffsetDateTime::now_utc())
 }
 }
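For the property tests, only the strategy's value type changes. A self-contained restatement (assuming `proptest` as a dev-dependency):

    use proptest::prelude::*;
    use time::OffsetDateTime;

    // A constant strategy is enough here: every generated TaskEvent simply
    // carries "now" as its timestamp.
    fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> {
        Just(OffsetDateTime::now_utc())
    }

    fn main() {
        let _ = datetime_strategy();
    }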

@@ -5,9 +5,9 @@ use std::io::{BufWriter, Write};
 use std::path::Path;
 use std::sync::Arc;
-use chrono::Utc;
 use heed::{Env, RwTxn};
 use log::debug;
+use time::OffsetDateTime;
 use super::error::TaskError;
 use super::task::{Task, TaskContent, TaskId};
@@ -72,7 +72,7 @@ impl TaskStore {
 let task = tokio::task::spawn_blocking(move || -> Result<Task> {
 let mut txn = store.wtxn()?;
 let next_task_id = store.next_task_id(&mut txn)?;
-let created_at = TaskEvent::Created(Utc::now());
+let created_at = TaskEvent::Created(OffsetDateTime::now_utc());
 let task = Task {
 id: next_task_id,
 index_uid,

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 use std::time::Duration;
-use chrono::Utc;
+use time::OffsetDateTime;
 use tokio::sync::{watch, RwLock};
 use tokio::time::interval_at;
@@ -63,7 +63,8 @@ where
 match pending {
 Pending::Batch(mut batch) => {
 for task in &mut batch.tasks {
-task.events.push(TaskEvent::Processing(Utc::now()));
+task.events
+.push(TaskEvent::Processing(OffsetDateTime::now_utc()));
 }
 batch.tasks = {